| repo_name | ref | path | copies | content |
|---|---|---|---|---|
0x7678/openbts-
|
refs/heads/master
|
RRLP/tests/runtest.py
|
2
|
#!/usr/bin/python
from __future__ import with_statement
import sys
import os
import struct

executable='rrlpconverter'

def binary(x):
    n = bin(x)[2:]
    if len(n)%8 != 0:
        n = '0'*(8 - len(n)%8) + n
    return n

def hex2str(h):
    """ h is a string of hexadecimal chars
    returns a string whose hexadecimal representation is h """
    assert(len(h)%2==0)
    return ''.join(chr(int(h[i:i+2],16)) for i in xrange(0,len(h),2))

def bin2str(b):
    assert(len(b)%8==0)
    return ''.join(chr(int(b[i:i+8],2)) for i in xrange(0,len(b),8))

def str2hex(s):
    return ''.join('%02X' % ord(c) for c in s)

def main():
    s = ''.join(sys.argv[1:])
    if set(s) == set(['0','1']):
        d = bin2str(s)
        b = s
    else:
        d = hex2str(s)
        b = binary(int(sys.argv[-1], 16))
    with open('temp.per', 'wb+') as fd:
        fd.write(d)
    print "rrreccceoooooOmmaaaaaaappmmmu___"
    print b
    print str2hex(d)
    os.system('%s -oxer -iper temp.per' % executable)

def findnull():
    for i in [1,2,3]:
        pass

if __name__ == '__main__':
    main()
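# --- Hedged illustration (not part of the original script) ---
# A quick sanity check of the conversion helpers defined above.
def _selftest():
    sample = 'DEADBEEF'
    raw = hex2str(sample)            # hex text -> raw byte string
    assert str2hex(raw) == sample    # round-trips back to the same hex text
    bits = binary(int(sample, 16))   # integer -> zero-padded bit string
    assert bin2str(bits) == raw      # bit string -> the same raw byte string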
|
nhicher/ansible
|
refs/heads/devel
|
lib/ansible/modules/cloud/spotinst/spotinst_aws_elastigroup.py
|
8
|
#!/usr/bin/python
# Copyright (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = """
---
module: spotinst_aws_elastigroup
version_added: 2.5
short_description: Create, update or delete Spotinst AWS Elastigroups
author: Spotinst
description:
- Can create, update, or delete Spotinst AWS Elastigroups
Launch configuration is part of the elastigroup configuration,
so no additional modules are necessary for handling the launch configuration.
You will have to have a credentials file in this location - <home>/.spotinst/credentials
The credentials file must contain a row that looks like this
token = <YOUR TOKEN>
Full documentation available at https://help.spotinst.com/hc/en-us/articles/115003530285-Ansible-
requirements:
- spotinst >= 1.0.21
- python >= 2.7
- spotinst_sdk >= 1.0.38
options:
credentials_path:
description:
- (String) Optional parameter that allows setting a non-default credentials path.
Default is ~/.spotinst/credentials
account_id:
description:
- (String) Optional parameter that allows setting an account-id inside the module configuration
By default this is retrieved from the credentials path
availability_vs_cost:
choices:
- availabilityOriented
- costOriented
- balanced
description:
- (String) The strategy orientation.
required: true
availability_zones:
description:
- (List of Objects) a list of hash/dictionaries of Availability Zones that are configured in the elastigroup;
'[{"key":"value", "key":"value"}]';
keys allowed are
name (String),
subnet_id (String),
placement_group_name (String),
required: true
block_device_mappings:
description:
- (List of Objects) a list of hash/dictionaries of Block Device Mappings for elastigroup instances;
You can specify virtual devices and EBS volumes.;
'[{"key":"value", "key":"value"}]';
keys allowed are
device_name (List of Strings),
virtual_name (String),
no_device (String),
ebs (Object, expects the following keys-
delete_on_termination(Boolean),
encrypted(Boolean),
iops (Integer),
snapshot_id(Integer),
volume_type(String),
volume_size(Integer))
chef:
description:
- (Object) The Chef integration configuration.;
Expects the following keys - chef_server (String),
organization (String),
user (String),
pem_key (String),
chef_version (String)
draining_timeout:
description:
- (Integer) Time for instance to be drained from incoming requests and deregistered from ELB before termination.
ebs_optimized:
description:
- (Boolean) Enable EBS optimization for supported instances which are not enabled by default.;
Note - additional charges will be applied.
ebs_volume_pool:
description:
- (List of Objects) a list of hash/dictionaries of EBS devices to reattach to the elastigroup when available;
'[{"key":"value", "key":"value"}]';
keys allowed are -
volume_ids (List of Strings),
device_name (String)
ecs:
description:
- (Object) The ECS integration configuration.;
Expects the following key -
cluster_name (String)
elastic_ips:
description:
- (List of Strings) List of ElasticIps Allocation Ids (Example C(eipalloc-9d4e16f8)) to associate to the group instances
fallback_to_od:
description:
- (Boolean) In case of no spots available, Elastigroup will launch an On-demand instance instead
health_check_grace_period:
description:
- (Integer) The amount of time, in seconds, after the instance has launched to start and check its health.
default: 300
health_check_unhealthy_duration_before_replacement:
description:
- (Integer) Minimal amount of time an instance should be unhealthy for us to consider it unhealthy.
health_check_type:
choices:
- ELB
- HCS
- TARGET_GROUP
- MLB
- EC2
description:
- (String) The service to use for the health check.
iam_role_name:
description:
- (String) The instance profile iamRole name
- Only use iam_role_arn, or iam_role_name
iam_role_arn:
description:
- (String) The instance profile iamRole arn
- Only use iam_role_arn, or iam_role_name
id:
description:
- (String) The group id if it already exists and you want to update, or delete it.
This will not work unless the uniqueness_by field is set to id.
When this is set, and the uniqueness_by field is set, the group will either be updated or deleted, but not created.
ignore_changes:
choices:
- image_id
- target
description:
- (List of Strings) list of fields on which changes should be ignored when updating
image_id:
description:
- (String) The image Id used to launch the instance.;
In case of conflict between Instance type and image type, an error will be returned
required: true
key_pair:
description:
- (String) Specify a Key Pair to attach to the instances
required: true
kubernetes:
description:
- (Object) The Kubernetes integration configuration.
Expects the following keys -
api_server (String),
token (String)
lifetime_period:
description:
- (String) lifetime period
load_balancers:
description:
- (List of Strings) List of classic ELB names
max_size:
description:
- (Integer) The upper limit number of instances that you can scale up to
required: true
mesosphere:
description:
- (Object) The Mesosphere integration configuration.
Expects the following key -
api_server (String)
min_size:
description:
- (Integer) The lower limit number of instances that you can scale down to
required: true
monitoring:
description:
- (Boolean) Describes whether instance Enhanced Monitoring is enabled
required: true
name:
description:
- (String) Unique name for elastigroup to be created, updated or deleted
required: true
network_interfaces:
description:
- (List of Objects) a list of hash/dictionaries of network interfaces to add to the elastigroup;
'[{"key":"value", "key":"value"}]';
keys allowed are -
description (String),
device_index (Integer),
secondary_private_ip_address_count (Integer),
associate_public_ip_address (Boolean),
delete_on_termination (Boolean),
groups (List of Strings),
network_interface_id (String),
private_ip_address (String),
subnet_id (String),
associate_ipv6_address (Boolean),
private_ip_addresses (List of Objects, Keys are privateIpAddress (String, required) and primary (Boolean))
on_demand_count:
description:
- (Integer) Required if risk is not set
- Number of on demand instances to launch. All other instances will be spot instances.;
Either set this parameter or the risk parameter
on_demand_instance_type:
description:
- (String) On-demand instance type that will be provisioned
required: true
opsworks:
description:
- (Object) The elastigroup OpsWorks integration configuration.;
Expects the following key -
layer_id (String)
persistence:
description:
- (Object) The Stateful elastigroup configuration.;
Accepts the following keys -
should_persist_root_device (Boolean),
should_persist_block_devices (Boolean),
should_persist_private_ip (Boolean)
product:
choices:
- Linux/UNIX
- SUSE Linux
- Windows
- Linux/UNIX (Amazon VPC)
- SUSE Linux (Amazon VPC)
- Windows (Amazon VPC)
description:
- (String) Operating system type.
required: true
rancher:
description:
- (Object) The Rancher integration configuration.;
Expects the following keys -
version (String),
access_key (String),
secret_key (String),
master_host (String)
right_scale:
description:
- (Object) The Rightscale integration configuration.;
Expects the following keys -
account_id (String),
refresh_token (String)
risk:
description:
- (Integer) required if on demand is not set. The percentage of Spot instances to launch (0 - 100).
roll_config:
description:
- (Object) Roll configuration.;
If you would like the group to roll after updating, please use this feature.
Accepts the following keys -
batch_size_percentage(Integer, Required),
grace_period - (Integer, Required),
health_check_type(String, Optional)
scheduled_tasks:
description:
- (List of Objects) a list of hash/dictionaries of scheduled tasks to configure in the elastigroup;
'[{"key":"value", "key":"value"}]';
keys allowed are -
adjustment (Integer),
scale_target_capacity (Integer),
scale_min_capacity (Integer),
scale_max_capacity (Integer),
adjustment_percentage (Integer),
batch_size_percentage (Integer),
cron_expression (String),
frequency (String),
grace_period (Integer),
task_type (String, required),
is_enabled (Boolean)
security_group_ids:
description:
- (List of Strings) One or more security group IDs. ;
In case of update it will override the existing Security Group with the new given array
required: true
shutdown_script:
description:
- (String) The Base64-encoded shutdown script that executes prior to instance termination.
Encode before setting.
signals:
description:
- (List of Objects) a list of hash/dictionaries of signals to configure in the elastigroup;
keys allowed are -
name (String, required),
timeout (Integer)
spin_up_time:
description:
- (Integer) spin up time, in seconds, for the instance
spot_instance_types:
description:
- (List of Strings) Spot instance type that will be provisioned.
required: true
state:
choices:
- present
- absent
description:
- (String) create or delete the elastigroup
tags:
description:
- (List of tagKey:tagValue pairs) a list of tags to configure in the elastigroup. Please specify list of keys and values (key colon value);
target:
description:
- (Integer) The number of instances to launch
required: true
target_group_arns:
description:
- (List of Strings) List of target group arns instances should be registered to
tenancy:
choices:
- default
- dedicated
description:
- (String) dedicated vs shared tenancy
terminate_at_end_of_billing_hour:
description:
- (Boolean) terminate at the end of billing hour
unit:
choices:
- instance
- weight
description:
- (String) The capacity unit to launch instances by.
required: true
up_scaling_policies:
description:
- (List of Objects) a list of hash/dictionaries of scaling policies to configure in the elastigroup;
'[{"key":"value", "key":"value"}]';
keys allowed are -
policy_name (String, required),
namespace (String, required),
metric_name (String, required),
dimensions (List of Objects, Keys allowed are name (String, required) and value (String)),
statistic (String, required),
evaluation_periods (String, required),
period (String, required),
threshold (String, required),
cooldown (String, required),
unit (String, required),
operator (String, required),
action_type (String, required),
adjustment (String),
min_target_capacity (String),
target (String),
maximum (String),
minimum (String)
down_scaling_policies:
description:
- (List of Objects) a list of hash/dictionaries of scaling policies to configure in the elastigroup;
'[{"key":"value", "key":"value"}]';
keys allowed are -
policy_name (String, required),
namespace (String, required),
metric_name (String, required),
dimensions (List of Objects, Keys allowed are name (String, required) and value (String)),
statistic (String, required),
evaluation_periods (String, required),
period (String, required),
threshold (String, required),
cooldown (String, required),
unit (String, required),
operator (String, required),
action_type (String, required),
adjustment (String),
max_target_capacity (String),
target (String),
maximum (String),
minimum (String)
target_tracking_policies:
description:
- (List of Objects) a list of hash/dictionaries of target tracking policies to configure in the elastigroup;
'[{"key":"value", "key":"value"}]';
keys allowed are -
policy_name (String, required),
namespace (String, required),
source (String, required),
metric_name (String, required),
statistic (String, required),
unit (String, required),
cooldown (String, required),
target (String, required)
uniqueness_by:
choices:
- id
- name
description:
- (String) If your group names are not unique, you may use this feature to update or delete a specific group.
Whenever this property is set, you must set a group_id in order to update or delete a group, otherwise a group will be created.
user_data:
description:
- (String) Base64-encoded MIME user data. Encode before setting the value.
utilize_reserved_instances:
description:
- (Boolean) In case of any available Reserved Instances,
Elastigroup will utilize your reservations before purchasing Spot instances.
wait_for_instances:
description:
- (Boolean) Whether or not the elastigroup creation / update actions should wait for the instances to spin
wait_timeout:
description:
- (Integer) How long the module should wait for instances before failing the action.;
Only works if wait_for_instances is True.
"""
EXAMPLES = '''
# Basic configuration YAML example
- hosts: localhost
tasks:
- name: create elastigroup
spotinst_aws_elastigroup:
state: present
risk: 100
availability_vs_cost: balanced
availability_zones:
- name: us-west-2a
subnet_id: subnet-2b68a15c
image_id: ami-f173cc91
key_pair: spotinst-oregon
max_size: 15
min_size: 0
target: 0
unit: instance
monitoring: True
name: ansible-group
on_demand_instance_type: c3.large
product: Linux/UNIX
load_balancers:
- test-lb-1
security_group_ids:
- sg-8f4b8fe9
spot_instance_types:
- c3.large
do_not_update:
- image_id
- target
register: result
- debug: var=result
# In this example, we create an elastigroup and wait 600 seconds to retrieve the instances, and use their private ips
- hosts: localhost
tasks:
- name: create elastigroup
spotinst_aws_elastigroup:
state: present
account_id: act-1a9dd2b
risk: 100
availability_vs_cost: balanced
availability_zones:
- name: us-west-2a
subnet_id: subnet-2b68a15c
tags:
- Environment: someEnvValue
- OtherTagKey: otherValue
image_id: ami-f173cc91
key_pair: spotinst-oregon
max_size: 5
min_size: 0
target: 0
unit: instance
monitoring: True
name: ansible-group-tal
on_demand_instance_type: c3.large
product: Linux/UNIX
security_group_ids:
- sg-8f4b8fe9
block_device_mappings:
- device_name: '/dev/sda1'
ebs:
volume_size: 100
volume_type: gp2
spot_instance_types:
- c3.large
do_not_update:
- image_id
wait_for_instances: True
wait_timeout: 600
register: result
- name: Store private ips to file
shell: echo {{ item.private_ip }}\\n >> list-of-private-ips
with_items: "{{ result.instances }}"
- debug: var=result
# In this example, we create an elastigroup with multiple block device mappings, tags, and also an account id
# In organizations with more than one account, it is required to specify an account_id
- hosts: localhost
tasks:
- name: create elastigroup
spotinst_aws_elastigroup:
state: present
account_id: act-1a9dd2b
risk: 100
availability_vs_cost: balanced
availability_zones:
- name: us-west-2a
subnet_id: subnet-2b68a15c
tags:
- Environment: someEnvValue
- OtherTagKey: otherValue
image_id: ami-f173cc91
key_pair: spotinst-oregon
max_size: 5
min_size: 0
target: 0
unit: instance
monitoring: True
name: ansible-group-tal
on_demand_instance_type: c3.large
product: Linux/UNIX
security_group_ids:
- sg-8f4b8fe9
block_device_mappings:
- device_name: '/dev/xvda'
ebs:
volume_size: 60
volume_type: gp2
- device_name: '/dev/xvdb'
ebs:
volume_size: 120
volume_type: gp2
spot_instance_types:
- c3.large
do_not_update:
- image_id
wait_for_instances: True
wait_timeout: 600
register: result
- name: Store private ips to file
shell: echo {{ item.private_ip }}\\n >> list-of-private-ips
with_items: "{{ result.instances }}"
- debug: var=result
# In this example we have set up block device mapping with ephemeral devices
- hosts: localhost
tasks:
- name: create elastigroup
spotinst_aws_elastigroup:
state: present
risk: 100
availability_vs_cost: balanced
availability_zones:
- name: us-west-2a
subnet_id: subnet-2b68a15c
image_id: ami-f173cc91
key_pair: spotinst-oregon
max_size: 15
min_size: 0
target: 0
unit: instance
block_device_mappings:
- device_name: '/dev/xvda'
virtual_name: ephemeral0
- device_name: '/dev/xvdb/'
virtual_name: ephemeral1
monitoring: True
name: ansible-group
on_demand_instance_type: c3.large
product: Linux/UNIX
load_balancers:
- test-lb-1
security_group_ids:
- sg-8f4b8fe9
spot_instance_types:
- c3.large
do_not_update:
- image_id
- target
register: result
- debug: var=result
# In this example we create a basic group configuration with a network interface defined.
# Each network interface must have a device index
- hosts: localhost
tasks:
- name: create elastigroup
spotinst_aws_elastigroup:
state: present
risk: 100
availability_vs_cost: balanced
network_interfaces:
- associate_public_ip_address: true
device_index: 0
availability_zones:
- name: us-west-2a
subnet_id: subnet-2b68a15c
image_id: ami-f173cc91
key_pair: spotinst-oregon
max_size: 15
min_size: 0
target: 0
unit: instance
monitoring: True
name: ansible-group
on_demand_instance_type: c3.large
product: Linux/UNIX
load_balancers:
- test-lb-1
security_group_ids:
- sg-8f4b8fe9
spot_instance_types:
- c3.large
do_not_update:
- image_id
- target
register: result
- debug: var=result
# In this example we create a basic group configuration with a target tracking scaling policy defined
- hosts: localhost
tasks:
- name: create elastigroup
spotinst_aws_elastigroup:
account_id: act-92d45673
state: present
risk: 100
availability_vs_cost: balanced
availability_zones:
- name: us-west-2a
subnet_id: subnet-79da021e
image_id: ami-f173cc91
fallback_to_od: true
tags:
- Creator: ValueOfCreatorTag
- Environment: ValueOfEnvironmentTag
key_pair: spotinst-labs-oregon
max_size: 10
min_size: 0
target: 2
unit: instance
monitoring: True
name: ansible-group-1
on_demand_instance_type: c3.large
product: Linux/UNIX
security_group_ids:
- sg-46cdc13d
spot_instance_types:
- c3.large
target_tracking_policies:
- policy_name: target-tracking-1
namespace: AWS/EC2
metric_name: CPUUtilization
statistic: average
unit: percent
target: 50
cooldown: 120
do_not_update:
- image_id
register: result
- debug: var=result
'''
RETURN = '''
---
instances:
description: List of active elastigroup instances and their details.
returned: success
type: dict
sample: [
{
"spotInstanceRequestId": "sir-regs25zp",
"instanceId": "i-09640ad8678234c",
"instanceType": "m4.large",
"product": "Linux/UNIX",
"availabilityZone": "us-west-2b",
"privateIp": "180.0.2.244",
"createdAt": "2017-07-17T12:46:18.000Z",
"status": "fulfilled"
}
]
group_id:
description: Created / Updated group's ID.
returned: success
type: string
sample: "sig-12345"
'''
HAS_SPOTINST_SDK = False
__metaclass__ = type
import os
import time
from ansible.module_utils.basic import AnsibleModule
try:
import spotinst
from spotinst import SpotinstClientException
HAS_SPOTINST_SDK = True
except ImportError:
pass
eni_fields = ('description',
'device_index',
'secondary_private_ip_address_count',
'associate_public_ip_address',
'delete_on_termination',
'groups',
'network_interface_id',
'private_ip_address',
'subnet_id',
'associate_ipv6_address')
private_ip_fields = ('private_ip_address',
'primary')
capacity_fields = (dict(ansible_field_name='min_size',
spotinst_field_name='minimum'),
dict(ansible_field_name='max_size',
spotinst_field_name='maximum'),
'target',
'unit')
lspec_fields = ('user_data',
'key_pair',
'tenancy',
'shutdown_script',
'monitoring',
'ebs_optimized',
'image_id',
'health_check_type',
'health_check_grace_period',
'health_check_unhealthy_duration_before_replacement',
'security_group_ids')
iam_fields = (dict(ansible_field_name='iam_role_name',
spotinst_field_name='name'),
dict(ansible_field_name='iam_role_arn',
spotinst_field_name='arn'))
scheduled_task_fields = ('adjustment',
'adjustment_percentage',
'batch_size_percentage',
'cron_expression',
'frequency',
'grace_period',
'task_type',
'is_enabled',
'scale_target_capacity',
'scale_min_capacity',
'scale_max_capacity')
scaling_policy_fields = ('policy_name',
'namespace',
'metric_name',
'dimensions',
'statistic',
'evaluation_periods',
'period',
'threshold',
'cooldown',
'unit',
'operator')
tracking_policy_fields = ('policy_name',
'namespace',
'source',
'metric_name',
'statistic',
'unit',
'cooldown',
'target',
'threshold')
action_fields = (dict(ansible_field_name='action_type',
spotinst_field_name='type'),
'adjustment',
'min_target_capacity',
'max_target_capacity',
'target',
'minimum',
'maximum')
signal_fields = ('name',
'timeout')
multai_lb_fields = ('balancer_id',
'project_id',
'target_set_id',
'az_awareness',
'auto_weight')
persistence_fields = ('should_persist_root_device',
'should_persist_block_devices',
'should_persist_private_ip')
strategy_fields = ('risk',
'utilize_reserved_instances',
'fallback_to_od',
'on_demand_count',
'availability_vs_cost',
'draining_timeout',
'spin_up_time',
'lifetime_period')
ebs_fields = ('delete_on_termination',
'encrypted',
'iops',
'snapshot_id',
'volume_type',
'volume_size')
bdm_fields = ('device_name',
'virtual_name',
'no_device')
kubernetes_fields = ('api_server',
'token')
right_scale_fields = ('account_id',
'refresh_token')
rancher_fields = ('access_key',
'secret_key',
'master_host',
'version')
chef_fields = ('chef_server',
'organization',
'user',
'pem_key',
'chef_version')
az_fields = ('name',
'subnet_id',
'placement_group_name')
opsworks_fields = ('layer_id',)
scaling_strategy_fields = ('terminate_at_end_of_billing_hour',)
mesosphere_fields = ('api_server',)
ecs_fields = ('cluster_name',)
multai_fields = ('multai_token',)
def handle_elastigroup(client, module):
has_changed = False
should_create = False
group_id = None
message = 'None'
name = module.params.get('name')
state = module.params.get('state')
uniqueness_by = module.params.get('uniqueness_by')
external_group_id = module.params.get('id')
if uniqueness_by == 'id':
if external_group_id is None:
should_create = True
else:
should_create = False
group_id = external_group_id
else:
groups = client.get_elastigroups()
should_create, group_id = find_group_with_same_name(groups, name)
if should_create is True:
if state == 'present':
eg = expand_elastigroup(module, is_update=False)
module.debug(str(" [INFO] " + message + "\n"))
group = client.create_elastigroup(group=eg)
group_id = group['id']
message = 'Created group Successfully.'
has_changed = True
elif state == 'absent':
message = 'Cannot delete non-existent group.'
has_changed = False
else:
eg = expand_elastigroup(module, is_update=True)
if state == 'present':
group = client.update_elastigroup(group_update=eg, group_id=group_id)
message = 'Updated group successfully.'
try:
roll_config = module.params.get('roll_config')
if roll_config:
eg_roll = spotinst.aws_elastigroup.Roll(
batch_size_percentage=roll_config.get('batch_size_percentage'),
grace_period=roll_config.get('grace_period'),
health_check_type=roll_config.get('health_check_type')
)
roll_response = client.roll_group(group_roll=eg_roll, group_id=group_id)
message = 'Updated and started rolling the group successfully.'
except SpotinstClientException as exc:
message = 'Updated group successfully, but failed to perform roll. Error:' + str(exc)
has_changed = True
elif state == 'absent':
try:
client.delete_elastigroup(group_id=group_id)
except SpotinstClientException as exc:
if "GROUP_DOESNT_EXIST" in exc.message:
pass
else:
module.fail_json(msg="Error while attempting to delete group : " + exc.message)
message = 'Deleted group successfully.'
has_changed = True
return group_id, message, has_changed
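# Hedged summary of the decision logic above (illustration only, not part of the module):
#   uniqueness_by == 'id' and no id param     -> treat as create (or no-op when state=absent)
#   uniqueness_by == 'id' and id param given  -> update or delete that exact group id
#   uniqueness_by == 'name' (the default)     -> look the group up by name; create it only if no match is found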
def retrieve_group_instances(client, module, group_id):
wait_timeout = module.params.get('wait_timeout')
wait_for_instances = module.params.get('wait_for_instances')
if wait_timeout is None:
wait_timeout = 300
wait_timeout = time.time() + wait_timeout
target = module.params.get('target')
state = module.params.get('state')
instances = list()
if state == 'present' and group_id is not None and wait_for_instances is True:
is_amount_fulfilled = False
while is_amount_fulfilled is False and wait_timeout > time.time():
instances = list()
amount_of_fulfilled_instances = 0
active_instances = client.get_elastigroup_active_instances(group_id=group_id)
for active_instance in active_instances:
if active_instance.get('private_ip') is not None:
amount_of_fulfilled_instances += 1
instances.append(active_instance)
if amount_of_fulfilled_instances >= target:
is_amount_fulfilled = True
time.sleep(10)
return instances
def find_group_with_same_name(groups, name):
for group in groups:
if group['name'] == name:
return False, group.get('id')
return True, None
def expand_elastigroup(module, is_update):
do_not_update = module.params['do_not_update']
name = module.params.get('name')
eg = spotinst.aws_elastigroup.Elastigroup()
description = module.params.get('description')
if name is not None:
eg.name = name
if description is not None:
eg.description = description
# Capacity
expand_capacity(eg, module, is_update, do_not_update)
# Strategy
expand_strategy(eg, module)
# Scaling
expand_scaling(eg, module)
# Third party integrations
expand_integrations(eg, module)
# Compute
expand_compute(eg, module, is_update, do_not_update)
# Multai
expand_multai(eg, module)
# Scheduling
expand_scheduled_tasks(eg, module)
return eg
def expand_compute(eg, module, is_update, do_not_update):
elastic_ips = module.params['elastic_ips']
on_demand_instance_type = module.params.get('on_demand_instance_type')
spot_instance_types = module.params['spot_instance_types']
ebs_volume_pool = module.params['ebs_volume_pool']
availability_zones_list = module.params['availability_zones']
product = module.params.get('product')
eg_compute = spotinst.aws_elastigroup.Compute()
if product is not None:
# Only put product on group creation
if is_update is not True:
eg_compute.product = product
if elastic_ips is not None:
eg_compute.elastic_ips = elastic_ips
if on_demand_instance_type is not None or spot_instance_types is not None:
eg_instance_types = spotinst.aws_elastigroup.InstanceTypes()
# Guard each assignment on the value it actually assigns.
if spot_instance_types is not None:
eg_instance_types.spot = spot_instance_types
if on_demand_instance_type is not None:
eg_instance_types.ondemand = on_demand_instance_type
if eg_instance_types.spot is not None or eg_instance_types.ondemand is not None:
eg_compute.instance_types = eg_instance_types
expand_ebs_volume_pool(eg_compute, ebs_volume_pool)
eg_compute.availability_zones = expand_list(availability_zones_list, az_fields, 'AvailabilityZone')
expand_launch_spec(eg_compute, module, is_update, do_not_update)
eg.compute = eg_compute
def expand_ebs_volume_pool(eg_compute, ebs_volumes_list):
if ebs_volumes_list is not None:
eg_volumes = []
for volume in ebs_volumes_list:
eg_volume = spotinst.aws_elastigroup.EbsVolume()
if volume.get('device_name') is not None:
eg_volume.device_name = volume.get('device_name')
if volume.get('volume_ids') is not None:
eg_volume.volume_ids = volume.get('volume_ids')
if eg_volume.device_name is not None:
eg_volumes.append(eg_volume)
if len(eg_volumes) > 0:
eg_compute.ebs_volume_pool = eg_volumes
def expand_launch_spec(eg_compute, module, is_update, do_not_update):
eg_launch_spec = expand_fields(lspec_fields, module.params, 'LaunchSpecification')
if module.params['iam_role_arn'] is not None or module.params['iam_role_name'] is not None:
eg_launch_spec.iam_role = expand_fields(iam_fields, module.params, 'IamRole')
tags = module.params['tags']
load_balancers = module.params['load_balancers']
target_group_arns = module.params['target_group_arns']
block_device_mappings = module.params['block_device_mappings']
network_interfaces = module.params['network_interfaces']
if is_update is True:
if 'image_id' in do_not_update:
delattr(eg_launch_spec, 'image_id')
expand_tags(eg_launch_spec, tags)
expand_load_balancers(eg_launch_spec, load_balancers, target_group_arns)
expand_block_device_mappings(eg_launch_spec, block_device_mappings)
expand_network_interfaces(eg_launch_spec, network_interfaces)
eg_compute.launch_specification = eg_launch_spec
def expand_integrations(eg, module):
rancher = module.params.get('rancher')
mesosphere = module.params.get('mesosphere')
ecs = module.params.get('ecs')
kubernetes = module.params.get('kubernetes')
right_scale = module.params.get('right_scale')
opsworks = module.params.get('opsworks')
chef = module.params.get('chef')
integration_exists = False
eg_integrations = spotinst.aws_elastigroup.ThirdPartyIntegrations()
if mesosphere is not None:
eg_integrations.mesosphere = expand_fields(mesosphere_fields, mesosphere, 'Mesosphere')
integration_exists = True
if ecs is not None:
eg_integrations.ecs = expand_fields(ecs_fields, ecs, 'EcsConfiguration')
integration_exists = True
if kubernetes is not None:
eg_integrations.kubernetes = expand_fields(kubernetes_fields, kubernetes, 'KubernetesConfiguration')
integration_exists = True
if right_scale is not None:
eg_integrations.right_scale = expand_fields(right_scale_fields, right_scale, 'RightScaleConfiguration')
integration_exists = True
if opsworks is not None:
eg_integrations.opsworks = expand_fields(opsworks_fields, opsworks, 'OpsWorksConfiguration')
integration_exists = True
if rancher is not None:
eg_integrations.rancher = expand_fields(rancher_fields, rancher, 'Rancher')
integration_exists = True
if chef is not None:
eg_integrations.chef = expand_fields(chef_fields, chef, 'ChefConfiguration')
integration_exists = True
if integration_exists:
eg.third_parties_integration = eg_integrations
def expand_capacity(eg, module, is_update, do_not_update):
eg_capacity = expand_fields(capacity_fields, module.params, 'Capacity')
if is_update is True:
delattr(eg_capacity, 'unit')
if 'target' in do_not_update:
delattr(eg_capacity, 'target')
eg.capacity = eg_capacity
def expand_strategy(eg, module):
persistence = module.params.get('persistence')
signals = module.params.get('signals')
eg_strategy = expand_fields(strategy_fields, module.params, 'Strategy')
terminate_at_end_of_billing_hour = module.params.get('terminate_at_end_of_billing_hour')
if terminate_at_end_of_billing_hour is not None:
eg_strategy.eg_scaling_strategy = expand_fields(scaling_strategy_fields,
module.params, 'ScalingStrategy')
if persistence is not None:
eg_strategy.persistence = expand_fields(persistence_fields, persistence, 'Persistence')
if signals is not None:
eg_signals = expand_list(signals, signal_fields, 'Signal')
if len(eg_signals) > 0:
eg_strategy.signals = eg_signals
eg.strategy = eg_strategy
def expand_multai(eg, module):
multai_load_balancers = module.params.get('multai_load_balancers')
eg_multai = expand_fields(multai_fields, module.params, 'Multai')
if multai_load_balancers is not None:
eg_multai_load_balancers = expand_list(multai_load_balancers, multai_lb_fields, 'MultaiLoadBalancer')
if len(eg_multai_load_balancers) > 0:
eg_multai.balancers = eg_multai_load_balancers
eg.multai = eg_multai
def expand_scheduled_tasks(eg, module):
scheduled_tasks = module.params.get('scheduled_tasks')
if scheduled_tasks is not None:
eg_scheduling = spotinst.aws_elastigroup.Scheduling()
eg_tasks = expand_list(scheduled_tasks, scheduled_task_fields, 'ScheduledTask')
if len(eg_tasks) > 0:
eg_scheduling.tasks = eg_tasks
eg.scheduling = eg_scheduling
def expand_load_balancers(eg_launchspec, load_balancers, target_group_arns):
if load_balancers is not None or target_group_arns is not None:
eg_load_balancers_config = spotinst.aws_elastigroup.LoadBalancersConfig()
eg_total_lbs = []
if load_balancers is not None:
for elb_name in load_balancers:
eg_elb = spotinst.aws_elastigroup.LoadBalancer()
if elb_name is not None:
eg_elb.name = elb_name
eg_elb.type = 'CLASSIC'
eg_total_lbs.append(eg_elb)
if target_group_arns is not None:
for target_arn in target_group_arns:
eg_elb = spotinst.aws_elastigroup.LoadBalancer()
if target_arn is not None:
eg_elb.arn = target_arn
eg_elb.type = 'TARGET_GROUP'
eg_total_lbs.append(eg_elb)
if len(eg_total_lbs) > 0:
eg_load_balancers_config.load_balancers = eg_total_lbs
eg_launchspec.load_balancers_config = eg_load_balancers_config
def expand_tags(eg_launchspec, tags):
if tags is not None:
eg_tags = []
for tag in tags:
eg_tag = spotinst.aws_elastigroup.Tag()
if tag.keys():
eg_tag.tag_key = tag.keys()[0]
if tag.values():
eg_tag.tag_value = tag.values()[0]
eg_tags.append(eg_tag)
if len(eg_tags) > 0:
eg_launchspec.tags = eg_tags
def expand_block_device_mappings(eg_launchspec, bdms):
if bdms is not None:
eg_bdms = []
for bdm in bdms:
eg_bdm = expand_fields(bdm_fields, bdm, 'BlockDeviceMapping')
if bdm.get('ebs') is not None:
eg_bdm.ebs = expand_fields(ebs_fields, bdm.get('ebs'), 'EBS')
eg_bdms.append(eg_bdm)
if len(eg_bdms) > 0:
eg_launchspec.block_device_mappings = eg_bdms
def expand_network_interfaces(eg_launchspec, enis):
if enis is not None:
eg_enis = []
for eni in enis:
eg_eni = expand_fields(eni_fields, eni, 'NetworkInterface')
eg_pias = expand_list(eni.get('private_ip_addresses'), private_ip_fields, 'PrivateIpAddress')
if eg_pias is not None:
eg_eni.private_ip_addresses = eg_pias
eg_enis.append(eg_eni)
if len(eg_enis) > 0:
eg_launchspec.network_interfaces = eg_enis
def expand_scaling(eg, module):
up_scaling_policies = module.params['up_scaling_policies']
down_scaling_policies = module.params['down_scaling_policies']
target_tracking_policies = module.params['target_tracking_policies']
eg_scaling = spotinst.aws_elastigroup.Scaling()
if up_scaling_policies is not None:
eg_up_scaling_policies = expand_scaling_policies(up_scaling_policies)
if len(eg_up_scaling_policies) > 0:
eg_scaling.up = eg_up_scaling_policies
if down_scaling_policies is not None:
eg_down_scaling_policies = expand_scaling_policies(down_scaling_policies)
if len(eg_down_scaling_policies) > 0:
eg_scaling.down = eg_down_scaling_policies
if target_tracking_policies is not None:
eg_target_tracking_policies = expand_target_tracking_policies(target_tracking_policies)
if len(eg_target_tracking_policies) > 0:
eg_scaling.target = eg_target_tracking_policies
if eg_scaling.down is not None or eg_scaling.up is not None or eg_scaling.target is not None:
eg.scaling = eg_scaling
def expand_list(items, fields, class_name):
if items is not None:
new_objects_list = []
for item in items:
new_obj = expand_fields(fields, item, class_name)
new_objects_list.append(new_obj)
return new_objects_list
def expand_fields(fields, item, class_name):
class_ = getattr(spotinst.aws_elastigroup, class_name)
new_obj = class_()
# Handle primitive fields
if item is not None:
for field in fields:
if isinstance(field, dict):
ansible_field_name = field['ansible_field_name']
spotinst_field_name = field['spotinst_field_name']
else:
ansible_field_name = field
spotinst_field_name = field
if item.get(ansible_field_name) is not None:
setattr(new_obj, spotinst_field_name, item.get(ansible_field_name))
return new_obj
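# Hedged illustration (not part of the original module) of how the field tuples near the
# top of this file are consumed by expand_fields(): a plain string maps one-to-one, while
# a dict entry renames the Ansible parameter to the SDK attribute. For example, with
# capacity_fields defined above,
#   expand_fields(capacity_fields, {'min_size': 1, 'max_size': 5, 'target': 2, 'unit': 'instance'}, 'Capacity')
# would return a spotinst.aws_elastigroup.Capacity object with minimum=1, maximum=5,
# target=2 and unit='instance'.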
def expand_scaling_policies(scaling_policies):
eg_scaling_policies = []
for policy in scaling_policies:
eg_policy = expand_fields(scaling_policy_fields, policy, 'ScalingPolicy')
eg_policy.action = expand_fields(action_fields, policy, 'ScalingPolicyAction')
eg_scaling_policies.append(eg_policy)
return eg_scaling_policies
def expand_target_tracking_policies(tracking_policies):
eg_tracking_policies = []
for policy in tracking_policies:
eg_policy = expand_fields(tracking_policy_fields, policy, 'TargetTrackingPolicy')
eg_tracking_policies.append(eg_policy)
return eg_tracking_policies
def main():
fields = dict(
account_id=dict(type='str'),
availability_vs_cost=dict(type='str', required=True),
availability_zones=dict(type='list', required=True),
block_device_mappings=dict(type='list'),
chef=dict(type='dict'),
credentials_path=dict(type='path', default="~/.spotinst/credentials"),
do_not_update=dict(default=[], type='list'),
down_scaling_policies=dict(type='list'),
draining_timeout=dict(type='int'),
ebs_optimized=dict(type='bool'),
ebs_volume_pool=dict(type='list'),
ecs=dict(type='dict'),
elastic_beanstalk=dict(type='dict'),
elastic_ips=dict(type='list'),
fallback_to_od=dict(type='bool'),
id=dict(type='str'),
health_check_grace_period=dict(type='int'),
health_check_type=dict(type='str'),
health_check_unhealthy_duration_before_replacement=dict(type='int'),
iam_role_arn=dict(type='str'),
iam_role_name=dict(type='str'),
image_id=dict(type='str', required=True),
key_pair=dict(type='str'),
kubernetes=dict(type='dict'),
lifetime_period=dict(type='int'),
load_balancers=dict(type='list'),
max_size=dict(type='int', required=True),
mesosphere=dict(type='dict'),
min_size=dict(type='int', required=True),
monitoring=dict(type='str'),
multai_load_balancers=dict(type='list'),
multai_token=dict(type='str'),
name=dict(type='str', required=True),
network_interfaces=dict(type='list'),
on_demand_count=dict(type='int'),
on_demand_instance_type=dict(type='str'),
opsworks=dict(type='dict'),
persistence=dict(type='dict'),
product=dict(type='str', required=True),
rancher=dict(type='dict'),
right_scale=dict(type='dict'),
risk=dict(type='int'),
roll_config=dict(type='dict'),
scheduled_tasks=dict(type='list'),
security_group_ids=dict(type='list', required=True),
shutdown_script=dict(type='str'),
signals=dict(type='list'),
spin_up_time=dict(type='int'),
spot_instance_types=dict(type='list', required=True),
state=dict(default='present', choices=['present', 'absent']),
tags=dict(type='list'),
target=dict(type='int', required=True),
target_group_arns=dict(type='list'),
tenancy=dict(type='str'),
terminate_at_end_of_billing_hour=dict(type='bool'),
token=dict(type='str'),
unit=dict(type='str'),
user_data=dict(type='str'),
utilize_reserved_instances=dict(type='bool'),
uniqueness_by=dict(default='name', choices=['name', 'id']),
up_scaling_policies=dict(type='list'),
target_tracking_policies=dict(type='list'),
wait_for_instances=dict(type='bool', default=False),
wait_timeout=dict(type='int')
)
module = AnsibleModule(argument_spec=fields)
if not HAS_SPOTINST_SDK:
module.fail_json(msg="the Spotinst SDK library is required. (pip install spotinst)")
# Retrieve creds file variables
creds_file_loaded_vars = dict()
credentials_path = module.params.get('credentials_path')
try:
with open(credentials_path, "r") as creds:
for line in creds:
eq_index = line.find('=')
var_name = line[:eq_index].strip()
string_value = line[eq_index + 1:].strip()
creds_file_loaded_vars[var_name] = string_value
except IOError:
pass
# End of creds file retrieval
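# Hedged illustration (not part of the original module): a credentials file containing
#   token = <YOUR TOKEN>
#   account = act-1a9dd2b
# would leave creds_file_loaded_vars as {'token': '<YOUR TOKEN>', 'account': 'act-1a9dd2b'};
# those values are only consulted below when neither the module parameter nor the
# corresponding environment variable is set.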
token = module.params.get('token')
if not token:
token = os.environ.get('SPOTINST_TOKEN')
if not token:
token = creds_file_loaded_vars.get("token")
account = module.params.get('account_id')
if not account:
account = os.environ.get('ACCOUNT')
if not account:
account = creds_file_loaded_vars.get("account")
client = spotinst.SpotinstClient(auth_token=token, print_output=False)
if account is not None:
client = spotinst.SpotinstClient(auth_token=token, print_output=False, account_id=account)
group_id, message, has_changed = handle_elastigroup(client=client, module=module)
instances = retrieve_group_instances(client=client, module=module, group_id=group_id)
module.exit_json(changed=has_changed, group_id=group_id, message=message, instances=instances)
if __name__ == '__main__':
main()
|
blueyed/django-autocomplete-light
|
refs/heads/master
|
autocomplete_light/autocomplete/list.py
|
1
|
from __future__ import unicode_literals

from django.utils.encoding import force_text

__all__ = ('AutocompleteList',)


class AutocompleteList(object):
    """
    Simple Autocomplete implementation which expects :py:attr:`choices` to be a
    list of string choices.

    .. py:attribute:: choices

        List of string choices.

    .. py:attribute:: limit_choices

        The maximum number of items to suggest from :py:attr:`choices`.

    .. py:attribute:: order_by

        :py:meth:`~.list.AutocompleteList.order_choices` will use this against
        :py:attr:`choices` as an argument to :py:func:`sorted`.

    It was mainly used as a starter for me when doing test-driven development
    and to ensure that the Autocomplete pattern would be concretely simple and
    yet powerful.
    """
    limit_choices = 20
    order_by = lambda cls, choice: force_text(choice).lower()

    def choices_for_values(self):
        """
        Return any :py:attr:`choices` that is in :py:attr:`values`.
        """
        values_choices = []
        for choice in self.choices:
            if choice in self.values:
                values_choices.append(choice)
        return self.order_choices(values_choices)

    def choices_for_request(self):
        """
        Return any :py:attr:`choices` that contains the search string. It is
        case insensitive and ignores spaces.
        """
        assert self.choices is not None, 'autocomplete.choices is not set'
        requests_choices = []
        q = self.request.GET.get('q', '').lower().strip()
        for choice in self.choices:
            if q in force_text(choice).lower():
                requests_choices.append(choice)
        return self.order_choices(requests_choices)[0:self.limit_choices]

    def order_choices(self, choices):
        """
        Run :py:func:`sorted` against ``choices`` and :py:attr:`order_by`.
        """
        return sorted(choices, key=self.order_by)
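# --- Hedged usage sketch (not part of the original module) ---
# A hypothetical subclass, only to illustrate the behaviour documented above;
# in real use the framework supplies `request` and `values`.
class CityAutocomplete(AutocompleteList):
    choices = ['Paris', 'parma', 'Lisbon']

# Given values=['parma'], choices_for_values() would return ['parma']; a request
# with q='par' would make choices_for_request() return ['Paris', 'parma']
# (sorted case-insensitively and capped at limit_choices).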
|
qizenguf/MLC-STT
|
refs/heads/master
|
ext/ply/example/BASIC/basiclex.py
|
166
|
# An implementation of Dartmouth BASIC (1964)

from ply import *

keywords = (
    'LET','READ','DATA','PRINT','GOTO','IF','THEN','FOR','NEXT','TO','STEP',
    'END','STOP','DEF','GOSUB','DIM','REM','RETURN','RUN','LIST','NEW',
)

tokens = keywords + (
    'EQUALS','PLUS','MINUS','TIMES','DIVIDE','POWER',
    'LPAREN','RPAREN','LT','LE','GT','GE','NE',
    'COMMA','SEMI', 'INTEGER','FLOAT', 'STRING',
    'ID','NEWLINE'
)

t_ignore = ' \t'

def t_REM(t):
    r'REM .*'
    return t

def t_ID(t):
    r'[A-Z][A-Z0-9]*'
    if t.value in keywords:
        t.type = t.value
    return t

t_EQUALS = r'='
t_PLUS = r'\+'
t_MINUS = r'-'
t_TIMES = r'\*'
t_POWER = r'\^'
t_DIVIDE = r'/'
t_LPAREN = r'\('
t_RPAREN = r'\)'
t_LT = r'<'
t_LE = r'<='
t_GT = r'>'
t_GE = r'>='
t_NE = r'<>'
t_COMMA = r'\,'
t_SEMI = r';'
t_INTEGER = r'\d+'
t_FLOAT = r'((\d*\.\d+)(E[\+-]?\d+)?|([1-9]\d*E[\+-]?\d+))'
t_STRING = r'\".*?\"'

def t_NEWLINE(t):
    r'\n'
    t.lexer.lineno += 1
    return t

def t_error(t):
    print("Illegal character %s" % t.value[0])
    t.lexer.skip(1)

lex.lex(debug=0)
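# --- Hedged usage sketch (not part of the original module) ---
# Tokenizing one BASIC line with the lexer built above (lex.lex() returns the
# lexer object, so it can also be rebuilt explicitly):
#   lexer = lex.lex(debug=0)
#   lexer.input('10 PRINT "HELLO"\n')
#   for tok in lexer:
#       print(tok.type, tok.value)   # INTEGER, PRINT (keyword), STRING, NEWLINE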
|
RichardLitt/wyrd-django-dev
|
refs/heads/master
|
tests/modeltests/get_object_or_404/tests.py
|
50
|
from __future__ import absolute_import

from django.http import Http404
from django.shortcuts import get_object_or_404, get_list_or_404
from django.test import TestCase

from .models import Author, Article


class GetObjectOr404Tests(TestCase):
    def test_get_object_or_404(self):
        a1 = Author.objects.create(name="Brave Sir Robin")
        a2 = Author.objects.create(name="Patsy")

        # No Articles yet, so we should get a Http404 error.
        self.assertRaises(Http404, get_object_or_404, Article, title="Foo")

        article = Article.objects.create(title="Run away!")
        article.authors = [a1, a2]

        # get_object_or_404 can be passed a Model to query.
        self.assertEqual(
            get_object_or_404(Article, title__contains="Run"),
            article
        )

        # We can also use the Article manager through an Author object.
        self.assertEqual(
            get_object_or_404(a1.article_set, title__contains="Run"),
            article
        )

        # No articles containing "Camelot". This should raise a Http404 error.
        self.assertRaises(Http404,
            get_object_or_404, a1.article_set, title__contains="Camelot"
        )

        # Custom managers can be used too.
        self.assertEqual(
            get_object_or_404(Article.by_a_sir, title="Run away!"),
            article
        )

        # QuerySets can be used too.
        self.assertEqual(
            get_object_or_404(Article.objects.all(), title__contains="Run"),
            article
        )

        # Just as when using a get() lookup, you will get an error if more than
        # one object is returned.
        self.assertRaises(Author.MultipleObjectsReturned,
            get_object_or_404, Author.objects.all()
        )

        # Using an EmptyQuerySet raises a Http404 error.
        self.assertRaises(Http404,
            get_object_or_404, Article.objects.none(), title__contains="Run"
        )

        # get_list_or_404 can be used to get lists of objects
        self.assertEqual(
            get_list_or_404(a1.article_set, title__icontains="Run"),
            [article]
        )

        # Http404 is returned if the list is empty.
        self.assertRaises(Http404,
            get_list_or_404, a1.article_set, title__icontains="Shrubbery"
        )

        # Custom managers can be used too.
        self.assertEqual(
            get_list_or_404(Article.by_a_sir, title__icontains="Run"),
            [article]
        )

        # QuerySets can be used too.
        self.assertEqual(
            get_list_or_404(Article.objects.all(), title__icontains="Run"),
            [article]
        )
|
mazhekin/node-workshop
|
refs/heads/master
|
challenge6/finished/node_modules/browserify/node_modules/module-deps/node_modules/detective/node_modules/esprima-six/tools/generate-unicode-regex.py
|
341
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# By Yusuke Suzuki <utatane.tea@gmail.com>
# Modified by Mathias Bynens <http://mathiasbynens.be/>
# http://code.google.com/p/esprima/issues/detail?id=110
import sys
import string
import re
class RegExpGenerator(object):
def __init__(self, detector):
self.detector = detector
def generate_identifier_start(self):
r = [ ch for ch in range(0xFFFF + 1) if self.detector.is_identifier_start(ch)]
return self._generate_range(r)
def generate_identifier_part(self):
r = [ ch for ch in range(0xFFFF + 1) if self.detector.is_identifier_part(ch)]
return self._generate_range(r)
def generate_non_ascii_identifier_start(self):
r = [ ch for ch in xrange(0x0080, 0xFFFF + 1) if self.detector.is_identifier_start(ch)]
return self._generate_range(r)
def generate_non_ascii_identifier_part(self):
r = [ ch for ch in range(0x0080, 0xFFFF + 1) if self.detector.is_identifier_part(ch)]
return self._generate_range(r)
def generate_non_ascii_separator_space(self):
r = [ ch for ch in range(0x0080, 0xFFFF + 1) if self.detector.is_separator_space(ch)]
return self._generate_range(r)
def _generate_range(self, r):
if len(r) == 0:
return '[]'
buf = []
start = r[0]
end = r[0]
predict = start + 1
r = r[1:]
for code in r:
if predict == code:
end = code
predict = code + 1
continue
else:
if start == end:
buf.append("\\u%04X" % start)
elif end == start + 1:
buf.append("\\u%04X\\u%04X" % (start, end))
else:
buf.append("\\u%04X-\\u%04X" % (start, end))
start = code
end = code
predict = code + 1
if start == end:
buf.append("\\u%04X" % start)
else:
buf.append("\\u%04X-\\u%04X" % (start, end))
return '[' + ''.join(buf) + ']'
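# Hedged illustration (not part of the original script): _generate_range() compresses a
# sorted list of code points into character-class ranges, e.g.
#   _generate_range([0x41, 0x42, 0x43, 0x100])  ->  '[\u0041-\u0043\u0100]'
# (consecutive code points collapse into a \uXXXX-\uXXXX span; isolated ones stay single).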
class Detector(object):
def __init__(self, data):
self.data = data
def is_ascii(self, ch):
return ch < 0x80
def is_ascii_alpha(self, ch):
v = ch | 0x20
return v >= ord('a') and v <= ord('z')
def is_decimal_digit(self, ch):
return ch >= ord('0') and ch <= ord('9')
def is_octal_digit(self, ch):
return ch >= ord('0') and ch <= ord('7')
def is_hex_digit(self, ch):
v = ch | 0x20
return self.is_decimal_digit(ch) or (v >= ord('a') and v <= ord('f'))
def is_digit(self, ch):
return self.is_decimal_digit(ch) or self.data[ch] == 'Nd'
def is_ascii_alphanumeric(self, ch):
return self.is_decimal_digit(ch) or self.is_ascii_alpha(ch)
def _is_non_ascii_identifier_start(self, ch):
c = self.data[ch]
return c == 'Lu' or c == 'Ll' or c == 'Lt' or c == 'Lm' or c == 'Lo' or c == 'Nl'
def _is_non_ascii_identifier_part(self, ch):
c = self.data[ch]
return c == 'Lu' or c == 'Ll' or c == 'Lt' or c == 'Lm' or c == 'Lo' or c == 'Nl' or c == 'Mn' or c == 'Mc' or c == 'Nd' or c == 'Pc' or ch == 0x200C or ch == 0x200D
def is_separator_space(self, ch):
return self.data[ch] == 'Zs'
def is_white_space(self, ch):
return ch == ord(' ') or ch == ord("\t") or ch == 0xB or ch == 0xC or ch == 0x00A0 or ch == 0xFEFF or self.is_separator_space(ch)
def is_line_terminator(self, ch):
return ch == 0x000D or ch == 0x000A or self.is_line_or_paragraph_terminator(ch)
def is_line_or_paragraph_terminator(self, ch):
return ch == 0x2028 or ch == 0x2029
def is_identifier_start(self, ch):
if self.is_ascii(ch):
return ch == ord('$') or ch == ord('_') or ch == ord('\\') or self.is_ascii_alpha(ch)
return self._is_non_ascii_identifier_start(ch)
def is_identifier_part(self, ch):
if self.is_ascii(ch):
return ch == ord('$') or ch == ord('_') or ch == ord('\\') or self.is_ascii_alphanumeric(ch)
return self._is_non_ascii_identifier_part(ch)
def analyze(source):
data = []
dictionary = {}
with open(source) as uni:
flag = False
first = 0
for line in uni:
d = string.split(line.strip(), ";")
val = int(d[0], 16)
if flag:
if re.compile("<.+, Last>").match(d[1]):
# print "%s : u%X" % (d[1], val)
flag = False
for t in range(first, val+1):
dictionary[t] = str(d[2])
else:
raise "Database Exception"
else:
if re.compile("<.+, First>").match(d[1]):
# print "%s : u%X" % (d[1], val)
flag = True
first = val
else:
dictionary[val] = str(d[2])
for i in range(0xFFFF + 1):
if dictionary.get(i) == None:
data.append("Un")
else:
data.append(dictionary[i])
return RegExpGenerator(Detector(data))
def main(source):
generator = analyze(source)
print generator.generate_non_ascii_identifier_start()
print generator.generate_non_ascii_identifier_part()
print generator.generate_non_ascii_separator_space()
if __name__ == '__main__':
main(sys.argv[1])
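# Hedged usage note (not part of the original script): the expected input is a copy of
# the Unicode character database, e.g.
#   python generate-unicode-regex.py UnicodeData.txt
# which prints the non-ASCII identifier-start, identifier-part and separator-space
# character classes used by Esprima.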
|
springcoil/euroscipy_proceedings
|
refs/heads/master
|
publisher/tempita/__init__.py
|
34
|
"""
A small templating language
This implements a small templating language. This language implements
if/elif/else, for/continue/break, expressions, and blocks of Python
code. The syntax is::
{{any expression (function calls etc)}}
{{any expression | filter}}
{{for x in y}}...{{endfor}}
{{if x}}x{{elif y}}y{{else}}z{{endif}}
{{py:x=1}}
{{py:
def foo(bar):
return 'baz'
}}
{{default var = default_value}}
{{# comment}}
You use this with the ``Template`` class or the ``sub`` shortcut.
The ``Template`` class takes the template string and the name of
the template (for errors) and a default namespace. Then (like
``string.Template``) you can call the ``tmpl.substitute(**kw)``
method to make a substitution (or ``tmpl.substitute(a_dict)``).
``sub(content, **kw)`` substitutes the template immediately. You
can use ``__name='tmpl.html'`` to set the name of the template.
If there are syntax errors ``TemplateError`` will be raised.
"""
import re
import sys
import cgi
from urllib import quote as url_quote
import os
import tokenize
from cStringIO import StringIO
from tempita._looper import looper
from tempita.compat3 import bytes, basestring_, next, is_unicode, coerce_text
__all__ = ['TemplateError', 'Template', 'sub', 'HTMLTemplate',
'sub_html', 'html', 'bunch']
in_re = re.compile(r'\s+in\s+')
var_re = re.compile(r'^[a-z_][a-z0-9_]*$', re.I)
class TemplateError(Exception):
"""Exception raised while parsing a template
"""
def __init__(self, message, position, name=None):
Exception.__init__(self, message)
self.position = position
self.name = name
def __str__(self):
msg = ' '.join(self.args)
if self.position:
msg = '%s at line %s column %s' % (
msg, self.position[0], self.position[1])
if self.name:
msg += ' in %s' % self.name
return msg
class _TemplateContinue(Exception):
pass
class _TemplateBreak(Exception):
pass
def get_file_template(name, from_template):
path = os.path.join(os.path.dirname(from_template.name), name)
return from_template.__class__.from_filename(
path, namespace=from_template.namespace,
get_template=from_template.get_template)
class Template(object):
default_namespace = {
'start_braces': '{{',
'end_braces': '}}',
'looper': looper,
}
default_encoding = 'utf8'
default_inherit = None
def __init__(self, content, name=None, namespace=None, stacklevel=None,
get_template=None, default_inherit=None, line_offset=0,
delimeters=None):
self.content = content
# set delimeters
if delimeters is None:
delimeters = (self.default_namespace['start_braces'],
self.default_namespace['end_braces'])
else:
assert len(delimeters) == 2 and all([isinstance(delimeter, basestring)
for delimeter in delimeters])
self.default_namespace = self.__class__.default_namespace.copy()
self.default_namespace['start_braces'] = delimeters[0]
self.default_namespace['end_braces'] = delimeters[1]
self.delimeters = delimeters
self._unicode = is_unicode(content)
if name is None and stacklevel is not None:
try:
caller = sys._getframe(stacklevel)
except ValueError:
pass
else:
globals = caller.f_globals
lineno = caller.f_lineno
if '__file__' in globals:
name = globals['__file__']
if name.endswith('.pyc') or name.endswith('.pyo'):
name = name[:-1]
elif '__name__' in globals:
name = globals['__name__']
else:
name = '<string>'
if lineno:
name += ':%s' % lineno
self.name = name
self._parsed = parse(content, name=name, line_offset=line_offset, delimeters=self.delimeters)
if namespace is None:
namespace = {}
self.namespace = namespace
self.get_template = get_template
if default_inherit is not None:
self.default_inherit = default_inherit
def from_filename(cls, filename, namespace=None, encoding=None,
default_inherit=None, get_template=get_file_template):
f = open(filename, 'rb')
c = f.read()
f.close()
if encoding:
c = c.decode(encoding)
return cls(content=c, name=filename, namespace=namespace,
default_inherit=default_inherit, get_template=get_template)
from_filename = classmethod(from_filename)
def __repr__(self):
return '<%s %s name=%r>' % (
self.__class__.__name__,
hex(id(self))[2:], self.name)
def substitute(self, *args, **kw):
if args:
if kw:
raise TypeError(
"You can only give positional *or* keyword arguments")
if len(args) > 1:
raise TypeError(
"You can only give one positional argument")
if not hasattr(args[0], 'items'):
raise TypeError(
"If you pass in a single argument, you must pass in a dictionary-like object (with a .items() method); you gave %r"
% (args[0],))
kw = args[0]
ns = kw
ns['__template_name__'] = self.name
if self.namespace:
ns.update(self.namespace)
result, defs, inherit = self._interpret(ns)
if not inherit:
inherit = self.default_inherit
if inherit:
result = self._interpret_inherit(result, defs, inherit, ns)
return result
def _interpret(self, ns):
__traceback_hide__ = True
parts = []
defs = {}
self._interpret_codes(self._parsed, ns, out=parts, defs=defs)
if '__inherit__' in defs:
inherit = defs.pop('__inherit__')
else:
inherit = None
return ''.join(parts), defs, inherit
def _interpret_inherit(self, body, defs, inherit_template, ns):
__traceback_hide__ = True
if not self.get_template:
raise TemplateError(
'You cannot use inheritance without passing in get_template',
position=None, name=self.name)
templ = self.get_template(inherit_template, self)
self_ = TemplateObject(self.name)
for name, value in defs.iteritems():
setattr(self_, name, value)
self_.body = body
ns = ns.copy()
ns['self'] = self_
return templ.substitute(ns)
def _interpret_codes(self, codes, ns, out, defs):
__traceback_hide__ = True
for item in codes:
if isinstance(item, basestring_):
out.append(item)
else:
self._interpret_code(item, ns, out, defs)
def _interpret_code(self, code, ns, out, defs):
__traceback_hide__ = True
name, pos = code[0], code[1]
if name == 'py':
self._exec(code[2], ns, pos)
elif name == 'continue':
raise _TemplateContinue()
elif name == 'break':
raise _TemplateBreak()
elif name == 'for':
vars, expr, content = code[2], code[3], code[4]
expr = self._eval(expr, ns, pos)
self._interpret_for(vars, expr, content, ns, out, defs)
elif name == 'cond':
parts = code[2:]
self._interpret_if(parts, ns, out, defs)
elif name == 'expr':
parts = code[2].split('|')
base = self._eval(parts[0], ns, pos)
for part in parts[1:]:
func = self._eval(part, ns, pos)
base = func(base)
out.append(self._repr(base, pos))
elif name == 'default':
var, expr = code[2], code[3]
if var not in ns:
result = self._eval(expr, ns, pos)
ns[var] = result
elif name == 'inherit':
expr = code[2]
value = self._eval(expr, ns, pos)
defs['__inherit__'] = value
elif name == 'def':
name = code[2]
signature = code[3]
parts = code[4]
ns[name] = defs[name] = TemplateDef(self, name, signature, body=parts, ns=ns,
pos=pos)
elif name == 'comment':
return
else:
assert 0, "Unknown code: %r" % name
def _interpret_for(self, vars, expr, content, ns, out, defs):
__traceback_hide__ = True
for item in expr:
if len(vars) == 1:
ns[vars[0]] = item
else:
if len(vars) != len(item):
raise ValueError(
'Need %i items to unpack (got %i items)'
% (len(vars), len(item)))
for name, value in zip(vars, item):
ns[name] = value
try:
self._interpret_codes(content, ns, out, defs)
except _TemplateContinue:
continue
except _TemplateBreak:
break
def _interpret_if(self, parts, ns, out, defs):
__traceback_hide__ = True
# @@: if/else/else gets through
for part in parts:
assert not isinstance(part, basestring_)
name, pos = part[0], part[1]
if name == 'else':
result = True
else:
result = self._eval(part[2], ns, pos)
if result:
self._interpret_codes(part[3], ns, out, defs)
break
def _eval(self, code, ns, pos):
__traceback_hide__ = True
try:
try:
value = eval(code, self.default_namespace, ns)
except SyntaxError, e:
raise SyntaxError(
'invalid syntax in expression: %s' % code)
return value
except:
exc_info = sys.exc_info()
e = exc_info[1]
if getattr(e, 'args', None):
arg0 = e.args[0]
else:
arg0 = coerce_text(e)
e.args = (self._add_line_info(arg0, pos),)
raise exc_info[0], e, exc_info[2]
def _exec(self, code, ns, pos):
__traceback_hide__ = True
try:
exec code in self.default_namespace, ns
except:
exc_info = sys.exc_info()
e = exc_info[1]
if e.args:
e.args = (self._add_line_info(e.args[0], pos),)
else:
e.args = (self._add_line_info(None, pos),)
raise exc_info[0], e, exc_info[2]
def _repr(self, value, pos):
__traceback_hide__ = True
try:
if value is None:
return ''
if self._unicode:
try:
value = unicode(value)
except UnicodeDecodeError:
value = bytes(value)
else:
if not isinstance(value, basestring_):
value = coerce_text(value)
if (is_unicode(value)
and self.default_encoding):
value = value.encode(self.default_encoding)
except:
exc_info = sys.exc_info()
e = exc_info[1]
e.args = (self._add_line_info(e.args[0], pos),)
raise exc_info[0], e, exc_info[2]
else:
if self._unicode and isinstance(value, bytes):
if not self.default_encoding:
raise UnicodeDecodeError(
'Cannot decode bytes value %r into unicode '
'(no default_encoding provided)' % value)
try:
value = value.decode(self.default_encoding)
except UnicodeDecodeError, e:
raise UnicodeDecodeError(
e.encoding,
e.object,
e.start,
e.end,
e.reason + ' in string %r' % value)
elif not self._unicode and is_unicode(value):
if not self.default_encoding:
raise UnicodeEncodeError(
'Cannot encode unicode value %r into bytes '
'(no default_encoding provided)' % value)
value = value.encode(self.default_encoding)
return value
def _add_line_info(self, msg, pos):
msg = "%s at line %s column %s" % (
msg, pos[0], pos[1])
if self.name:
msg += " in file %s" % self.name
return msg
def sub(content, delimeters=None, **kw):
name = kw.get('__name')
tmpl = Template(content, name=name, delimeters=delimeters)
return tmpl.substitute(kw)
def paste_script_template_renderer(content, vars, filename=None):
tmpl = Template(content, name=filename)
return tmpl.substitute(vars)
class bunch(dict):
def __init__(self, **kw):
for name, value in kw.iteritems():
setattr(self, name, value)
def __setattr__(self, name, value):
self[name] = value
def __getattr__(self, name):
try:
return self[name]
except KeyError:
raise AttributeError(name)
def __getitem__(self, key):
if 'default' in self:
try:
return dict.__getitem__(self, key)
except KeyError:
return dict.__getitem__(self, 'default')
else:
return dict.__getitem__(self, key)
def __repr__(self):
items = [
(k, v) for k, v in self.iteritems()]
items.sort()
return '<%s %s>' % (
self.__class__.__name__,
' '.join(['%s=%r' % (k, v) for k, v in items]))
############################################################
## HTML Templating
############################################################
class html(object):
def __init__(self, value):
self.value = value
def __str__(self):
return self.value
def __html__(self):
return self.value
def __repr__(self):
return '<%s %r>' % (
self.__class__.__name__, self.value)
def html_quote(value, force=True):
if not force and hasattr(value, '__html__'):
return value.__html__()
if value is None:
return ''
if not isinstance(value, basestring_):
value = coerce_text(value)
if sys.version >= "3" and isinstance(value, bytes):
value = cgi.escape(value.decode('latin1'), 1)
value = value.encode('latin1')
else:
value = cgi.escape(value, 1)
if sys.version < "3":
if is_unicode(value):
value = value.encode('ascii', 'xmlcharrefreplace')
return value
def url(v):
v = coerce_text(v)
if is_unicode(v):
v = v.encode('utf8')
return url_quote(v)
def attr(**kw):
kw = list(kw.iteritems())
kw.sort()
parts = []
for name, value in kw:
if value is None:
continue
if name.endswith('_'):
name = name[:-1]
parts.append('%s="%s"' % (html_quote(name), html_quote(value)))
return html(' '.join(parts))
class HTMLTemplate(Template):
default_namespace = Template.default_namespace.copy()
default_namespace.update(dict(
html=html,
attr=attr,
url=url,
html_quote=html_quote,
))
def _repr(self, value, pos):
if hasattr(value, '__html__'):
value = value.__html__()
quote = False
else:
quote = True
plain = Template._repr(self, value, pos)
if quote:
return html_quote(plain)
else:
return plain
def sub_html(content, **kw):
name = kw.get('__name')
tmpl = HTMLTemplate(content, name=name)
return tmpl.substitute(kw)
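# Illustrative usage sketch (added note, not part of the original module):
# HTMLTemplate behaves like Template but HTML-quotes substituted values
# unless they expose an __html__() method, e.g.:
#
#     sub_html('<b>{{name}}</b>', name='<Tom & Jerry>')
#     # -> '<b><Tom & Jerry></b>'  (angle brackets and & are escaped)
#
#     sub_html('{{link}}', link=html('<a href="/">home</a>'))
#     # -> '<a href="/">home</a>'  (passed through unquoted)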
class TemplateDef(object):
def __init__(self, template, func_name, func_signature,
body, ns, pos, bound_self=None):
self._template = template
self._func_name = func_name
self._func_signature = func_signature
self._body = body
self._ns = ns
self._pos = pos
self._bound_self = bound_self
def __repr__(self):
return '<tempita function %s(%s) at %s:%s>' % (
self._func_name, self._func_signature,
self._template.name, self._pos)
def __str__(self):
return self()
def __call__(self, *args, **kw):
values = self._parse_signature(args, kw)
ns = self._ns.copy()
ns.update(values)
if self._bound_self is not None:
ns['self'] = self._bound_self
out = []
subdefs = {}
self._template._interpret_codes(self._body, ns, out, subdefs)
return ''.join(out)
def __get__(self, obj, type=None):
if obj is None:
return self
return self.__class__(
self._template, self._func_name, self._func_signature,
self._body, self._ns, self._pos, bound_self=obj)
def _parse_signature(self, args, kw):
values = {}
sig_args, var_args, var_kw, defaults = self._func_signature
extra_kw = {}
for name, value in kw.iteritems():
if not var_kw and name not in sig_args:
raise TypeError(
'Unexpected argument %s' % name)
if name in sig_args:
                values[name] = value
else:
extra_kw[name] = value
args = list(args)
sig_args = list(sig_args)
while args:
while sig_args and sig_args[0] in values:
sig_args.pop(0)
if sig_args:
name = sig_args.pop(0)
values[name] = args.pop(0)
elif var_args:
values[var_args] = tuple(args)
break
else:
raise TypeError(
                    'Extra positional arguments: %s'
% ', '.join(repr(v) for v in args))
for name, value_expr in defaults.iteritems():
if name not in values:
values[name] = self._template._eval(
value_expr, self._ns, self._pos)
for name in sig_args:
if name not in values:
raise TypeError(
'Missing argument: %s' % name)
if var_kw:
values[var_kw] = extra_kw
return values
class TemplateObject(object):
def __init__(self, name):
self.__name = name
self.get = TemplateObjectGetter(self)
def __repr__(self):
return '<%s %s>' % (self.__class__.__name__, self.__name)
class TemplateObjectGetter(object):
def __init__(self, template_obj):
self.__template_obj = template_obj
def __getattr__(self, attr):
return getattr(self.__template_obj, attr, Empty)
def __repr__(self):
return '<%s around %r>' % (self.__class__.__name__, self.__template_obj)
class _Empty(object):
def __call__(self, *args, **kw):
return self
def __str__(self):
return ''
def __repr__(self):
return 'Empty'
def __unicode__(self):
return u''
def __iter__(self):
return iter(())
def __bool__(self):
return False
if sys.version < "3":
__nonzero__ = __bool__
Empty = _Empty()
del _Empty
############################################################
## Lexing and Parsing
############################################################
def lex(s, name=None, trim_whitespace=True, line_offset=0, delimeters=None):
"""
Lex a string into chunks:
>>> lex('hey')
['hey']
>>> lex('hey {{you}}')
['hey ', ('you', (1, 7))]
>>> lex('hey {{')
Traceback (most recent call last):
...
TemplateError: No }} to finish last expression at line 1 column 7
>>> lex('hey }}')
Traceback (most recent call last):
...
TemplateError: }} outside expression at line 1 column 7
>>> lex('hey {{ {{')
Traceback (most recent call last):
...
TemplateError: {{ inside expression at line 1 column 10
"""
if delimeters is None:
delimeters = ( Template.default_namespace['start_braces'],
Template.default_namespace['end_braces'] )
in_expr = False
chunks = []
last = 0
last_pos = (1, 1)
token_re = re.compile(r'%s|%s' % (re.escape(delimeters[0]),
re.escape(delimeters[1])))
for match in token_re.finditer(s):
expr = match.group(0)
pos = find_position(s, match.end(), line_offset)
if expr == delimeters[0] and in_expr:
raise TemplateError('%s inside expression' % delimeters[0],
position=pos,
name=name)
elif expr == delimeters[1] and not in_expr:
raise TemplateError('%s outside expression' % delimeters[1],
position=pos,
name=name)
if expr == delimeters[0]:
part = s[last:match.start()]
if part:
chunks.append(part)
in_expr = True
else:
chunks.append((s[last:match.start()], last_pos))
in_expr = False
last = match.end()
last_pos = pos
if in_expr:
raise TemplateError('No %s to finish last expression' % delimeters[1],
name=name, position=last_pos)
part = s[last:]
if part:
chunks.append(part)
if trim_whitespace:
chunks = trim_lex(chunks)
return chunks
statement_re = re.compile(r'^(?:if |elif |for |def |inherit |default |py:)')
single_statements = ['else', 'endif', 'endfor', 'enddef', 'continue', 'break']
trail_whitespace_re = re.compile(r'\n\r?[\t ]*$')
lead_whitespace_re = re.compile(r'^[\t ]*\n')
def trim_lex(tokens):
r"""
Takes a lexed set of tokens, and removes whitespace when there is
a directive on a line by itself:
>>> tokens = lex('{{if x}}\nx\n{{endif}}\ny', trim_whitespace=False)
>>> tokens
[('if x', (1, 3)), '\nx\n', ('endif', (3, 3)), '\ny']
>>> trim_lex(tokens)
[('if x', (1, 3)), 'x\n', ('endif', (3, 3)), 'y']
"""
last_trim = None
for i in range(len(tokens)):
current = tokens[i]
if isinstance(tokens[i], basestring_):
# we don't trim this
continue
item = current[0]
if not statement_re.search(item) and item not in single_statements:
continue
if not i:
prev = ''
else:
prev = tokens[i - 1]
if i + 1 >= len(tokens):
next_chunk = ''
else:
next_chunk = tokens[i + 1]
if (not isinstance(next_chunk, basestring_)
or not isinstance(prev, basestring_)):
continue
prev_ok = not prev or trail_whitespace_re.search(prev)
if i == 1 and not prev.strip():
prev_ok = True
if last_trim is not None and last_trim + 2 == i and not prev.strip():
prev_ok = 'last'
if (prev_ok
and (not next_chunk or lead_whitespace_re.search(next_chunk)
or (i == len(tokens) - 2 and not next_chunk.strip()))):
if prev:
if ((i == 1 and not prev.strip())
or prev_ok == 'last'):
tokens[i - 1] = ''
else:
m = trail_whitespace_re.search(prev)
# +1 to leave the leading \n on:
prev = prev[:m.start() + 1]
tokens[i - 1] = prev
if next_chunk:
last_trim = i
if i == len(tokens) - 2 and not next_chunk.strip():
tokens[i + 1] = ''
else:
m = lead_whitespace_re.search(next_chunk)
next_chunk = next_chunk[m.end():]
tokens[i + 1] = next_chunk
return tokens
def find_position(string, index, line_offset):
"""Given a string and index, return (line, column)"""
leading = string[:index].splitlines()
return (len(leading) + line_offset, len(leading[-1]) + 1)
def parse(s, name=None, line_offset=0, delimeters=None):
r"""
Parses a string into a kind of AST
>>> parse('{{x}}')
[('expr', (1, 3), 'x')]
>>> parse('foo')
['foo']
>>> parse('{{if x}}test{{endif}}')
[('cond', (1, 3), ('if', (1, 3), 'x', ['test']))]
>>> parse('series->{{for x in y}}x={{x}}{{endfor}}')
['series->', ('for', (1, 11), ('x',), 'y', ['x=', ('expr', (1, 27), 'x')])]
>>> parse('{{for x, y in z:}}{{continue}}{{endfor}}')
[('for', (1, 3), ('x', 'y'), 'z', [('continue', (1, 21))])]
>>> parse('{{py:x=1}}')
[('py', (1, 3), 'x=1')]
>>> parse('{{if x}}a{{elif y}}b{{else}}c{{endif}}')
[('cond', (1, 3), ('if', (1, 3), 'x', ['a']), ('elif', (1, 12), 'y', ['b']), ('else', (1, 23), None, ['c']))]
Some exceptions::
>>> parse('{{continue}}')
Traceback (most recent call last):
...
TemplateError: continue outside of for loop at line 1 column 3
>>> parse('{{if x}}foo')
Traceback (most recent call last):
...
TemplateError: No {{endif}} at line 1 column 3
>>> parse('{{else}}')
Traceback (most recent call last):
...
TemplateError: else outside of an if block at line 1 column 3
>>> parse('{{if x}}{{for x in y}}{{endif}}{{endfor}}')
Traceback (most recent call last):
...
TemplateError: Unexpected endif at line 1 column 25
>>> parse('{{if}}{{endif}}')
Traceback (most recent call last):
...
TemplateError: if with no expression at line 1 column 3
>>> parse('{{for x y}}{{endfor}}')
Traceback (most recent call last):
...
TemplateError: Bad for (no "in") in 'x y' at line 1 column 3
>>> parse('{{py:x=1\ny=2}}')
Traceback (most recent call last):
...
TemplateError: Multi-line py blocks must start with a newline at line 1 column 3
"""
if delimeters is None:
delimeters = ( Template.default_namespace['start_braces'],
Template.default_namespace['end_braces'] )
tokens = lex(s, name=name, line_offset=line_offset, delimeters=delimeters)
result = []
while tokens:
next_chunk, tokens = parse_expr(tokens, name)
result.append(next_chunk)
return result
def parse_expr(tokens, name, context=()):
if isinstance(tokens[0], basestring_):
return tokens[0], tokens[1:]
expr, pos = tokens[0]
expr = expr.strip()
if expr.startswith('py:'):
expr = expr[3:].lstrip(' \t')
if expr.startswith('\n') or expr.startswith('\r'):
expr = expr.lstrip('\r\n')
if '\r' in expr:
expr = expr.replace('\r\n', '\n')
expr = expr.replace('\r', '')
expr += '\n'
else:
if '\n' in expr:
raise TemplateError(
'Multi-line py blocks must start with a newline',
position=pos, name=name)
return ('py', pos, expr), tokens[1:]
elif expr in ('continue', 'break'):
if 'for' not in context:
raise TemplateError(
                '%s outside of for loop' % expr,
position=pos, name=name)
return (expr, pos), tokens[1:]
elif expr.startswith('if '):
return parse_cond(tokens, name, context)
elif (expr.startswith('elif ')
or expr == 'else'):
raise TemplateError(
'%s outside of an if block' % expr.split()[0],
position=pos, name=name)
elif expr in ('if', 'elif', 'for'):
raise TemplateError(
'%s with no expression' % expr,
position=pos, name=name)
elif expr in ('endif', 'endfor', 'enddef'):
raise TemplateError(
'Unexpected %s' % expr,
position=pos, name=name)
elif expr.startswith('for '):
return parse_for(tokens, name, context)
elif expr.startswith('default '):
return parse_default(tokens, name, context)
elif expr.startswith('inherit '):
return parse_inherit(tokens, name, context)
elif expr.startswith('def '):
return parse_def(tokens, name, context)
elif expr.startswith('#'):
return ('comment', pos, tokens[0][0]), tokens[1:]
return ('expr', pos, tokens[0][0]), tokens[1:]
def parse_cond(tokens, name, context):
start = tokens[0][1]
pieces = []
context = context + ('if',)
while 1:
if not tokens:
raise TemplateError(
'Missing {{endif}}',
position=start, name=name)
if (isinstance(tokens[0], tuple)
and tokens[0][0] == 'endif'):
return ('cond', start) + tuple(pieces), tokens[1:]
next_chunk, tokens = parse_one_cond(tokens, name, context)
pieces.append(next_chunk)
def parse_one_cond(tokens, name, context):
(first, pos), tokens = tokens[0], tokens[1:]
content = []
if first.endswith(':'):
first = first[:-1]
if first.startswith('if '):
part = ('if', pos, first[3:].lstrip(), content)
elif first.startswith('elif '):
part = ('elif', pos, first[5:].lstrip(), content)
elif first == 'else':
part = ('else', pos, None, content)
else:
assert 0, "Unexpected token %r at %s" % (first, pos)
while 1:
if not tokens:
raise TemplateError(
'No {{endif}}',
position=pos, name=name)
if (isinstance(tokens[0], tuple)
and (tokens[0][0] == 'endif'
or tokens[0][0].startswith('elif ')
or tokens[0][0] == 'else')):
return part, tokens
next_chunk, tokens = parse_expr(tokens, name, context)
content.append(next_chunk)
def parse_for(tokens, name, context):
first, pos = tokens[0]
tokens = tokens[1:]
context = ('for',) + context
content = []
assert first.startswith('for ')
if first.endswith(':'):
first = first[:-1]
first = first[3:].strip()
match = in_re.search(first)
if not match:
raise TemplateError(
'Bad for (no "in") in %r' % first,
position=pos, name=name)
vars = first[:match.start()]
if '(' in vars:
raise TemplateError(
'You cannot have () in the variable section of a for loop (%r)'
% vars, position=pos, name=name)
vars = tuple([
v.strip() for v in first[:match.start()].split(',')
if v.strip()])
expr = first[match.end():]
while 1:
if not tokens:
raise TemplateError(
'No {{endfor}}',
position=pos, name=name)
if (isinstance(tokens[0], tuple)
and tokens[0][0] == 'endfor'):
return ('for', pos, vars, expr, content), tokens[1:]
next_chunk, tokens = parse_expr(tokens, name, context)
content.append(next_chunk)
def parse_default(tokens, name, context):
first, pos = tokens[0]
assert first.startswith('default ')
first = first.split(None, 1)[1]
parts = first.split('=', 1)
if len(parts) == 1:
raise TemplateError(
"Expression must be {{default var=value}}; no = found in %r" % first,
position=pos, name=name)
var = parts[0].strip()
if ',' in var:
raise TemplateError(
"{{default x, y = ...}} is not supported",
position=pos, name=name)
if not var_re.search(var):
raise TemplateError(
"Not a valid variable name for {{default}}: %r"
% var, position=pos, name=name)
expr = parts[1].strip()
return ('default', pos, var, expr), tokens[1:]
def parse_inherit(tokens, name, context):
first, pos = tokens[0]
assert first.startswith('inherit ')
expr = first.split(None, 1)[1]
return ('inherit', pos, expr), tokens[1:]
def parse_def(tokens, name, context):
first, start = tokens[0]
tokens = tokens[1:]
assert first.startswith('def ')
first = first.split(None, 1)[1]
if first.endswith(':'):
first = first[:-1]
if '(' not in first:
func_name = first
sig = ((), None, None, {})
elif not first.endswith(')'):
raise TemplateError("Function definition doesn't end with ): %s" % first,
position=start, name=name)
else:
first = first[:-1]
func_name, sig_text = first.split('(', 1)
sig = parse_signature(sig_text, name, start)
context = context + ('def',)
content = []
while 1:
if not tokens:
raise TemplateError(
'Missing {{enddef}}',
position=start, name=name)
if (isinstance(tokens[0], tuple)
and tokens[0][0] == 'enddef'):
return ('def', start, func_name, sig, content), tokens[1:]
next_chunk, tokens = parse_expr(tokens, name, context)
content.append(next_chunk)
def parse_signature(sig_text, name, pos):
tokens = tokenize.generate_tokens(StringIO(sig_text).readline)
sig_args = []
var_arg = None
var_kw = None
defaults = {}
def get_token(pos=False):
try:
tok_type, tok_string, (srow, scol), (erow, ecol), line = next(tokens)
except StopIteration:
return tokenize.ENDMARKER, ''
if pos:
return tok_type, tok_string, (srow, scol), (erow, ecol)
else:
return tok_type, tok_string
while 1:
var_arg_type = None
tok_type, tok_string = get_token()
if tok_type == tokenize.ENDMARKER:
break
if tok_type == tokenize.OP and (tok_string == '*' or tok_string == '**'):
var_arg_type = tok_string
tok_type, tok_string = get_token()
if tok_type != tokenize.NAME:
raise TemplateError('Invalid signature: (%s)' % sig_text,
position=pos, name=name)
var_name = tok_string
tok_type, tok_string = get_token()
if tok_type == tokenize.ENDMARKER or (tok_type == tokenize.OP and tok_string == ','):
if var_arg_type == '*':
var_arg = var_name
elif var_arg_type == '**':
var_kw = var_name
else:
sig_args.append(var_name)
if tok_type == tokenize.ENDMARKER:
break
continue
if var_arg_type is not None:
raise TemplateError('Invalid signature: (%s)' % sig_text,
position=pos, name=name)
if tok_type == tokenize.OP and tok_string == '=':
nest_type = None
unnest_type = None
nest_count = 0
start_pos = end_pos = None
parts = []
while 1:
tok_type, tok_string, s, e = get_token(True)
if start_pos is None:
start_pos = s
end_pos = e
if tok_type == tokenize.ENDMARKER and nest_count:
raise TemplateError('Invalid signature: (%s)' % sig_text,
position=pos, name=name)
if (not nest_count and
(tok_type == tokenize.ENDMARKER or (tok_type == tokenize.OP and tok_string == ','))):
default_expr = isolate_expression(sig_text, start_pos, end_pos)
defaults[var_name] = default_expr
sig_args.append(var_name)
break
parts.append((tok_type, tok_string))
if nest_count and tok_type == tokenize.OP and tok_string == nest_type:
nest_count += 1
elif nest_count and tok_type == tokenize.OP and tok_string == unnest_type:
nest_count -= 1
if not nest_count:
nest_type = unnest_type = None
elif not nest_count and tok_type == tokenize.OP and tok_string in ('(', '[', '{'):
nest_type = tok_string
nest_count = 1
unnest_type = {'(': ')', '[': ']', '{': '}'}[nest_type]
return sig_args, var_arg, var_kw, defaults
def isolate_expression(string, start_pos, end_pos):
srow, scol = start_pos
srow -= 1
erow, ecol = end_pos
erow -= 1
lines = string.splitlines(True)
if srow == erow:
return lines[srow][scol:ecol]
parts = [lines[srow][scol:]]
parts.extend(lines[srow+1:erow])
if erow < len(lines):
# It'll sometimes give (end_row_past_finish, 0)
parts.append(lines[erow][:ecol])
return ''.join(parts)
_fill_command_usage = """\
%prog [OPTIONS] TEMPLATE arg=value
Use py:arg=value to set a Python value; otherwise all values are
strings.
"""
def fill_command(args=None):
import sys
import optparse
import pkg_resources
import os
if args is None:
args = sys.argv[1:]
dist = pkg_resources.get_distribution('Paste')
parser = optparse.OptionParser(
version=coerce_text(dist),
usage=_fill_command_usage)
parser.add_option(
'-o', '--output',
dest='output',
metavar="FILENAME",
help="File to write output to (default stdout)")
parser.add_option(
'--html',
dest='use_html',
action='store_true',
help="Use HTML style filling (including automatic HTML quoting)")
parser.add_option(
'--env',
dest='use_env',
action='store_true',
help="Put the environment in as top-level variables")
options, args = parser.parse_args(args)
if len(args) < 1:
print('You must give a template filename')
sys.exit(2)
template_name = args[0]
args = args[1:]
vars = {}
if options.use_env:
vars.update(os.environ)
for value in args:
if '=' not in value:
print('Bad argument: %r' % value)
sys.exit(2)
name, value = value.split('=', 1)
if name.startswith('py:'):
            name = name[3:]
value = eval(value)
vars[name] = value
if template_name == '-':
template_content = sys.stdin.read()
template_name = '<stdin>'
else:
f = open(template_name, 'rb')
template_content = f.read()
f.close()
if options.use_html:
TemplateClass = HTMLTemplate
else:
TemplateClass = Template
template = TemplateClass(template_content, name=template_name)
result = template.substitute(vars)
if options.output:
f = open(options.output, 'wb')
f.write(result)
f.close()
else:
sys.stdout.write(result)
if __name__ == '__main__':
fill_command()
|
applicationdevm/XlsxWriter
|
refs/heads/master
|
examples/chart_pie.py
|
8
|
#######################################################################
#
# An example of creating Excel Pie charts with Python and XlsxWriter.
#
# The demo also shows how to set segment colours. It is possible to
# define chart colors for most types of XlsxWriter charts
# via the add_series() method. However, Pie/Doughnut charts are a special
# case since each segment is represented as a point so it is necessary to
# assign formatting to each point in the series.
#
# Copyright 2013-2015, John McNamara, jmcnamara@cpan.org
#
import xlsxwriter
workbook = xlsxwriter.Workbook('chart_pie.xlsx')
worksheet = workbook.add_worksheet()
bold = workbook.add_format({'bold': 1})
# Add the worksheet data that the charts will refer to.
headings = ['Category', 'Values']
data = [
['Apple', 'Cherry', 'Pecan'],
[60, 30, 10],
]
worksheet.write_row('A1', headings, bold)
worksheet.write_column('A2', data[0])
worksheet.write_column('B2', data[1])
#######################################################################
#
# Create a new chart object.
#
chart1 = workbook.add_chart({'type': 'pie'})
# Configure the series. Note the use of the list syntax to define ranges:
chart1.add_series({
'name': 'Pie sales data',
'categories': ['Sheet1', 1, 0, 3, 0],
'values': ['Sheet1', 1, 1, 3, 1],
})
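# Note (added for reference, not part of the original example): the list form
# is [sheetname, first_row, first_col, last_row, last_col] with zero-indexed
# cells, so ['Sheet1', 1, 0, 3, 0] selects the same cells as the string range
# '=Sheet1!A2:A4' used for the charts below.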
# Add a title.
chart1.set_title({'name': 'Popular Pie Types'})
# Set an Excel chart style. Colors with white outline and shadow.
chart1.set_style(10)
# Insert the chart into the worksheet (with an offset).
worksheet.insert_chart('C2', chart1, {'x_offset': 25, 'y_offset': 10})
#######################################################################
#
# Create a Pie chart with user defined segment colors.
#
# Create an example Pie chart like above.
chart2 = workbook.add_chart({'type': 'pie'})
# Configure the series and add user defined segment colours.
chart2.add_series({
'name': 'Pie sales data',
'categories': '=Sheet1!A2:A4',
'values': '=Sheet1!B2:B4',
'points': [
{'fill': {'color': '#5ABA10'}},
{'fill': {'color': '#FE110E'}},
{'fill': {'color': '#CA5C05'}},
],
})
# Add a title.
chart2.set_title({'name': 'Pie Chart with user defined colors'})
# Insert the chart into the worksheet (with an offset).
worksheet.insert_chart('C18', chart2, {'x_offset': 25, 'y_offset': 10})
#######################################################################
#
# Create a Pie chart with rotation of the segments.
#
# Create an example Pie chart like above.
chart3 = workbook.add_chart({'type': 'pie'})
# Configure the series.
chart3.add_series({
'name': 'Pie sales data',
'categories': '=Sheet1!A2:A4',
'values': '=Sheet1!B2:B4',
})
# Add a title.
chart3.set_title({'name': 'Pie Chart with segment rotation'})
# Change the angle/rotation of the first segment.
chart3.set_rotation(90)
# Insert the chart into the worksheet (with an offset).
worksheet.insert_chart('C34', chart3, {'x_offset': 25, 'y_offset': 10})
workbook.close()
|
miminus/youtube-dl
|
refs/heads/master
|
youtube_dl/extractor/googlesearch.py
|
168
|
from __future__ import unicode_literals
import itertools
import re
from .common import SearchInfoExtractor
from ..compat import (
compat_urllib_parse,
)
class GoogleSearchIE(SearchInfoExtractor):
IE_DESC = 'Google Video search'
_MAX_RESULTS = 1000
IE_NAME = 'video.google:search'
_SEARCH_KEY = 'gvsearch'
_TEST = {
'url': 'gvsearch15:python language',
'info_dict': {
'id': 'python language',
'title': 'python language',
},
'playlist_count': 15,
}
def _get_n_results(self, query, n):
"""Get a specified number of results for a query"""
entries = []
res = {
'_type': 'playlist',
'id': query,
'title': query,
}
for pagenum in itertools.count():
result_url = (
'http://www.google.com/search?tbm=vid&q=%s&start=%s&hl=en'
% (compat_urllib_parse.quote_plus(query), pagenum * 10))
webpage = self._download_webpage(
result_url, 'gvsearch:' + query,
note='Downloading result page ' + str(pagenum + 1))
for hit_idx, mobj in enumerate(re.finditer(
r'<h3 class="r"><a href="([^"]+)"', webpage)):
# Skip playlists
if not re.search(r'id="vidthumb%d"' % (hit_idx + 1), webpage):
continue
entries.append({
'_type': 'url',
'url': mobj.group(1)
})
if (len(entries) >= n) or not re.search(r'id="pnnext"', webpage):
res['entries'] = entries[:n]
return res
|
leekchan/django_test
|
refs/heads/master
|
django/contrib/gis/db/backends/base.py
|
8
|
"""
Base/mixin classes for the spatial backend database operations and the
`<Backend>SpatialRefSys` model.
"""
from functools import partial
import re
from django.contrib.gis import gdal
from django.utils import six
from django.utils.encoding import python_2_unicode_compatible
class BaseSpatialFeatures(object):
gis_enabled = True
# Does the database contain a SpatialRefSys model to store SRID information?
has_spatialrefsys_table = True
# Does the backend support the django.contrib.gis.utils.add_srs_entry() utility?
supports_add_srs_entry = True
# Does the backend introspect GeometryField to its subtypes?
supports_geometry_field_introspection = True
# Reference implementation of 3D functions is:
# http://postgis.net/docs/PostGIS_Special_Functions_Index.html#PostGIS_3D_Functions
supports_3d_functions = False
# Does the database support SRID transform operations?
supports_transform = True
# Do geometric relationship operations operate on real shapes (or only on bounding boxes)?
supports_real_shape_operations = True
# Can geometry fields be null?
supports_null_geometries = True
# Can the `distance` GeoQuerySet method be applied on geodetic coordinate systems?
supports_distance_geodetic = True
# Is the database able to count vertices on polygons (with `num_points`)?
supports_num_points_poly = True
# The following properties indicate if the database backend support
# certain lookups (dwithin, left and right, relate, ...)
supports_distances_lookups = True
supports_left_right_lookups = False
@property
def supports_bbcontains_lookup(self):
return 'bbcontains' in self.connection.ops.gis_operators
@property
def supports_contained_lookup(self):
return 'contained' in self.connection.ops.gis_operators
@property
def supports_dwithin_lookup(self):
return 'dwithin' in self.connection.ops.gis_operators
@property
def supports_relate_lookup(self):
return 'relate' in self.connection.ops.gis_operators
# For each of those methods, the class will have a property named
# `has_<name>_method` (defined in __init__) which accesses connection.ops
# to determine GIS method availability.
geoqueryset_methods = (
'area', 'centroid', 'difference', 'distance', 'distance_spheroid',
'envelope', 'force_rhr', 'geohash', 'gml', 'intersection', 'kml',
'length', 'num_geom', 'perimeter', 'point_on_surface', 'reverse',
'scale', 'snap_to_grid', 'svg', 'sym_difference', 'transform',
'translate', 'union', 'unionagg',
)
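    # Illustrative sketch (not part of Django): together with __init__ below,
    # the tuple above produces feature flags such as has_transform_method, so
    # callers can feature-detect GeoQuerySet methods on a configured
    # connection, e.g.:
    #
    #     if connection.features.has_transform_method:
    #         qs = qs.transform(4326)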
# Specifies whether the Collect and Extent aggregates are supported by the database
@property
def supports_collect_aggr(self):
return 'Collect' in self.connection.ops.valid_aggregates
@property
def supports_extent_aggr(self):
return 'Extent' in self.connection.ops.valid_aggregates
@property
def supports_make_line_aggr(self):
return 'MakeLine' in self.connection.ops.valid_aggregates
def __init__(self, *args):
super(BaseSpatialFeatures, self).__init__(*args)
for method in self.geoqueryset_methods:
# Add dynamically properties for each GQS method, e.g. has_force_rhr_method, etc.
setattr(self.__class__, 'has_%s_method' % method,
property(partial(BaseSpatialFeatures.has_ops_method, method=method)))
def has_ops_method(self, method):
return getattr(self.connection.ops, method, False)
class BaseSpatialOperations(object):
"""
    Base class for spatial database operations, subclassed by each
    spatial backend with the features it has.
"""
truncate_params = {}
# Quick booleans for the type of this spatial backend, and
# an attribute for the spatial database version tuple (if applicable)
postgis = False
spatialite = False
mysql = False
oracle = False
spatial_version = None
# How the geometry column should be selected.
select = None
# Does the spatial database have a geometry or geography type?
geography = False
geometry = False
area = False
centroid = False
difference = False
distance = False
distance_sphere = False
distance_spheroid = False
envelope = False
force_rhr = False
mem_size = False
bounding_circle = False
num_geom = False
num_points = False
perimeter = False
perimeter3d = False
point_on_surface = False
polygonize = False
reverse = False
scale = False
snap_to_grid = False
sym_difference = False
transform = False
translate = False
union = False
# Aggregates
collect = False
extent = False
extent3d = False
make_line = False
unionagg = False
# Serialization
geohash = False
geojson = False
gml = False
kml = False
svg = False
# Constructors
from_text = False
from_wkb = False
# Default conversion functions for aggregates; will be overridden if implemented
# for the spatial backend.
def convert_extent(self, box):
raise NotImplementedError('Aggregate extent not implemented for this spatial backend.')
def convert_extent3d(self, box):
raise NotImplementedError('Aggregate 3D extent not implemented for this spatial backend.')
def convert_geom(self, geom_val, geom_field):
raise NotImplementedError('Aggregate method not implemented for this spatial backend.')
# For quoting column values, rather than columns.
def geo_quote_name(self, name):
return "'%s'" % name
# GeometryField operations
def geo_db_type(self, f):
"""
Returns the database column type for the geometry field on
the spatial backend.
"""
raise NotImplementedError('subclasses of BaseSpatialOperations must provide a geo_db_type() method')
def get_distance(self, f, value, lookup_type):
"""
Returns the distance parameters for the given geometry field,
lookup value, and lookup type.
"""
raise NotImplementedError('Distance operations not available on this spatial backend.')
def get_geom_placeholder(self, f, value):
"""
Returns the placeholder for the given geometry field with the given
value. Depending on the spatial backend, the placeholder may contain a
stored procedure call to the transformation function of the spatial
backend.
"""
        raise NotImplementedError('subclasses of BaseSpatialOperations must provide a get_geom_placeholder() method')
def get_expression_column(self, evaluator):
"""
Helper method to return the quoted column string from the evaluator
for its expression.
"""
for expr, col_tup in evaluator.cols:
if expr is evaluator.expression:
return '%s.%s' % tuple(map(self.quote_name, col_tup))
raise Exception("Could not find the column for the expression.")
# Spatial SQL Construction
def spatial_aggregate_sql(self, agg):
raise NotImplementedError('Aggregate support not implemented for this spatial backend.')
# Routines for getting the OGC-compliant models.
def geometry_columns(self):
        raise NotImplementedError('subclasses of BaseSpatialOperations must provide a geometry_columns() method')
def spatial_ref_sys(self):
        raise NotImplementedError('subclasses of BaseSpatialOperations must provide a spatial_ref_sys() method')
@python_2_unicode_compatible
class SpatialRefSysMixin(object):
"""
The SpatialRefSysMixin is a class used by the database-dependent
SpatialRefSys objects to reduce redundant code.
"""
# For pulling out the spheroid from the spatial reference string. This
# regular expression is used only if the user does not have GDAL installed.
# TODO: Flattening not used in all ellipsoids, could also be a minor axis,
# or 'b' parameter.
spheroid_regex = re.compile(r'.+SPHEROID\[\"(?P<name>.+)\",(?P<major>\d+(\.\d+)?),(?P<flattening>\d{3}\.\d+),')
# For pulling out the units on platforms w/o GDAL installed.
# TODO: Figure out how to pull out angular units of projected coordinate system and
# fix for LOCAL_CS types. GDAL should be highly recommended for performing
# distance queries.
units_regex = re.compile(
r'.+UNIT ?\["(?P<unit_name>[\w \'\(\)]+)", ?(?P<unit>[\d\.]+)'
r'(,AUTHORITY\["(?P<unit_auth_name>[\w \'\(\)]+)",'
r'"(?P<unit_auth_val>\d+)"\])?\]([\w ]+)?(,'
r'AUTHORITY\["(?P<auth_name>[\w \'\(\)]+)","(?P<auth_val>\d+)"\])?\]$'
)
@property
def srs(self):
"""
Returns a GDAL SpatialReference object, if GDAL is installed.
"""
if gdal.HAS_GDAL:
# TODO: Is caching really necessary here? Is complexity worth it?
if hasattr(self, '_srs'):
# Returning a clone of the cached SpatialReference object.
return self._srs.clone()
else:
# Attempting to cache a SpatialReference object.
# Trying to get from WKT first.
try:
self._srs = gdal.SpatialReference(self.wkt)
return self.srs
except Exception as msg:
pass
try:
self._srs = gdal.SpatialReference(self.proj4text)
return self.srs
except Exception as msg:
pass
raise Exception('Could not get OSR SpatialReference from WKT: %s\nError:\n%s' % (self.wkt, msg))
else:
raise Exception('GDAL is not installed.')
@property
def ellipsoid(self):
"""
Returns a tuple of the ellipsoid parameters:
(semimajor axis, semiminor axis, and inverse flattening).
"""
if gdal.HAS_GDAL:
return self.srs.ellipsoid
else:
m = self.spheroid_regex.match(self.wkt)
if m:
return (float(m.group('major')), float(m.group('flattening')))
else:
return None
@property
def name(self):
"Returns the projection name."
return self.srs.name
@property
def spheroid(self):
"Returns the spheroid name for this spatial reference."
return self.srs['spheroid']
@property
def datum(self):
"Returns the datum for this spatial reference."
return self.srs['datum']
@property
def projected(self):
"Is this Spatial Reference projected?"
if gdal.HAS_GDAL:
return self.srs.projected
else:
return self.wkt.startswith('PROJCS')
@property
def local(self):
"Is this Spatial Reference local?"
if gdal.HAS_GDAL:
return self.srs.local
else:
return self.wkt.startswith('LOCAL_CS')
@property
def geographic(self):
"Is this Spatial Reference geographic?"
if gdal.HAS_GDAL:
return self.srs.geographic
else:
return self.wkt.startswith('GEOGCS')
@property
def linear_name(self):
"Returns the linear units name."
if gdal.HAS_GDAL:
return self.srs.linear_name
elif self.geographic:
return None
else:
m = self.units_regex.match(self.wkt)
return m.group('unit_name')
@property
def linear_units(self):
"Returns the linear units."
if gdal.HAS_GDAL:
return self.srs.linear_units
elif self.geographic:
return None
else:
m = self.units_regex.match(self.wkt)
return m.group('unit')
@property
def angular_name(self):
"Returns the name of the angular units."
if gdal.HAS_GDAL:
return self.srs.angular_name
elif self.projected:
return None
else:
m = self.units_regex.match(self.wkt)
return m.group('unit_name')
@property
def angular_units(self):
"Returns the angular units."
if gdal.HAS_GDAL:
return self.srs.angular_units
elif self.projected:
return None
else:
m = self.units_regex.match(self.wkt)
return m.group('unit')
@property
def units(self):
"Returns a tuple of the units and the name."
if self.projected or self.local:
return (self.linear_units, self.linear_name)
elif self.geographic:
return (self.angular_units, self.angular_name)
else:
return (None, None)
@classmethod
def get_units(cls, wkt):
"""
Class method used by GeometryField on initialization to
retrieve the units on the given WKT, without having to use
any of the database fields.
"""
if gdal.HAS_GDAL:
return gdal.SpatialReference(wkt).units
else:
m = cls.units_regex.match(wkt)
return m.group('unit'), m.group('unit_name')
@classmethod
def get_spheroid(cls, wkt, string=True):
"""
Class method used by GeometryField on initialization to
retrieve the `SPHEROID[..]` parameters from the given WKT.
"""
if gdal.HAS_GDAL:
srs = gdal.SpatialReference(wkt)
sphere_params = srs.ellipsoid
sphere_name = srs['spheroid']
else:
m = cls.spheroid_regex.match(wkt)
if m:
sphere_params = (float(m.group('major')), float(m.group('flattening')))
sphere_name = m.group('name')
else:
return None
if not string:
return sphere_name, sphere_params
else:
# `string` parameter used to place in format acceptable by PostGIS
if len(sphere_params) == 3:
radius, flattening = sphere_params[0], sphere_params[2]
else:
radius, flattening = sphere_params
return 'SPHEROID["%s",%s,%s]' % (sphere_name, radius, flattening)
def __str__(self):
"""
Returns the string representation. If GDAL is installed,
it will be 'pretty' OGC WKT.
"""
try:
return six.text_type(self.srs)
except Exception:
return six.text_type(self.wkt)
|
noelbk/neutron-juniper
|
refs/heads/master
|
neutron/plugins/nicira/nsxlib/__init__.py
|
34
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2013 VMware, Inc.
# All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
|
guncoin/guncoin
|
refs/heads/master
|
share/qt/extract_strings_qt.py
|
24
|
#!/usr/bin/env python3
# Copyright (c) 2012-2018 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
'''
Extract _("...") strings for translation and convert to Qt stringdefs so that
they can be picked up by Qt linguist.
'''
from subprocess import Popen, PIPE
import operator
import os
import sys
OUT_CPP="qt/bitcoinstrings.cpp"
EMPTY=['""']
def parse_po(text):
"""
Parse 'po' format produced by xgettext.
Return a list of (msgid,msgstr) tuples.
"""
messages = []
msgid = []
msgstr = []
in_msgid = False
in_msgstr = False
for line in text.split('\n'):
line = line.rstrip('\r')
if line.startswith('msgid '):
if in_msgstr:
messages.append((msgid, msgstr))
in_msgstr = False
# message start
in_msgid = True
msgid = [line[6:]]
elif line.startswith('msgstr '):
in_msgid = False
in_msgstr = True
msgstr = [line[7:]]
elif line.startswith('"'):
if in_msgid:
msgid.append(line)
if in_msgstr:
msgstr.append(line)
if in_msgstr:
messages.append((msgid, msgstr))
return messages
files = sys.argv[1:]
# xgettext -n --keyword=_ $FILES
XGETTEXT=os.getenv('XGETTEXT', 'xgettext')
if not XGETTEXT:
print('Cannot extract strings: xgettext utility is not installed or not configured.',file=sys.stderr)
print('Please install package "gettext" and re-run \'./configure\'.',file=sys.stderr)
sys.exit(1)
child = Popen([XGETTEXT,'--output=-','-n','--keyword=_'] + files, stdout=PIPE)
(out, err) = child.communicate()
messages = parse_po(out.decode('utf-8'))
f = open(OUT_CPP, 'w', encoding="utf8")
f.write("""
#include <QtGlobal>
// Automatically generated by extract_strings_qt.py
#ifdef __GNUC__
#define UNUSED __attribute__((unused))
#else
#define UNUSED
#endif
""")
f.write('static const char UNUSED *bitcoin_strings[] = {\n')
f.write('QT_TRANSLATE_NOOP("bitcoin-core", "%s"),\n' % (os.getenv('PACKAGE_NAME'),))
f.write('QT_TRANSLATE_NOOP("bitcoin-core", "%s"),\n' % (os.getenv('COPYRIGHT_HOLDERS'),))
if os.getenv('COPYRIGHT_HOLDERS_SUBSTITUTION') != os.getenv('PACKAGE_NAME'):
f.write('QT_TRANSLATE_NOOP("bitcoin-core", "%s"),\n' % (os.getenv('COPYRIGHT_HOLDERS_SUBSTITUTION'),))
messages.sort(key=operator.itemgetter(0))
for (msgid, msgstr) in messages:
if msgid != EMPTY:
f.write('QT_TRANSLATE_NOOP("bitcoin-core", %s),\n' % ('\n'.join(msgid)))
f.write('};\n')
f.close()
|
aps-sids/zulip
|
refs/heads/master
|
tools/emoji_dump/emoji_dump.py
|
114
|
#!/usr/bin/env python
import os
import shutil
import subprocess
import json
from PIL import Image, ImageDraw, ImageFont
class MissingGlyphError(Exception):
pass
def color_font(name, code_point):
in_name = 'bitmaps/strike1/uni{}.png'.format(code_point)
out_name = 'out/unicode/{}.png'.format(code_point)
try:
shutil.copyfile(in_name, out_name)
except IOError:
raise MissingGlyphError('name: %r code_point: %r' % (name, code_point))
def bw_font(name, code_point):
char = unichr(int(code_point, 16))
AA_SCALE = 8
SIZE = (68, 68)
BIG_SIZE = tuple([x * AA_SCALE for x in SIZE])
# AndroidEmoji.ttf is from
# https://android.googlesource.com/platform/frameworks/base.git/+/master/data/fonts/AndroidEmoji.ttf
# commit 07912f876c8639f811b06831465c14c4a3b17663
font = ImageFont.truetype('AndroidEmoji.ttf', 65 * AA_SCALE)
image = Image.new('RGBA', BIG_SIZE)
draw = ImageDraw.Draw(image)
draw.text((0, 0), char, font=font, fill='black')
image.resize(SIZE, Image.ANTIALIAS).save('out/unicode/{}.png'.format(code_point), 'PNG')
# ttx is in the fonttools package, the -z option is only on master
# https://github.com/behdad/fonttools/
# NotoColorEmoji.tff is from
# https://android.googlesource.com/platform/external/noto-fonts/+/kitkat-release/NotoColorEmoji.ttf
subprocess.call('ttx -v -z extfile NotoColorEmoji.ttf', shell=True)
try:
shutil.rmtree('out')
except OSError:
pass
os.mkdir('out')
os.mkdir('out/unicode')
emoji_map = json.load(open('emoji_map.json'))
# Fix data problem with red/blue cars being inaccurate.
emoji_map['blue_car'] = emoji_map['red_car']
emoji_map['red_car'] = emoji_map['oncoming_automobile']
for name, code_point in emoji_map.items():
try:
color_font(name, code_point)
except MissingGlyphError:
try:
bw_font(name, code_point)
except Exception as e:
print e
print 'Missing {}, {}'.format(name, code_point)
continue
os.symlink('unicode/{}.png'.format(code_point), 'out/{}.png'.format(name))
|
wehkamp/ansible
|
refs/heads/devel
|
test/units/plugins/__init__.py
|
7690
|
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
|
alset333/NetworkedLearningChatbot
|
refs/heads/master
|
PeterMaar-NetLrnChatBot/Client/PeterMaarNetworkedChatClientGUI.app/Contents/Resources/__boot__.py
|
1
|
def _reset_sys_path():
# Clear generic sys.path[0]
import sys, os
resources = os.environ['RESOURCEPATH']
while sys.path[0] == resources:
del sys.path[0]
_reset_sys_path()
"""
sys.argv emulation
This module starts a basic event loop to collect file- and url-open AppleEvents. Those get
converted to strings and stuffed into sys.argv. When that is done we continue starting
the application.
This is a workaround to convert scripts that expect filenames on the command-line to work
in a GUI environment. GUI applications should not use this feature.
NOTE: This module uses ctypes and not the Carbon modules in the stdlib because the latter
don't work in 64-bit mode and are also not available with python 3.x.
"""
import sys
import os
import time
import ctypes
import struct
class AEDesc (ctypes.Structure):
_fields_ = [
('descKey', ctypes.c_int),
('descContent', ctypes.c_void_p),
]
class EventTypeSpec (ctypes.Structure):
_fields_ = [
('eventClass', ctypes.c_int),
('eventKind', ctypes.c_uint),
]
def _ctypes_setup():
carbon = ctypes.CDLL('/System/Library/Carbon.framework/Carbon')
timer_func = ctypes.CFUNCTYPE(
None, ctypes.c_void_p, ctypes.c_long)
ae_callback = ctypes.CFUNCTYPE(ctypes.c_int, ctypes.c_void_p,
ctypes.c_void_p, ctypes.c_void_p)
carbon.AEInstallEventHandler.argtypes = [
ctypes.c_int, ctypes.c_int, ae_callback,
ctypes.c_void_p, ctypes.c_char ]
carbon.AERemoveEventHandler.argtypes = [
ctypes.c_int, ctypes.c_int, ae_callback,
ctypes.c_char ]
carbon.AEProcessEvent.restype = ctypes.c_int
carbon.AEProcessEvent.argtypes = [ctypes.c_void_p]
carbon.ReceiveNextEvent.restype = ctypes.c_int
carbon.ReceiveNextEvent.argtypes = [
ctypes.c_long, ctypes.POINTER(EventTypeSpec),
ctypes.c_double, ctypes.c_char,
ctypes.POINTER(ctypes.c_void_p)
]
carbon.AEGetParamDesc.restype = ctypes.c_int
carbon.AEGetParamDesc.argtypes = [
ctypes.c_void_p, ctypes.c_int, ctypes.c_int,
ctypes.POINTER(AEDesc)]
carbon.AECountItems.restype = ctypes.c_int
carbon.AECountItems.argtypes = [ ctypes.POINTER(AEDesc),
ctypes.POINTER(ctypes.c_long) ]
carbon.AEGetNthDesc.restype = ctypes.c_int
carbon.AEGetNthDesc.argtypes = [
ctypes.c_void_p, ctypes.c_long, ctypes.c_int,
ctypes.c_void_p, ctypes.c_void_p ]
carbon.AEGetDescDataSize.restype = ctypes.c_int
carbon.AEGetDescDataSize.argtypes = [ ctypes.POINTER(AEDesc) ]
carbon.AEGetDescData.restype = ctypes.c_int
carbon.AEGetDescData.argtypes = [
ctypes.POINTER(AEDesc),
ctypes.c_void_p,
ctypes.c_int,
]
carbon.FSRefMakePath.restype = ctypes.c_int
carbon.FSRefMakePath.argtypes = [ctypes.c_void_p, ctypes.c_void_p, ctypes.c_uint]
return carbon
def _run_argvemulator(timeout = 60):
# Configure ctypes
carbon = _ctypes_setup()
# Is the emulator running?
running = [True]
timeout = [timeout]
# Configure AppleEvent handlers
ae_callback = carbon.AEInstallEventHandler.argtypes[2]
kAEInternetSuite, = struct.unpack('>i', b'GURL')
kAEISGetURL, = struct.unpack('>i', b'GURL')
kCoreEventClass, = struct.unpack('>i', b'aevt')
kAEOpenApplication, = struct.unpack('>i', b'oapp')
kAEOpenDocuments, = struct.unpack('>i', b'odoc')
keyDirectObject, = struct.unpack('>i', b'----')
typeAEList, = struct.unpack('>i', b'list')
typeChar, = struct.unpack('>i', b'TEXT')
typeFSRef, = struct.unpack('>i', b'fsrf')
FALSE = b'\0'
TRUE = b'\1'
eventLoopTimedOutErr = -9875
kEventClassAppleEvent, = struct.unpack('>i', b'eppc')
kEventAppleEvent = 1
@ae_callback
def open_app_handler(message, reply, refcon):
# Got a kAEOpenApplication event, which means we can
# start up. On some OSX versions this event is even
# sent when an kAEOpenDocuments or kAEOpenURLs event
# is sent later on.
#
# Therefore don't set running to false, but reduce the
# timeout to at most two seconds beyond the current time.
timeout[0] = min(timeout[0], time.time() - start + 2)
#running[0] = False
return 0
carbon.AEInstallEventHandler(kCoreEventClass, kAEOpenApplication,
open_app_handler, 0, FALSE)
@ae_callback
def open_file_handler(message, reply, refcon):
listdesc = AEDesc()
sts = carbon.AEGetParamDesc(message, keyDirectObject, typeAEList,
ctypes.byref(listdesc))
if sts != 0:
print("argvemulator warning: cannot unpack open document event")
running[0] = False
return
item_count = ctypes.c_long()
sts = carbon.AECountItems(ctypes.byref(listdesc), ctypes.byref(item_count))
if sts != 0:
print("argvemulator warning: cannot unpack open document event")
running[0] = False
return
desc = AEDesc()
for i in range(item_count.value):
sts = carbon.AEGetNthDesc(ctypes.byref(listdesc), i+1, typeFSRef, 0, ctypes.byref(desc))
if sts != 0:
print("argvemulator warning: cannot unpack open document event")
running[0] = False
return
sz = carbon.AEGetDescDataSize(ctypes.byref(desc))
buf = ctypes.create_string_buffer(sz)
sts = carbon.AEGetDescData(ctypes.byref(desc), buf, sz)
if sts != 0:
print("argvemulator warning: cannot extract open document event")
continue
fsref = buf
buf = ctypes.create_string_buffer(1024)
sts = carbon.FSRefMakePath(ctypes.byref(fsref), buf, 1023)
if sts != 0:
print("argvemulator warning: cannot extract open document event")
continue
if sys.version_info[0] > 2:
sys.argv.append(buf.value.decode('utf-8'))
else:
sys.argv.append(buf.value)
running[0] = False
return 0
carbon.AEInstallEventHandler(kCoreEventClass, kAEOpenDocuments,
open_file_handler, 0, FALSE)
@ae_callback
def open_url_handler(message, reply, refcon):
listdesc = AEDesc()
ok = carbon.AEGetParamDesc(message, keyDirectObject, typeAEList,
ctypes.byref(listdesc))
if ok != 0:
print("argvemulator warning: cannot unpack open document event")
running[0] = False
return
item_count = ctypes.c_long()
sts = carbon.AECountItems(ctypes.byref(listdesc), ctypes.byref(item_count))
if sts != 0:
print("argvemulator warning: cannot unpack open url event")
running[0] = False
return
desc = AEDesc()
for i in range(item_count.value):
sts = carbon.AEGetNthDesc(ctypes.byref(listdesc), i+1, typeChar, 0, ctypes.byref(desc))
if sts != 0:
print("argvemulator warning: cannot unpack open URL event")
running[0] = False
return
sz = carbon.AEGetDescDataSize(ctypes.byref(desc))
buf = ctypes.create_string_buffer(sz)
sts = carbon.AEGetDescData(ctypes.byref(desc), buf, sz)
if sts != 0:
print("argvemulator warning: cannot extract open URL event")
else:
if sys.version_info[0] > 2:
sys.argv.append(buf.value.decode('utf-8'))
else:
sys.argv.append(buf.value)
running[0] = False
return 0
carbon.AEInstallEventHandler(kAEInternetSuite, kAEISGetURL,
open_url_handler, 0, FALSE)
# Remove the funny -psn_xxx_xxx argument
if len(sys.argv) > 1 and sys.argv[1].startswith('-psn_'):
del sys.argv[1]
start = time.time()
now = time.time()
eventType = EventTypeSpec()
eventType.eventClass = kEventClassAppleEvent
eventType.eventKind = kEventAppleEvent
while running[0] and now - start < timeout[0]:
event = ctypes.c_void_p()
sts = carbon.ReceiveNextEvent(1, ctypes.byref(eventType),
start + timeout[0] - now, TRUE, ctypes.byref(event))
if sts == eventLoopTimedOutErr:
break
elif sts != 0:
print("argvemulator warning: fetching events failed")
break
sts = carbon.AEProcessEvent(event)
if sts != 0:
print("argvemulator warning: processing events failed")
break
carbon.AERemoveEventHandler(kCoreEventClass, kAEOpenApplication,
open_app_handler, FALSE)
carbon.AERemoveEventHandler(kCoreEventClass, kAEOpenDocuments,
open_file_handler, FALSE)
carbon.AERemoveEventHandler(kAEInternetSuite, kAEISGetURL,
open_url_handler, FALSE)
def _argv_emulation():
import sys, os
# only use if started by LaunchServices
if os.environ.get('_PY2APP_LAUNCHED_'):
_run_argvemulator()
_argv_emulation()
def _chdir_resource():
import os
os.chdir(os.environ['RESOURCEPATH'])
_chdir_resource()
def _disable_linecache():
import linecache
def fake_getline(*args, **kwargs):
return ''
linecache.orig_getline = linecache.getline
linecache.getline = fake_getline
_disable_linecache()
import re, sys
cookie_re = re.compile(b"coding[:=]\s*([-\w.]+)")
if sys.version_info[0] == 2:
default_encoding = 'ascii'
else:
default_encoding = 'utf-8'
def guess_encoding(fp):
for i in range(2):
ln = fp.readline()
m = cookie_re.search(ln)
if m is not None:
return m.group(1).decode('ascii')
return default_encoding
def _run():
global __file__
import os, site
sys.frozen = 'macosx_app'
base = os.environ['RESOURCEPATH']
argv0 = os.path.basename(os.environ['ARGVZERO'])
script = SCRIPT_MAP.get(argv0, DEFAULT_SCRIPT)
path = os.path.join(base, script)
sys.argv[0] = __file__ = path
if sys.version_info[0] == 2:
with open(path, 'rU') as fp:
source = fp.read() + "\n"
else:
with open(path, 'rb') as fp:
encoding = guess_encoding(fp)
with open(path, 'r', encoding=encoding) as fp:
source = fp.read() + '\n'
exec(compile(source, path, 'exec'), globals(), globals())
def _setup_ctypes():
from ctypes.macholib import dyld
import os
frameworks = os.path.join(os.environ['RESOURCEPATH'], '..', 'Frameworks')
dyld.DEFAULT_FRAMEWORK_FALLBACK.insert(0, frameworks)
dyld.DEFAULT_LIBRARY_FALLBACK.insert(0, frameworks)
_setup_ctypes()
DEFAULT_SCRIPT='PeterMaarNetworkedChatClientGUI.py'
SCRIPT_MAP={}
_run()
|
r3tard/BartusBot
|
refs/heads/master
|
lib/protorpc/test_util.py
|
24
|
#!/usr/bin/env python
#
# Copyright 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Test utilities for message testing.
Includes module interface test to ensure that public parts of module are
correctly declared in __all__.
Includes message types that correspond to those defined in
services_test.proto.
Includes additional test utilities to make sure encoding/decoding libraries
conform.
"""
from six.moves import range
__author__ = 'rafek@google.com (Rafe Kaplan)'
import cgi
import datetime
import inspect
import os
import re
import socket
import types
import unittest2 as unittest
import six
from . import message_types
from . import messages
from . import util
# Unicode of the word "Russian" in cyrillic.
RUSSIAN = u'\u0440\u0443\u0441\u0441\u043a\u0438\u0439'
# All characters binary value interspersed with nulls.
BINARY = b''.join(six.int2byte(value) + b'\0' for value in range(256))
class TestCase(unittest.TestCase):
def assertRaisesWithRegexpMatch(self,
exception,
regexp,
function,
*params,
**kwargs):
"""Check that exception is raised and text matches regular expression.
Args:
exception: Exception type that is expected.
regexp: String regular expression that is expected in error message.
function: Callable to test.
params: Parameters to forward to function.
kwargs: Keyword arguments to forward to function.
"""
try:
function(*params, **kwargs)
self.fail('Expected exception %s was not raised' % exception.__name__)
except exception as err:
match = bool(re.match(regexp, str(err)))
self.assertTrue(match, 'Expected match "%s", found "%s"' % (regexp,
err))
def assertHeaderSame(self, header1, header2):
"""Check that two HTTP headers are the same.
Args:
header1: Header value string 1.
header2: header value string 2.
"""
value1, params1 = cgi.parse_header(header1)
value2, params2 = cgi.parse_header(header2)
self.assertEqual(value1, value2)
self.assertEqual(params1, params2)
def assertIterEqual(self, iter1, iter2):
"""Check that two iterators or iterables are equal independent of order.
Similar to Python 2.7 assertItemsEqual. Named differently in order to
avoid potential conflict.
Args:
iter1: An iterator or iterable.
iter2: An iterator or iterable.
"""
list1 = list(iter1)
list2 = list(iter2)
unmatched1 = list()
while list1:
item1 = list1[0]
del list1[0]
for index in range(len(list2)):
if item1 == list2[index]:
del list2[index]
break
else:
unmatched1.append(item1)
error_message = []
for item in unmatched1:
error_message.append(
' Item from iter1 not found in iter2: %r' % item)
for item in list2:
error_message.append(
' Item from iter2 not found in iter1: %r' % item)
if error_message:
self.fail('Collections not equivalent:\n' + '\n'.join(error_message))
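# --- Illustrative usage sketch; not part of the original protorpc module. ---
# The helper assertions above are normally called from concrete test cases.
# The class and method names below are hypothetical, and the method name
# deliberately does not start with 'test' so no runner collects it.
class _ExampleAssertions(TestCase):

  def example_usage(self):
    # Order-independent comparison of two iterables.
    self.assertIterEqual([1, 2, 2, 3], iter([3, 2, 1, 2]))
    # Expect a ValueError whose message starts with 'invalid literal'.
    self.assertRaisesWithRegexpMatch(ValueError, 'invalid literal',
                                     int, 'not-a-number')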
class ModuleInterfaceTest(object):
"""Test to ensure module interface is carefully constructed.
A module interface is the set of public objects listed in the module __all__
attribute. Modules that are considered public should have this interface
carefully declared. At all times, the __all__ attribute should have objects
intended to be publicly used and all other objects in the module should be
considered unused.
Protected attributes (those beginning with '_') and other imported modules
should not be part of this set of variables. An exception is for variables
that begin and end with '__' which are implicitly part of the interface
(e.g. __name__, __file__, __all__ itself, etc.).
Modules that are imported into the tested modules are an exception and may
be left out of the __all__ definition. The test is done by checking the value
of what would otherwise be a public name and not allowing it to be exported
if it is an instance of a module. Modules that are explicitly exported are
for the time being not permitted.
To use this test class a module should define a new class that inherits first
from ModuleInterfaceTest and then from test_util.TestCase. No other tests
should be added to this test case, making the order of inheritance less
important, but if setUp for some reason is overridden, it is important that
ModuleInterfaceTest is first in the list so that its setUp method is
invoked.
Multiple inheritance is required so that ModuleInterfaceTest is not itself
a test, and is not itself executed as one.
The test class is expected to have the following class attributes defined:
MODULE: A reference to the module that is being validated for interface
correctness.
Example:
Module definition (hello.py):
import sys
__all__ = ['hello']
def _get_outputter():
return sys.stdout
def hello():
_get_outputter().write('Hello\n')
Test definition:
import unittest
from protorpc import test_util
import hello
class ModuleInterfaceTest(test_util.ModuleInterfaceTest,
test_util.TestCase):
MODULE = hello
class HelloTest(test_util.TestCase):
... Test 'hello' module ...
if __name__ == '__main__':
unittest.main()
"""
def setUp(self):
"""Set up makes sure that MODULE and IMPORTED_MODULES is defined.
This is a basic configuration test for the test itself so does not
get its own test case.
"""
if not hasattr(self, 'MODULE'):
self.fail(
"You must define 'MODULE' on ModuleInterfaceTest sub-class %s." %
type(self).__name__)
def testAllExist(self):
"""Test that all attributes defined in __all__ exist."""
missing_attributes = []
for attribute in self.MODULE.__all__:
if not hasattr(self.MODULE, attribute):
missing_attributes.append(attribute)
if missing_attributes:
self.fail('%s of __all__ are not defined in module.' %
missing_attributes)
def testAllExported(self):
"""Test that all public attributes not imported are in __all__."""
missing_attributes = []
for attribute in dir(self.MODULE):
if not attribute.startswith('_'):
if (attribute not in self.MODULE.__all__ and
not isinstance(getattr(self.MODULE, attribute),
types.ModuleType) and
attribute != 'with_statement'):
missing_attributes.append(attribute)
if missing_attributes:
self.fail('%s are not modules and not defined in __all__.' %
missing_attributes)
def testNoExportedProtectedVariables(self):
"""Test that there are no protected variables listed in __all__."""
protected_variables = []
for attribute in self.MODULE.__all__:
if attribute.startswith('_'):
protected_variables.append(attribute)
if protected_variables:
self.fail('%s are protected variables and may not be exported.' %
protected_variables)
def testNoExportedModules(self):
"""Test that no modules exist in __all__."""
exported_modules = []
for attribute in self.MODULE.__all__:
try:
value = getattr(self.MODULE, attribute)
except AttributeError:
# This is a different error case tested for in testAllExist.
pass
else:
if isinstance(value, types.ModuleType):
exported_modules.append(attribute)
if exported_modules:
self.fail('%s are modules and may not be exported.' % exported_modules)
class NestedMessage(messages.Message):
"""Simple message that gets nested in another message."""
a_value = messages.StringField(1, required=True)
class HasNestedMessage(messages.Message):
"""Message that has another message nested in it."""
nested = messages.MessageField(NestedMessage, 1)
repeated_nested = messages.MessageField(NestedMessage, 2, repeated=True)
class HasDefault(messages.Message):
"""Has a default value."""
a_value = messages.StringField(1, default=u'a default')
class OptionalMessage(messages.Message):
"""Contains all message types."""
class SimpleEnum(messages.Enum):
"""Simple enumeration type."""
VAL1 = 1
VAL2 = 2
double_value = messages.FloatField(1, variant=messages.Variant.DOUBLE)
float_value = messages.FloatField(2, variant=messages.Variant.FLOAT)
int64_value = messages.IntegerField(3, variant=messages.Variant.INT64)
uint64_value = messages.IntegerField(4, variant=messages.Variant.UINT64)
int32_value = messages.IntegerField(5, variant=messages.Variant.INT32)
bool_value = messages.BooleanField(6, variant=messages.Variant.BOOL)
string_value = messages.StringField(7, variant=messages.Variant.STRING)
bytes_value = messages.BytesField(8, variant=messages.Variant.BYTES)
enum_value = messages.EnumField(SimpleEnum, 10)
# TODO(rafek): Add support for these variants.
# uint32_value = messages.IntegerField(9, variant=messages.Variant.UINT32)
# sint32_value = messages.IntegerField(11, variant=messages.Variant.SINT32)
# sint64_value = messages.IntegerField(12, variant=messages.Variant.SINT64)
class RepeatedMessage(messages.Message):
"""Contains all message types as repeated fields."""
class SimpleEnum(messages.Enum):
"""Simple enumeration type."""
VAL1 = 1
VAL2 = 2
double_value = messages.FloatField(1,
variant=messages.Variant.DOUBLE,
repeated=True)
float_value = messages.FloatField(2,
variant=messages.Variant.FLOAT,
repeated=True)
int64_value = messages.IntegerField(3,
variant=messages.Variant.INT64,
repeated=True)
uint64_value = messages.IntegerField(4,
variant=messages.Variant.UINT64,
repeated=True)
int32_value = messages.IntegerField(5,
variant=messages.Variant.INT32,
repeated=True)
bool_value = messages.BooleanField(6,
variant=messages.Variant.BOOL,
repeated=True)
string_value = messages.StringField(7,
variant=messages.Variant.STRING,
repeated=True)
bytes_value = messages.BytesField(8,
variant=messages.Variant.BYTES,
repeated=True)
#uint32_value = messages.IntegerField(9, variant=messages.Variant.UINT32)
enum_value = messages.EnumField(SimpleEnum,
10,
repeated=True)
#sint32_value = messages.IntegerField(11, variant=messages.Variant.SINT32)
#sint64_value = messages.IntegerField(12, variant=messages.Variant.SINT64)
class HasOptionalNestedMessage(messages.Message):
nested = messages.MessageField(OptionalMessage, 1)
repeated_nested = messages.MessageField(OptionalMessage, 2, repeated=True)
class ProtoConformanceTestBase(object):
"""Protocol conformance test base class.
Each supported protocol should implement two methods that support encoding
and decoding of Message objects in that format:
encode_message(message) - Serialize to encoding.
decode_message(message, encoded_message) - Deserialize from encoding.
Tests for the modules where these functions are implemented should extend
this class in order to support basic behavioral expectations. This ensures
that protocols correctly encode and decode message transparently to the
caller.
In order to support these tests, the base class should also extend the TestCase
class and implement the following class attributes which define the encoded
version of certain protocol buffers:
encoded_partial:
<OptionalMessage
double_value: 1.23
int64_value: -100000000000
string_value: u"a string"
enum_value: OptionalMessage.SimpleEnum.VAL2
>
encoded_full:
<OptionalMessage
double_value: 1.23
float_value: -2.5
int64_value: -100000000000
uint64_value: 102020202020
int32_value: 1020
bool_value: true
string_value: u"a string\u044f"
bytes_value: b"a bytes\xff\xfe"
enum_value: OptionalMessage.SimpleEnum.VAL2
>
encoded_repeated:
<RepeatedMessage
double_value: [1.23, 2.3]
float_value: [-2.5, 0.5]
int64_value: [-100000000000, 20]
uint64_value: [102020202020, 10]
int32_value: [1020, 718]
bool_value: [true, false]
string_value: [u"a string\u044f", u"another string"]
bytes_value: [b"a bytes\xff\xfe", b"another bytes"]
enum_value: [OptionalMessage.SimpleEnum.VAL2,
OptionalMessage.SimpleEnum.VAL1]
>
encoded_nested:
<HasNestedMessage
nested: <NestedMessage
a_value: "a string"
>
>
encoded_repeated_nested:
<HasNestedMessage
repeated_nested: [
<NestedMessage a_value: "a string">,
<NestedMessage a_value: "another string">
]
>
unexpected_tag_message:
An encoded message that has an undefined tag or number in the stream.
encoded_default_assigned:
<HasDefault
a_value: "a default"
>
encoded_nested_empty:
<HasOptionalNestedMessage
nested: <OptionalMessage>
>
encoded_invalid_enum:
<OptionalMessage
enum_value: (invalid value for serialization type)
>
"""
encoded_empty_message = ''
def testEncodeInvalidMessage(self):
message = NestedMessage()
self.assertRaises(messages.ValidationError,
self.PROTOLIB.encode_message, message)
def CompareEncoded(self, expected_encoded, actual_encoded):
"""Compare two encoded protocol values.
Can be overridden by sub-classes to special case comparison.
For example, to eliminate white space from output that is not
relevant to encoding.
Args:
expected_encoded: Expected string encoded value.
actual_encoded: Actual string encoded value.
"""
self.assertEquals(expected_encoded, actual_encoded)
def EncodeDecode(self, encoded, expected_message):
message = self.PROTOLIB.decode_message(type(expected_message), encoded)
self.assertEquals(expected_message, message)
self.CompareEncoded(encoded, self.PROTOLIB.encode_message(message))
def testEmptyMessage(self):
self.EncodeDecode(self.encoded_empty_message, OptionalMessage())
def testPartial(self):
"""Test message with a few values set."""
message = OptionalMessage()
message.double_value = 1.23
message.int64_value = -100000000000
message.int32_value = 1020
message.string_value = u'a string'
message.enum_value = OptionalMessage.SimpleEnum.VAL2
self.EncodeDecode(self.encoded_partial, message)
def testFull(self):
"""Test all types."""
message = OptionalMessage()
message.double_value = 1.23
message.float_value = -2.5
message.int64_value = -100000000000
message.uint64_value = 102020202020
message.int32_value = 1020
message.bool_value = True
message.string_value = u'a string\u044f'
message.bytes_value = b'a bytes\xff\xfe'
message.enum_value = OptionalMessage.SimpleEnum.VAL2
self.EncodeDecode(self.encoded_full, message)
def testRepeated(self):
"""Test repeated fields."""
message = RepeatedMessage()
message.double_value = [1.23, 2.3]
message.float_value = [-2.5, 0.5]
message.int64_value = [-100000000000, 20]
message.uint64_value = [102020202020, 10]
message.int32_value = [1020, 718]
message.bool_value = [True, False]
message.string_value = [u'a string\u044f', u'another string']
message.bytes_value = [b'a bytes\xff\xfe', b'another bytes']
message.enum_value = [RepeatedMessage.SimpleEnum.VAL2,
RepeatedMessage.SimpleEnum.VAL1]
self.EncodeDecode(self.encoded_repeated, message)
def testNested(self):
"""Test nested messages."""
nested_message = NestedMessage()
nested_message.a_value = u'a string'
message = HasNestedMessage()
message.nested = nested_message
self.EncodeDecode(self.encoded_nested, message)
def testRepeatedNested(self):
"""Test repeated nested messages."""
nested_message1 = NestedMessage()
nested_message1.a_value = u'a string'
nested_message2 = NestedMessage()
nested_message2.a_value = u'another string'
message = HasNestedMessage()
message.repeated_nested = [nested_message1, nested_message2]
self.EncodeDecode(self.encoded_repeated_nested, message)
def testStringTypes(self):
"""Test that encoding str on StringField works."""
message = OptionalMessage()
message.string_value = 'Latin'
self.EncodeDecode(self.encoded_string_types, message)
def testEncodeUninitialized(self):
"""Test that cannot encode uninitialized message."""
required = NestedMessage()
self.assertRaisesWithRegexpMatch(messages.ValidationError,
"Message NestedMessage is missing "
"required field a_value",
self.PROTOLIB.encode_message,
required)
def testUnexpectedField(self):
"""Test decoding and encoding unexpected fields."""
loaded_message = self.PROTOLIB.decode_message(OptionalMessage,
self.unexpected_tag_message)
# Message should be equal to an empty message, since unknown values aren't
# included in equality.
self.assertEquals(OptionalMessage(), loaded_message)
# Verify that the encoded message matches the source, including the
# unknown value.
self.assertEquals(self.unexpected_tag_message,
self.PROTOLIB.encode_message(loaded_message))
def testDoNotSendDefault(self):
"""Test that default is not sent when nothing is assigned."""
self.EncodeDecode(self.encoded_empty_message, HasDefault())
def testSendDefaultExplicitlyAssigned(self):
"""Test that default is sent when explcitly assigned."""
message = HasDefault()
message.a_value = HasDefault.a_value.default
self.EncodeDecode(self.encoded_default_assigned, message)
def testEncodingNestedEmptyMessage(self):
"""Test encoding a nested empty message."""
message = HasOptionalNestedMessage()
message.nested = OptionalMessage()
self.EncodeDecode(self.encoded_nested_empty, message)
def testEncodingRepeatedNestedEmptyMessage(self):
"""Test encoding a nested empty message."""
message = HasOptionalNestedMessage()
message.repeated_nested = [OptionalMessage(), OptionalMessage()]
self.EncodeDecode(self.encoded_repeated_nested_empty, message)
def testContentType(self):
self.assertTrue(isinstance(self.PROTOLIB.CONTENT_TYPE, str))
def testDecodeInvalidEnumType(self):
self.assertRaisesWithRegexpMatch(messages.DecodeError,
'Invalid enum value ',
self.PROTOLIB.decode_message,
OptionalMessage,
self.encoded_invalid_enum)
def testDateTimeNoTimeZone(self):
"""Test that DateTimeFields are encoded/decoded correctly."""
class MyMessage(messages.Message):
value = message_types.DateTimeField(1)
value = datetime.datetime(2013, 1, 3, 11, 36, 30, 123000)
message = MyMessage(value=value)
decoded = self.PROTOLIB.decode_message(
MyMessage, self.PROTOLIB.encode_message(message))
self.assertEquals(decoded.value, value)
def testDateTimeWithTimeZone(self):
"""Test DateTimeFields with time zones."""
class MyMessage(messages.Message):
value = message_types.DateTimeField(1)
value = datetime.datetime(2013, 1, 3, 11, 36, 30, 123000,
util.TimeZoneOffset(8 * 60))
message = MyMessage(value=value)
decoded = self.PROTOLIB.decode_message(
MyMessage, self.PROTOLIB.encode_message(message))
self.assertEquals(decoded.value, value)
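# Illustrative sketch only (hypothetical; the encoded strings are placeholders,
# not real serializations): a concrete conformance test combines this base
# class with TestCase and points PROTOLIB at the encoding module under test,
# along the lines of:
#
#   from protorpc import protojson
#
#   class ProtojsonConformanceTest(ProtoConformanceTestBase, TestCase):
#     PROTOLIB = protojson
#     encoded_empty_message = '{}'
#     encoded_partial = '...'   # and so on for the other encoded_* attributes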
def do_with(context, function, *args, **kwargs):
"""Simulate a with statement.
Avoids need to import with from future.
Does not support simulation of 'as'.
Args:
context: Context object normally used with 'with'.
function: Callable to invoke. Replaces the with-block.
"""
context.__enter__()
try:
function(*args, **kwargs)
except:
context.__exit__(*sys.exc_info())
else:
context.__exit__(None, None, None)
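# Hypothetical usage sketch for do_with (the file name is illustrative only):
# instead of "with open('example.log', 'w') as f: f.write('hello\n')", code
# written before the with statement was available could do:
#
#   log = open('example.log', 'w')
#   do_with(log, log.write, 'hello\n')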
def pick_unused_port():
"""Find an unused port to use in tests.
Derived from Damon Kohler's example:
http://code.activestate.com/recipes/531822-pick-unused-port
"""
temp = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
temp.bind(('localhost', 0))
port = temp.getsockname()[1]
finally:
temp.close()
return port
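# Illustrative sketch only: a test might grab a free port and bind a server
# socket to it.  Another process can take the port between the call and the
# bind, so this is inherently racy and only suitable for tests.
#
#   port = pick_unused_port()
#   server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
#   server.bind(('localhost', port))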
def get_module_name(module_attribute):
"""Get the module name.
Args:
module_attribute: An attribute of the module.
Returns:
The fully qualified module name or simple module name where
'module_attribute' is defined if the module name is "__main__".
"""
if module_attribute.__module__ == '__main__':
module_file = inspect.getfile(module_attribute)
default = os.path.basename(module_file).split('.')[0]
return default
else:
return module_attribute.__module__
|
gordon-elliott/glod
|
refs/heads/master
|
src/glod/model/tax_rebate_submission.py
|
1
|
__copyright__ = 'Copyright(c) Gordon Elliott 2020'
"""
"""
from enum import IntEnum
from a_tuin.metadata import (
ObjectFieldGroupBase, Collection, IntEnumField, DecimalField,
IntField, DateField, DescriptionField, StringField
)
class SubmissionStatus(IntEnum):
Preparing = 1
Posted = 2
Revoked = 3
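# Minimal sketch, not used by the application: SubmissionStatus is a plain
# stdlib IntEnum, so statuses round-trip through their integer codes.
def _submission_status_roundtrip_example():
    assert SubmissionStatus(1) is SubmissionStatus.Preparing
    assert SubmissionStatus.Posted.value == 2
    return SubmissionStatus.Revoked.name  # 'Revoked'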
class SubmissionStatusField(IntEnumField):
def __init__(self, name, is_mutable=True, required=False, default=None, description=None, validation=None):
super().__init__(name, SubmissionStatus, is_mutable, required, default, description, validation)
class TaxRebateSubmission(ObjectFieldGroupBase):
# Data usage
#
# Record of the years in which a person's PPS was submitted in a rebate claim
public_interface = (
SubmissionStatusField(
'status',
required=True,
default=SubmissionStatus.Preparing,
description='Records what stage in its lifecycle the submission is at.'
),
IntField('FY', required=True),
DecimalField('calculated_rebate'),
DateField('filing_date'),
DecimalField('estimated_rebate', description='Estimated rebate from CDS1 form.'),
StringField('notice_number'),
DescriptionField('notes'),
)
class TaxRebateSubmissionCollection(Collection):
pass
|
Johnzero/erp
|
refs/heads/fga
|
openerp/addons/point_of_sale/wizard/pos_close_statement.py
|
9
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from osv import osv
from tools.translate import _
class pos_close_statement(osv.osv_memory):
_name = 'pos.close.statement'
_description = 'Close Statements'
def cancel_wizard(self, cr, uid, ids, context=None):
if context.get('cancel_action'):
return context['cancel_action']
def close_statement(self, cr, uid, ids, context=None):
"""
Close the statements
@param self: The object pointer.
@param cr: A database cursor
@param uid: ID of the user currently logged in
@param context: A standard dictionary
@return: Dictionary describing the window action that displays the closed statements
"""
context = context or {}
mod_obj = self.pool.get('ir.model.data')
statement_obj = self.pool.get('account.bank.statement')
journal_obj = self.pool.get('account.journal')
j_ids = journal_obj.search(cr, uid, [('journal_user','=',1)], context=context)
ids = statement_obj.search(cr, uid, [('state', '!=', 'confirm'), ('user_id', '=', uid), ('journal_id', 'in', j_ids)], context=context)
if not ids:
raise osv.except_osv(_('Message'), _('Cash registers are already closed.'))
for statement in statement_obj.browse(cr, uid, ids, context=context):
statement_obj.write(cr, uid, [statement.id], {
'balance_end_real': statement.balance_end
}, context=context)
if not statement.journal_id.check_dtls:
statement_obj.button_confirm_cash(cr, uid, [statement.id], context=context)
tree_res = mod_obj.get_object_reference(cr, uid, 'point_of_sale', 'view_cash_statement_pos_tree')
tree_id = tree_res and tree_res[1] or False
form_res = mod_obj.get_object_reference(cr, uid, 'account', 'view_bank_statement_form2')
form_id = form_res and form_res[1] or False
search_res = mod_obj.get_object_reference(cr, uid, 'account', 'view_account_bank_statement_filter')
return {
'domain': str([('id', 'in', ids)]),
'name': _('Close Cash Registers'),
'view_type': 'form',
'view_mode': 'tree,form',
'search_view_id': search_res and search_res[1] or False,
'res_model': 'account.bank.statement',
'views': [(tree_id, 'tree'), (form_id, 'form')],
'type': 'ir.actions.act_window'
}
pos_close_statement()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
dayatz/taiga-back
|
refs/heads/stable
|
taiga/projects/tagging/fields.py
|
1
|
# -*- coding: utf-8 -*-
# Copyright (C) 2014-2017 Andrey Antukh <niwi@niwi.nz>
# Copyright (C) 2014-2017 Jesús Espino <jespinog@gmail.com>
# Copyright (C) 2014-2017 David Barragán <bameda@dbarragan.com>
# Copyright (C) 2014-2017 Alejandro Alonso <alejandro.alonso@kaleidos.net>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.forms import widgets
from django.utils.translation import ugettext_lazy as _
from taiga.base.api import serializers
from taiga.base.exceptions import ValidationError
import re
class TagsAndTagsColorsField(serializers.WritableField):
"""
Pickle objects serializer for stories, tasks and issues tags.
"""
def __init__(self, *args, **kwargs):
def _validate_tag_field(value):
# Valid field:
# - ["tag1", "tag2", "tag3"...]
# - ["tag1", ["tag2", None], ["tag3", "#ccc"], [tag4, #cccccc]...]
for tag in value:
if isinstance(tag, str):
continue
if isinstance(tag, (list, tuple)) and len(tag) == 2:
name = tag[0]
color = tag[1]
if isinstance(name, str):
if color is None or color == "":
continue
if isinstance(color, str) and re.match('^\#([a-fA-F0-9]{6}|[a-fA-F0-9]{3})$', color):
continue
raise ValidationError(_("Invalid tag '{value}'. The color is not a "
"valid HEX color or null.").format(value=tag))
raise ValidationError(_("Invalid tag '{value}'. it must be the name or a pair "
"'[\"name\", \"hex color/\" | null]'.").format(value=tag))
super().__init__(*args, **kwargs)
self.validators.append(_validate_tag_field)
def to_native(self, obj):
return obj
def from_native(self, data):
return data
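# Illustrative examples only (module-level constants added for documentation,
# not used at runtime): payloads accepted by the validator above.  A tag is
# either a bare name or a [name, color] pair where color is None, "" or a HEX
# string such as "#ccc" or "#cc0000".
_EXAMPLE_VALID_TAGS = ["backend", ["frontend", None], ["urgent", "#cc0000"]]
_EXAMPLE_INVALID_TAG = ["urgent", "red"]  # rejected: "red" is not a HEX color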
class TagsField(serializers.WritableField):
"""
Pickle objects serializer for tags names.
"""
def __init__(self, *args, **kwargs):
def _validate_tag_field(value):
for tag in value:
if isinstance(tag, str):
continue
raise ValidationError(_("Invalid tag '{value}'. It must be the tag name.").format(value=tag))
super().__init__(*args, **kwargs)
self.validators.append(_validate_tag_field)
def to_native(self, obj):
return obj
def from_native(self, data):
return data
class TagsColorsField(serializers.WritableField):
"""
PgArray objects serializer.
"""
widget = widgets.Textarea
def to_native(self, obj):
return dict(obj)
def from_native(self, data):
return list(data.items())
|
3dfxmadscientist/CBSS
|
refs/heads/master
|
addons/account_test/report/account_test_report.py
|
44
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>). All Rights Reserved
# $Id$
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import datetime
import time
from report import report_sxw
from openerp.tools.translate import _
#
# Use period and Journal for selection or resources
#
class report_assert_account(report_sxw.rml_parse):
def __init__(self, cr, uid, name, context):
super(report_assert_account, self).__init__(cr, uid, name, context=context)
self.localcontext.update( {
'time': time,
'datetime': datetime,
'execute_code': self.execute_code,
})
def execute_code(self, code_exec):
def reconciled_inv():
"""
returns the list of invoices that are set as reconciled = True
"""
return self.pool.get('account.invoice').search(self.cr, self.uid, [('reconciled','=',True)])
def order_columns(item, cols=None):
"""
This function is used to display a dictionary as a string, with its columns in the order chosen.
:param item: dict
:param cols: list of field names
:returns: a list of (fieldname, value) tuples, similar to what dict.items() would return, except that the
returned values follow the order given by cols
:rtype: [(key, value)]
"""
if cols is None:
cols = item.keys()
return [(col, item.get(col)) for col in cols if col in item.keys()]
localdict = {
'cr': self.cr,
'uid': self.uid,
'reconciled_inv': reconciled_inv, #specific function used in different tests
'result': None, #used to store the result of the test
'column_order': None, #used to choose the display order of columns (in case you are returning a list of dict)
}
exec code_exec in localdict
result = localdict['result']
column_order = localdict.get('column_order', None)
if not isinstance(result, (tuple, list, set)):
result = [result]
if not result:
result = [_('The test was passed successfully')]
else:
def _format(item):
if isinstance(item, dict):
return ', '.join(["%s: %s" % (tup[0], tup[1]) for tup in order_columns(item, column_order)])
else:
return item
result = [_(_format(rec)) for rec in result]
return result
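# Illustrative sketch only: the code snippets passed to execute_code() live on
# accounting.assert.test records and must assign their outcome to 'result'
# (optionally also 'column_order').  The SQL below is hypothetical and is not
# executed here.
_EXAMPLE_TEST_CODE = """
cr.execute("select id, name from account_account where parent_id is null")
result = cr.dictfetchall()
column_order = ['id', 'name']
"""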
report_sxw.report_sxw('report.account.test.assert.print', 'accounting.assert.test', 'addons/account_test/report/account_test.rml', parser=report_assert_account, header=False)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
mientuma/shop
|
refs/heads/master
|
vendor/doctrine/orm/docs/en/conf.py
|
2448
|
# -*- coding: utf-8 -*-
#
# Doctrine 2 ORM documentation build configuration file, created by
# sphinx-quickstart on Fri Dec 3 18:10:24 2010.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.append(os.path.abspath('_exts'))
# -- General configuration -----------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['configurationblock']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Doctrine 2 ORM'
copyright = u'2010-12, Doctrine Project Team'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '2'
# The full version, including alpha/beta/rc tags.
release = '2'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
language = 'en'
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of documents that shouldn't be included in the build.
#unused_docs = []
# List of directories, relative to source directory, that shouldn't be searched
# for source files.
exclude_trees = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
show_authors = True
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. Major themes that come with
# Sphinx are currently 'default' and 'sphinxdoc'.
html_theme = 'doctrine'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = ['_theme']
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_use_modindex = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = ''
# Output file base name for HTML help builder.
htmlhelp_basename = 'Doctrine2ORMdoc'
# -- Options for LaTeX output --------------------------------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'Doctrine2ORM.tex', u'Doctrine 2 ORM Documentation',
u'Doctrine Project Team', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# Additional stuff for the LaTeX preamble.
#latex_preamble = ''
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_use_modindex = True
primary_domain = "dcorm"
def linkcode_resolve(domain, info):
if domain == 'dcorm':
return 'http://'
return None
|
kpkhxlgy0/SublimeText3
|
refs/heads/master
|
Packages/Python PEP8 Autoformat/libs/py26/lib2to3/fixes/fix_metaclass.py
|
7
|
"""Fixer for __metaclass__ = X -> (metaclass=X) methods.
The various forms of classdef (inherits nothing, inherits once, inherits
many) don't parse the same in the CST so we look at ALL classes for
a __metaclass__ and if we find one normalize the inherits to all be
an arglist.
For one-liner classes ('class X: pass') there is no indent/dedent so
we normalize those into having a suite.
Moving the __metaclass__ into the classdef can also cause the class
body to be empty so there is some special casing for that as well.
This fixer also tries very hard to keep original indenting and spacing
in all those corner cases.
"""
# Author: Jack Diederich
# Local imports
from .. import fixer_base
from ..pygram import token
from ..fixer_util import Name, syms, Node, Leaf
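# Illustrative before/after sketch only (these strings are not used by the
# fixer): the overall rewrite performed on a typical class definition.
_EXAMPLE_INPUT = (
    "class C(Base):\n"
    "    __metaclass__ = Meta\n"
    "    x = 1\n"
)
_EXAMPLE_OUTPUT = (
    "class C(Base, metaclass=Meta):\n"
    "    x = 1\n"
)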
def has_metaclass(parent):
""" we have to check the cls_node without changing it.
There are two possibilities:
1) clsdef => suite => simple_stmt => expr_stmt => Leaf('__meta')
2) clsdef => simple_stmt => expr_stmt => Leaf('__meta')
"""
for node in parent.children:
if node.type == syms.suite:
return has_metaclass(node)
elif node.type == syms.simple_stmt and node.children:
expr_node = node.children[0]
if expr_node.type == syms.expr_stmt and expr_node.children:
left_side = expr_node.children[0]
if isinstance(left_side, Leaf) and \
left_side.value == '__metaclass__':
return True
return False
def fixup_parse_tree(cls_node):
""" one-line classes don't get a suite in the parse tree so we add
one to normalize the tree
"""
for node in cls_node.children:
if node.type == syms.suite:
# already in the preferred format, do nothing
return
# !%@#! oneliners have no suite node, we have to fake one up
for i, node in enumerate(cls_node.children):
if node.type == token.COLON:
break
else:
raise ValueError("No class suite and no ':'!")
# move everything into a suite node
suite = Node(syms.suite, [])
while cls_node.children[i+1:]:
move_node = cls_node.children[i+1]
suite.append_child(move_node.clone())
move_node.remove()
cls_node.append_child(suite)
node = suite
def fixup_simple_stmt(parent, i, stmt_node):
""" if there is a semi-colon all the parts count as part of the same
simple_stmt. We just want the __metaclass__ part so we move
everything after the semi-colon into its own simple_stmt node
"""
for semi_ind, node in enumerate(stmt_node.children):
if node.type == token.SEMI: # *sigh*
break
else:
return
node.remove() # kill the semicolon
new_expr = Node(syms.expr_stmt, [])
new_stmt = Node(syms.simple_stmt, [new_expr])
while stmt_node.children[semi_ind:]:
move_node = stmt_node.children[semi_ind]
new_expr.append_child(move_node.clone())
move_node.remove()
parent.insert_child(i, new_stmt)
new_leaf1 = new_stmt.children[0].children[0]
old_leaf1 = stmt_node.children[0].children[0]
new_leaf1.prefix = old_leaf1.prefix
def remove_trailing_newline(node):
if node.children and node.children[-1].type == token.NEWLINE:
node.children[-1].remove()
def find_metas(cls_node):
# find the suite node (Mmm, sweet nodes)
for node in cls_node.children:
if node.type == syms.suite:
break
else:
raise ValueError("No class suite!")
# look for simple_stmt[ expr_stmt[ Leaf('__metaclass__') ] ]
for i, simple_node in list(enumerate(node.children)):
if simple_node.type == syms.simple_stmt and simple_node.children:
expr_node = simple_node.children[0]
if expr_node.type == syms.expr_stmt and expr_node.children:
# Check if the expr_node is a simple assignment.
left_node = expr_node.children[0]
if isinstance(left_node, Leaf) and \
left_node.value == u'__metaclass__':
# We found an assignment to __metaclass__.
fixup_simple_stmt(node, i, simple_node)
remove_trailing_newline(simple_node)
yield (node, i, simple_node)
def fixup_indent(suite):
""" If an INDENT is followed by a thing with a prefix then nuke the prefix
Otherwise we get in trouble when removing __metaclass__ at suite start
"""
kids = suite.children[::-1]
# find the first indent
while kids:
node = kids.pop()
if node.type == token.INDENT:
break
# find the first Leaf
while kids:
node = kids.pop()
if isinstance(node, Leaf) and node.type != token.DEDENT:
if node.prefix:
node.prefix = u''
return
else:
kids.extend(node.children[::-1])
class FixMetaclass(fixer_base.BaseFix):
PATTERN = """
classdef<any*>
"""
def transform(self, node, results):
if not has_metaclass(node):
return
fixup_parse_tree(node)
# find metaclasses, keep the last one
last_metaclass = None
for suite, i, stmt in find_metas(node):
last_metaclass = stmt
stmt.remove()
text_type = node.children[0].type # always Leaf(nnn, 'class')
# figure out what kind of classdef we have
if len(node.children) == 7:
# Node(classdef, ['class', 'name', '(', arglist, ')', ':', suite])
# 0 1 2 3 4 5 6
if node.children[3].type == syms.arglist:
arglist = node.children[3]
# Node(classdef, ['class', 'name', '(', 'Parent', ')', ':', suite])
else:
parent = node.children[3].clone()
arglist = Node(syms.arglist, [parent])
node.set_child(3, arglist)
elif len(node.children) == 6:
# Node(classdef, ['class', 'name', '(', ')', ':', suite])
# 0 1 2 3 4 5
arglist = Node(syms.arglist, [])
node.insert_child(3, arglist)
elif len(node.children) == 4:
# Node(classdef, ['class', 'name', ':', suite])
# 0 1 2 3
arglist = Node(syms.arglist, [])
node.insert_child(2, Leaf(token.RPAR, u')'))
node.insert_child(2, arglist)
node.insert_child(2, Leaf(token.LPAR, u'('))
else:
raise ValueError("Unexpected class definition")
# now stick the metaclass in the arglist
meta_txt = last_metaclass.children[0].children[0]
meta_txt.value = 'metaclass'
orig_meta_prefix = meta_txt.prefix
if arglist.children:
arglist.append_child(Leaf(token.COMMA, u','))
meta_txt.prefix = u' '
else:
meta_txt.prefix = u''
# compact the expression "metaclass = Meta" -> "metaclass=Meta"
expr_stmt = last_metaclass.children[0]
assert expr_stmt.type == syms.expr_stmt
expr_stmt.children[1].prefix = u''
expr_stmt.children[2].prefix = u''
arglist.append_child(last_metaclass)
fixup_indent(suite)
# check for empty suite
if not suite.children:
# one-liner that was just __metaclass__
suite.remove()
pass_leaf = Leaf(text_type, u'pass')
pass_leaf.prefix = orig_meta_prefix
node.append_child(pass_leaf)
node.append_child(Leaf(token.NEWLINE, u'\n'))
elif len(suite.children) > 1 and \
(suite.children[-2].type == token.INDENT and
suite.children[-1].type == token.DEDENT):
# there was only one line in the class body and it was __metaclass__
pass_leaf = Leaf(text_type, u'pass')
suite.insert_child(-1, pass_leaf)
suite.insert_child(-1, Leaf(token.NEWLINE, u'\n'))
|
helldorado/ansible
|
refs/heads/devel
|
lib/ansible/module_utils/network/cnos/cnos.py
|
38
|
# This code is part of Ansible, but is an independent component.
# This particular file snippet, and this file snippet only, is BSD licensed.
# Modules you write using this snippet, which is embedded dynamically by
# Ansible still belong to the author of the module, and may assign their own
# license to the complete work.
#
# Copyright (C) 2017 Lenovo, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# Contains utility methods
# Lenovo Networking
import time
import socket
import re
import json
try:
from ansible.module_utils.network.cnos import cnos_errorcodes
from ansible.module_utils.network.cnos import cnos_devicerules
HAS_LIB = True
except Exception:
HAS_LIB = False
from distutils.cmd import Command
from ansible.module_utils._text import to_text
from ansible.module_utils.basic import env_fallback
from ansible.module_utils.network.common.utils import to_list, EntityCollection
from ansible.module_utils.connection import Connection, exec_command
from ansible.module_utils.connection import ConnectionError
_DEVICE_CONFIGS = {}
_CONNECTION = None
_VALID_USER_ROLES = ['network-admin', 'network-operator']
cnos_provider_spec = {
'host': dict(),
'port': dict(type='int'),
'username': dict(fallback=(env_fallback, ['ANSIBLE_NET_USERNAME'])),
'password': dict(fallback=(env_fallback, ['ANSIBLE_NET_PASSWORD']),
no_log=True),
'ssh_keyfile': dict(fallback=(env_fallback, ['ANSIBLE_NET_SSH_KEYFILE']),
type='path'),
'authorize': dict(fallback=(env_fallback, ['ANSIBLE_NET_AUTHORIZE']),
type='bool'),
'auth_pass': dict(fallback=(env_fallback, ['ANSIBLE_NET_AUTH_PASS']),
no_log=True),
'timeout': dict(type='int'),
'context': dict(),
'passwords': dict()
}
cnos_argument_spec = {
'provider': dict(type='dict', options=cnos_provider_spec),
}
command_spec = {
'command': dict(key=True),
'prompt': dict(),
'answer': dict(),
'check_all': dict()
}
def get_provider_argspec():
return cnos_provider_spec
def check_args(module, warnings):
pass
def get_user_roles():
return _VALID_USER_ROLES
def get_connection(module):
global _CONNECTION
if _CONNECTION:
return _CONNECTION
_CONNECTION = Connection(module._socket_path)
context = None
try:
context = module.params['context']
except KeyError:
context = None
if context:
if context == 'system':
command = 'changeto system'
else:
command = 'changeto context %s' % context
_CONNECTION.get(command)
return _CONNECTION
def get_config(module, flags=None):
flags = [] if flags is None else flags
passwords = None
try:
passwords = module.params['passwords']
except KeyError:
passwords = None
if passwords:
cmd = 'more system:running-config'
else:
cmd = 'display running-config '
cmd += ' '.join(flags)
cmd = cmd.strip()
try:
return _DEVICE_CONFIGS[cmd]
except KeyError:
conn = get_connection(module)
out = conn.get(cmd)
cfg = to_text(out, errors='surrogate_then_replace').strip()
_DEVICE_CONFIGS[cmd] = cfg
return cfg
def to_commands(module, commands):
if not isinstance(commands, list):
raise AssertionError('argument must be of type <list>')
transform = EntityCollection(module, command_spec)
commands = transform(commands)
for index, item in enumerate(commands):
if module.check_mode and not item['command'].startswith('show'):
module.warn('only show commands are supported when using check '
'mode, not executing `%s`' % item['command'])
return commands
def run_commands(module, commands, check_rc=True):
connection = get_connection(module)
connection.get('enable')
commands = to_commands(module, to_list(commands))
responses = list()
for cmd in commands:
out = connection.get(**cmd)
responses.append(to_text(out, errors='surrogate_then_replace'))
return responses
def run_cnos_commands(module, commands, check_rc=True):
retVal = ''
enter_config = {'command': 'configure terminal', 'prompt': None,
'answer': None}
exit_config = {'command': 'end', 'prompt': None, 'answer': None}
commands.insert(0, enter_config)
commands.append(exit_config)
for cmd in commands:
retVal = retVal + '>> ' + cmd['command'] + '\n'
try:
responses = run_commands(module, commands, check_rc)
for response in responses:
retVal = retVal + '<< ' + response + '\n'
except Exception as e:
errMsg = ''
if hasattr(e, 'message'):
errMsg = e.message
else:
errMsg = str(e)
# Exception in Exceptions
if 'VLAN_ACCESS_MAP' in errMsg:
return retVal + '<<' + errMsg + '\n'
if 'confederation identifier' in errMsg:
return retVal + '<<' + errMsg + '\n'
# Add more here if required
retVal = retVal + '<< ' + 'Error-101 ' + errMsg + '\n'
return str(retVal)
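# Illustrative sketch only (the CLI commands are hypothetical, not taken from a
# real playbook): callers pass a list of command dicts, and run_cnos_commands
# wraps them between 'configure terminal' and 'end' before sending them.
#
#   cmds = [{'command': 'vlan 100', 'prompt': None, 'answer': None},
#           {'command': 'name test-vlan', 'prompt': None, 'answer': None}]
#   output = run_cnos_commands(module, cmds)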
def get_capabilities(module):
if hasattr(module, '_cnos_capabilities'):
return module._cnos_capabilities
try:
capabilities = Connection(module._socket_path).get_capabilities()
except ConnectionError as exc:
module.fail_json(msg=to_text(exc, errors='surrogate_then_replace'))
module._cnos_capabilities = json.loads(capabilities)
return module._cnos_capabilities
def load_config(module, config):
try:
conn = get_connection(module)
conn.get('enable')
resp = conn.edit_config(config)
return resp.get('response')
except ConnectionError as exc:
module.fail_json(msg=to_text(exc))
def get_defaults_flag(module):
rc, out, err = exec_command(module, 'display running-config ?')
out = to_text(out, errors='surrogate_then_replace')
commands = set()
for line in out.splitlines():
if line:
commands.add(line.strip().split()[0])
if 'all' in commands:
return 'all'
else:
return 'full'
def enterEnableModeForDevice(enablePassword, timeout, obj):
command = "enable\n"
pwdPrompt = "password:"
# debugOutput(enablePassword)
# debugOutput('\n')
obj.settimeout(int(timeout))
# Executing enable
obj.send(command)
flag = False
retVal = ""
count = 5
while not flag:
# If wait time is exceeded.
if(count == 0):
flag = True
else:
count = count - 1
# A delay of one second
time.sleep(1)
try:
buffByte = obj.recv(9999)
buff = buffByte.decode()
retVal = retVal + buff
# debugOutput(buff)
gotit = buff.find(pwdPrompt)
if(gotit != -1):
time.sleep(1)
if(enablePassword is None or enablePassword == ""):
return "\n Error-106"
obj.send(enablePassword)
obj.send("\r")
obj.send("\n")
time.sleep(1)
innerBuffByte = obj.recv(9999)
innerBuff = innerBuffByte.decode()
retVal = retVal + innerBuff
# debugOutput(innerBuff)
innerGotit = innerBuff.find("#")
if(innerGotit != -1):
return retVal
else:
gotit = buff.find("#")
if(gotit != -1):
return retVal
except Exception:
retVal = retVal + "\n Error-101"
flag = True
if(retVal == ""):
retVal = "\n Error-101"
return retVal
# EOM
def waitForDeviceResponse(command, prompt, timeout, obj):
obj.settimeout(int(timeout))
obj.send(command)
flag = False
retVal = ""
while not flag:
time.sleep(1)
try:
buffByte = obj.recv(9999)
buff = buffByte.decode()
retVal = retVal + buff
# debugOutput(retVal)
gotit = buff.find(prompt)
if(gotit != -1):
flag = True
except Exception:
# debugOutput(prompt)
if prompt == "(yes/no)?":
pass
elif prompt == "Password:":
pass
else:
retVal = retVal + "\n Error-101"
flag = True
return retVal
# EOM
def checkOutputForError(output):
retVal = ""
index = output.lower().find('error')
startIndex = index + 6
if(index == -1):
index = output.lower().find('invalid')
startIndex = index + 8
if(index == -1):
index = output.lower().find('cannot be enabled in l2 interface')
startIndex = index + 34
if(index == -1):
index = output.lower().find('incorrect')
startIndex = index + 10
if(index == -1):
index = output.lower().find('failure')
startIndex = index + 8
if(index == -1):
return None
endIndex = startIndex + 3
errorCode = output[startIndex:endIndex]
result = errorCode.isdigit()
if(result is not True):
return "Device returned an Error. Please check Results for more \
information"
errorFile = "dictionary/ErrorCodes.lvo"
try:
# with open(errorFile, 'r') as f:
f = open(errorFile, 'r')
for line in f:
if('=' in line):
data = line.split('=')
if(data[0].strip() == errorCode):
errorString = data[1].strip()
return errorString
except Exception:
errorString = cnos_errorcodes.getErrorString(errorCode)
errorString = errorString.strip()
return errorString
return "Error Code Not Found"
# EOM
def checkSanityofVariable(deviceType, variableId, variableValue):
retVal = ""
ruleFile = "dictionary/" + deviceType + "_rules.lvo"
ruleString = getRuleStringForVariable(deviceType, ruleFile, variableId)
retVal = validateValueAgainstRule(ruleString, variableValue)
return retVal
# EOM
def getRuleStringForVariable(deviceType, ruleFile, variableId):
retVal = ""
try:
# with open(ruleFile, 'r') as f:
f = open(ruleFile, 'r')
for line in f:
# debugOutput(line)
if(':' in line):
data = line.split(':')
# debugOutput(data[0])
if(data[0].strip() == variableId):
retVal = line
except Exception:
ruleString = cnos_devicerules.getRuleString(deviceType, variableId)
retVal = ruleString.strip()
return retVal
# EOM
def validateValueAgainstRule(ruleString, variableValue):
retVal = ""
if(ruleString == ""):
return 1
rules = ruleString.split(':')
variableType = rules[1].strip()
varRange = rules[2].strip()
if(variableType == "INTEGER"):
result = checkInteger(variableValue)
if(result is True):
return "ok"
else:
return "Error-111"
elif(variableType == "FLOAT"):
result = checkFloat(variableValue)
if(result is True):
return "ok"
else:
return "Error-112"
elif(variableType == "INTEGER_VALUE"):
int_range = varRange.split('-')
r = range(int(int_range[0].strip()), int(int_range[1].strip()))
if(checkInteger(variableValue) is not True):
return "Error-111"
result = int(variableValue) in r
if(result is True):
return "ok"
else:
return "Error-113"
elif(variableType == "INTEGER_VALUE_RANGE"):
int_range = varRange.split('-')
varLower = int_range[0].strip()
varHigher = int_range[1].strip()
r = range(int(varLower), int(varHigher))
val_range = variableValue.split('-')
try:
valLower = val_range[0].strip()
valHigher = val_range[1].strip()
except Exception:
return "Error-113"
if((checkInteger(valLower) is not True) or
(checkInteger(valHigher) is not True)):
# debugOutput("Error-114")
return "Error-114"
result = (int(valLower) in r) and (int(valHigher)in r) \
and (int(valLower) < int(valHigher))
if(result is True):
return "ok"
else:
# debugOutput("Error-113")
return "Error-113"
elif(variableType == "INTEGER_OPTIONS"):
int_options = varRange.split(',')
if(checkInteger(variableValue) is not True):
return "Error-111"
result = False
for opt in int_options:
if(opt.strip() == variableValue):
result = True
break
if(result is True):
return "ok"
else:
return "Error-115"
elif(variableType == "LONG"):
result = checkLong(variableValue)
if(result is True):
return "ok"
else:
return "Error-116"
elif(variableType == "LONG_VALUE"):
long_range = varRange.split('-')
r = range(int(long_range[0].strip()), int(long_range[1].strip()))
if(checkLong(variableValue) is not True):
# debugOutput(variableValue)
return "Error-116"
result = int(variableValue) in r
if(result is True):
return "ok"
else:
return "Error-113"
elif(variableType == "LONG_VALUE_RANGE"):
long_range = varRange.split('-')
r = range(int(long_range[0].strip()), int(long_range[1].strip()))
val_range = variableValue.split('-')
if((checkLong(val_range[0]) is not True) or
(checkLong(val_range[1]) is not True)):
return "Error-117"
result = (int(val_range[0]) in r) and (
int(val_range[1]) in r) and (int(val_range[0]) < int(val_range[1]))
if(result is True):
return "ok"
else:
return "Error-113"
elif(variableType == "LONG_OPTIONS"):
long_options = varRange.split(',')
if(checkLong(variableValue) is not True):
return "Error-116"
result = False
for opt in long_options:
if(opt.strip() == variableValue):
result = True
break
if(result is True):
return "ok"
else:
return "Error-115"
elif(variableType == "TEXT"):
if(variableValue == ""):
return "Error-118"
if(True is isinstance(variableValue, str)):
return "ok"
else:
return "Error-119"
elif(variableType == "NO_VALIDATION"):
if(variableValue == ""):
return "Error-118"
else:
return "ok"
elif(variableType == "TEXT_OR_EMPTY"):
if(variableValue is None or variableValue == ""):
return "ok"
if(isinstance(variableValue, str)):
return "ok"
else:
return "Error-119"
elif(variableType == "MATCH_TEXT"):
if(variableValue == ""):
return "Error-118"
if(isinstance(variableValue, str)):
if(varRange == variableValue):
return "ok"
else:
return "Error-120"
else:
return "Error-119"
elif(variableType == "MATCH_TEXT_OR_EMPTY"):
if(variableValue is None or variableValue == ""):
return "ok"
if(isinstance(variableValue, str)):
if(varRange == variableValue):
return "ok"
else:
return "Error-120"
else:
return "Error-119"
elif(variableType == "TEXT_OPTIONS"):
str_options = varRange.split(',')
if(isinstance(variableValue, str) is not True):
return "Error-119"
result = False
for opt in str_options:
if(opt.strip() == variableValue):
result = True
break
if(result is True):
return "ok"
else:
return "Error-115"
elif(variableType == "TEXT_OPTIONS_OR_EMPTY"):
if(variableValue is None or variableValue == ""):
return "ok"
str_options = varRange.split(',')
if(isinstance(variableValue, str) is not True):
return "Error-119"
result = False
for opt in str_options:
if(opt.strip() == variableValue):
result = True
break
if(result is True):
return "ok"
else:
return "Error-115"
elif(variableType == "IPV4Address"):
try:
socket.inet_pton(socket.AF_INET, variableValue)
result = True
except socket.error:
result = False
if(result is True):
return "ok"
else:
return "Error-121"
elif(variableType == "IPV4AddressWithMask"):
if(variableValue is None or variableValue == ""):
return "Error-119"
str_options = variableValue.split('/')
ipaddr = str_options[0]
mask = str_options[1]
try:
socket.inet_pton(socket.AF_INET, ipaddr)
if(checkInteger(mask) is True):
result = True
else:
result = False
except socket.error:
result = False
if(result is True):
return "ok"
else:
return "Error-121"
elif(variableType == "IPV6Address"):
try:
socket.inet_pton(socket.AF_INET6, variableValue)
result = True
except socket.error:
result = False
if(result is True):
return "ok"
else:
return "Error-122"
return retVal
# EOM
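# Illustrative sketch only (the rule id and range below are made up, not read
# from the real dictionary/*.lvo files): rule strings have the shape
# "<variable id>:<TYPE>:<range or options>", so a validation call could be:
#
#   validateValueAgainstRule("vlan_id:INTEGER_VALUE:1-3999", "100")  # -> "ok"
#   validateValueAgainstRule("vlan_id:INTEGER_VALUE:1-3999", "abc")  # -> "Error-111"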
def disablePaging(remote_conn):
remote_conn.send("terminal length 0\n")
time.sleep(1)
# Clear the buffer on the screen
outputByte = remote_conn.recv(1000)
output = outputByte.decode()
return output
# EOM
def checkInteger(s):
try:
int(s)
return True
except ValueError:
return False
# EOM
def checkFloat(s):
try:
float(s)
return True
except ValueError:
return False
# EOM
def checkLong(s):
try:
int(s)
return True
except ValueError:
return False
def debugOutput(command):
f = open('debugOutput.txt', 'a')
f.write(str(command)) # python will convert \n to os.linesep
f.close() # you can omit in most cases as the destructor will call it
# EOM
|
jmartinm/invenio
|
refs/heads/master
|
modules/bibformat/lib/elements/bfe_authority_author.py
|
18
|
# -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2006, 2007, 2008, 2009, 2010, 2011 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""BibFormat element - Prints author data from an Authority Record.
"""
import re
__revision__ = "$Id$"
def format_element(bfo, detail='no'):
""" Prints the data of an author authority record in HTML. By default prints
brief version.
@param detail: whether the 'detailed' rather than the 'brief' format
@type detail: 'yes' or 'no'
"""
from invenio.messages import gettext_set_language
_ = gettext_set_language(bfo.lang) # load the right message language
# return value
out = ""
# local function
def stringify_dict(d):
""" return string composed values in d """
_str = ""
if 'a' in d:
_str += d['a']
if 'd' in d:
_str += ", " + d['d']
return _str or ''
# brief
main_dicts = bfo.fields('100%%')
if len(main_dicts):
main_dict = main_dicts[0]
main = stringify_dict(main_dict)
out += "<p>" + "<strong>" + _("Main %s name") % _("author") + "</strong>" + ": " + main + "</p>"
# detail
if detail.lower() == "yes":
sees = [stringify_dict(see_dict) for see_dict in bfo.fields('400%%')]
sees = filter(None, sees) # fastest way to remove empty ""s
sees = [re.sub(",{2,}",",", x) for x in sees] # prevent ",,"
if len(sees):
out += "<p>" + "<strong>" + _("Variant(s)") + "</strong>" + ": " + ", ".join(sees) + "</p>"
see_alsos = [stringify_dict(see_also_dict) for see_also_dict in bfo.fields('500%%')]
see_alsos = filter(None, see_alsos) # fastest way to remove empty ""s
see_alsos = [re.sub(",{2,}",",", x) for x in see_alsos] # prevent ",,"
if len(see_alsos):
out += "<p>" + "<strong>" + _("See also") + "</strong>" + ": " + ", ".join(see_alsos) + "</p>"
# return
return out
def escape_values(bfo):
"""
Called by BibFormat in order to check if output of this element
should be escaped.
"""
return 0
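# Illustrative sketch (not part of Invenio): the element above reads subfields
# $a (name) and $d (dates) from field 100 for the main entry, and from fields
# 400/500 for variants and related names. The record content below is
# hypothetical.
#
#   100__ $a Doe, Jane $d 1900-1980   -> "Main author name: Doe, Jane, 1900-1980"
#   400__ $a Doe, J.                  -> listed under "Variant(s)" when detail='yes'
#   500__ $a Doe, John                -> listed under "See also" when detail='yes'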
|
jbenden/ansible
|
refs/heads/devel
|
lib/ansible/modules/network/netscaler/netscaler_cs_vserver.py
|
7
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright (c) 2017 Citrix Systems
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.0'}
DOCUMENTATION = '''
---
module: netscaler_cs_vserver
short_description: Manage content switching vserver
description:
- Manage content switching vserver
- This module is intended to run either on the ansible control node or a bastion (jumpserver) with access to the actual netscaler instance
version_added: "2.4"
author: George Nikolopoulos (@giorgos-nikolopoulos)
options:
name:
description:
- >-
Name for the content switching virtual server. Must begin with an ASCII alphanumeric or underscore
C(_) character, and must contain only ASCII alphanumeric, underscore C(_), hash C(#), period C(.), space,
colon C(:), at sign C(@), equal sign C(=), and hyphen C(-) characters.
- "Cannot be changed after the CS virtual server is created."
- "Minimum length = 1"
td:
description:
- >-
Integer value that uniquely identifies the traffic domain in which you want to configure the entity.
If you do not specify an ID, the entity becomes part of the default traffic domain, which has an ID
of 0.
- "Minimum value = 0"
- "Maximum value = 4094"
servicetype:
choices:
- 'HTTP'
- 'SSL'
- 'TCP'
- 'FTP'
- 'RTSP'
- 'SSL_TCP'
- 'UDP'
- 'DNS'
- 'SIP_UDP'
- 'SIP_TCP'
- 'SIP_SSL'
- 'ANY'
- 'RADIUS'
- 'RDP'
- 'MYSQL'
- 'MSSQL'
- 'DIAMETER'
- 'SSL_DIAMETER'
- 'DNS_TCP'
- 'ORACLE'
- 'SMPP'
description:
- "Protocol used by the virtual server."
ipv46:
description:
- "IP address of the content switching virtual server."
- "Minimum length = 1"
targettype:
choices:
- 'GSLB'
description:
- "Virtual server target type."
ippattern:
description:
- >-
IP address pattern, in dotted decimal notation, for identifying packets to be accepted by the virtual
server. The IP Mask parameter specifies which part of the destination IP address is matched against
the pattern. Mutually exclusive with the IP Address parameter.
- >-
For example, if the IP pattern assigned to the virtual server is C(198.51.100.0) and the IP mask is
C(255.255.240.0) (a forward mask), the first 20 bits in the destination IP addresses are matched with
the first 20 bits in the pattern. The virtual server accepts requests with IP addresses that range
from 198.51.96.1 to 198.51.111.254. You can also use a pattern such as C(0.0.2.2) and a mask such as
C(0.0.255.255) (a reverse mask).
- >-
If a destination IP address matches more than one IP pattern, the pattern with the longest match is
selected, and the associated virtual server processes the request. For example, if the virtual
servers, C(vs1) and C(vs2), have the same IP pattern, C(0.0.100.128), but different IP masks of C(0.0.255.255)
and C(0.0.224.255), a destination IP address of 198.51.100.128 has the longest match with the IP pattern
of C(vs1). If a destination IP address matches two or more virtual servers to the same extent, the
request is processed by the virtual server whose port number matches the port number in the request.
ipmask:
description:
- >-
IP mask, in dotted decimal notation, for the IP Pattern parameter. Can have leading or trailing
non-zero octets (for example, C(255.255.240.0) or C(0.0.255.255)). Accordingly, the mask specifies whether
the first n bits or the last n bits of the destination IP address in a client request are to be
matched with the corresponding bits in the IP pattern. The former is called a forward mask. The
latter is called a reverse mask.
range:
description:
- >-
Number of consecutive IP addresses, starting with the address specified by the IP Address parameter,
to include in a range of addresses assigned to this virtual server.
- "Minimum value = C(1)"
- "Maximum value = C(254)"
port:
description:
- "Port number for content switching virtual server."
- "Minimum value = 1"
- "Range C(1) - C(65535)"
- "* in CLI is represented as 65535 in NITRO API"
stateupdate:
choices:
- 'ENABLED'
- 'DISABLED'
description:
- >-
Enable state updates for a specific content switching virtual server. By default, the Content
Switching virtual server is always UP, regardless of the state of the Load Balancing virtual servers
bound to it. This parameter interacts with the global setting as follows:
- "Global Level | Vserver Level | Result"
- "ENABLED ENABLED ENABLED"
- "ENABLED DISABLED ENABLED"
- "DISABLED ENABLED ENABLED"
- "DISABLED DISABLED DISABLED"
- >-
If you want to enable state updates for only some content switching virtual servers, be sure to
disable the state update parameter.
cacheable:
description:
- >-
Use this option to specify whether a virtual server, used for load balancing or content switching,
routes requests to the cache redirection virtual server before sending it to the configured servers.
type: bool
redirecturl:
description:
- >-
URL to which traffic is redirected if the virtual server becomes unavailable. The service type of the
virtual server should be either C(HTTP) or C(SSL).
- >-
Caution: Make sure that the domain in the URL does not match the domain specified for a content
switching policy. If it does, requests are continuously redirected to the unavailable virtual server.
- "Minimum length = 1"
clttimeout:
description:
- "Idle time, in seconds, after which the client connection is terminated. The default values are:"
- "Minimum value = C(0)"
- "Maximum value = C(31536000)"
precedence:
choices:
- 'RULE'
- 'URL'
description:
- >-
Type of precedence to use for both RULE-based and URL-based policies on the content switching virtual
server. With the default C(RULE) setting, incoming requests are evaluated against the rule-based
content switching policies. If none of the rules match, the URL in the request is evaluated against
the URL-based content switching policies.
casesensitive:
description:
- >-
Consider case in URLs (for policies that use URLs instead of RULES). For example, with the C(on)
setting, the URLs /a/1.html and /A/1.HTML are treated differently and can have different targets (set
by content switching policies). With the C(off) setting, /a/1.html and /A/1.HTML are switched to the
same target.
type: bool
somethod:
choices:
- 'CONNECTION'
- 'DYNAMICCONNECTION'
- 'BANDWIDTH'
- 'HEALTH'
- 'NONE'
description:
- >-
Type of spillover used to divert traffic to the backup virtual server when the primary virtual server
reaches the spillover threshold. Connection spillover is based on the number of connections.
Bandwidth spillover is based on the total Kbps of incoming and outgoing traffic.
sopersistence:
choices:
- 'ENABLED'
- 'DISABLED'
description:
- "Maintain source-IP based persistence on primary and backup virtual servers."
sopersistencetimeout:
description:
- "Time-out value, in minutes, for spillover persistence."
- "Minimum value = C(2)"
- "Maximum value = C(1440)"
sothreshold:
description:
- >-
Depending on the spillover method, the maximum number of connections or the maximum total bandwidth
(Kbps) that a virtual server can handle before spillover occurs.
- "Minimum value = C(1)"
- "Maximum value = C(4294967287)"
sobackupaction:
choices:
- 'DROP'
- 'ACCEPT'
- 'REDIRECT'
description:
- >-
Action to be performed if spillover is to take effect, but no backup chain to spillover is usable or
exists.
redirectportrewrite:
choices:
- 'ENABLED'
- 'DISABLED'
description:
- "State of port rewrite while performing HTTP redirect."
downstateflush:
choices:
- 'ENABLED'
- 'DISABLED'
description:
- >-
Flush all active transactions associated with a virtual server whose state transitions from UP to
DOWN. Do not enable this option for applications that must complete their transactions.
backupvserver:
description:
- >-
Name of the backup virtual server that you are configuring. Must begin with an ASCII alphanumeric or
underscore C(_) character, and must contain only ASCII alphanumeric, underscore C(_), hash C(#), period C(.),
space C( ), colon C(:), at sign C(@), equal sign C(=), and hyphen C(-) characters. Can be changed after the
backup virtual server is created. You can assign a different backup virtual server or rename the
existing virtual server.
- "Minimum length = 1"
disableprimaryondown:
choices:
- 'ENABLED'
- 'DISABLED'
description:
- >-
Continue forwarding the traffic to backup virtual server even after the primary server comes UP from
the DOWN state.
insertvserveripport:
choices:
- 'OFF'
- 'VIPADDR'
- 'V6TOV4MAPPING'
description:
- >-
Insert the virtual server's VIP address and port number in the request header. Available values
function as follows:
- "C(VIPADDR) - Header contains the vserver's IP address and port number without any translation."
- "C(OFF) - The virtual IP and port header insertion option is disabled."
- >-
C(V6TOV4MAPPING) - Header contains the mapped IPv4 address corresponding to the IPv6 address of the
vserver and the port number. An IPv6 address can be mapped to a user-specified IPv4 address using the
set ns ip6 command.
vipheader:
description:
- "Name of virtual server IP and port header, for use with the VServer IP Port Insertion parameter."
- "Minimum length = 1"
rtspnat:
description:
- "Enable network address translation (NAT) for real-time streaming protocol (RTSP) connections."
type: bool
authenticationhost:
description:
- >-
FQDN of the authentication virtual server. The service type of the virtual server should be either
C(HTTP) or C(SSL).
- "Minimum length = 3"
- "Maximum length = 252"
authentication:
description:
- "Authenticate users who request a connection to the content switching virtual server."
type: bool
listenpolicy:
description:
- >-
String specifying the listen policy for the content switching virtual server. Can be either the name
of an existing expression or an in-line expression.
authn401:
description:
- "Enable HTTP 401-response based authentication."
type: bool
authnvsname:
description:
- >-
Name of authentication virtual server that authenticates the incoming user requests to this content
                switching virtual server.
- "Minimum length = 1"
- "Maximum length = 252"
push:
choices:
- 'ENABLED'
- 'DISABLED'
description:
- >-
Process traffic with the push virtual server that is bound to this content switching virtual server
(specified by the Push VServer parameter). The service type of the push virtual server should be
either C(HTTP) or C(SSL).
pushvserver:
description:
- >-
Name of the load balancing virtual server, of type C(PUSH) or C(SSL_PUSH), to which the server pushes
updates received on the client-facing load balancing virtual server.
- "Minimum length = 1"
pushlabel:
description:
- >-
Expression for extracting the label from the response received from server. This string can be either
an existing rule name or an inline expression. The service type of the virtual server should be
either C(HTTP) or C(SSL).
pushmulticlients:
description:
- >-
Allow multiple Web 2.0 connections from the same client to connect to the virtual server and expect
updates.
type: bool
tcpprofilename:
description:
- "Name of the TCP profile containing TCP configuration settings for the virtual server."
- "Minimum length = 1"
- "Maximum length = 127"
httpprofilename:
description:
- >-
Name of the HTTP profile containing HTTP configuration settings for the virtual server. The service
type of the virtual server should be either C(HTTP) or C(SSL).
- "Minimum length = 1"
- "Maximum length = 127"
dbprofilename:
description:
- "Name of the DB profile."
- "Minimum length = 1"
- "Maximum length = 127"
oracleserverversion:
choices:
- '10G'
- '11G'
description:
- "Oracle server version."
comment:
description:
- "Information about this virtual server."
mssqlserverversion:
choices:
- '70'
- '2000'
- '2000SP1'
- '2005'
- '2008'
- '2008R2'
- '2012'
- '2014'
description:
- "The version of the MSSQL server."
l2conn:
description:
- "Use L2 Parameters to identify a connection."
mysqlprotocolversion:
description:
- "The protocol version returned by the mysql vserver."
mysqlserverversion:
description:
- "The server version string returned by the mysql vserver."
- "Minimum length = 1"
- "Maximum length = 31"
mysqlcharacterset:
description:
- "The character set returned by the mysql vserver."
mysqlservercapabilities:
description:
- "The server capabilities returned by the mysql vserver."
appflowlog:
choices:
- 'ENABLED'
- 'DISABLED'
description:
- "Enable logging appflow flow information."
netprofile:
description:
- "The name of the network profile."
- "Minimum length = 1"
- "Maximum length = 127"
icmpvsrresponse:
choices:
- 'PASSIVE'
- 'ACTIVE'
description:
- "Can be active or passive."
rhistate:
choices:
- 'PASSIVE'
- 'ACTIVE'
description:
- "A host route is injected according to the setting on the virtual servers"
- >-
* If set to C(PASSIVE) on all the virtual servers that share the IP address, the appliance always
injects the hostroute.
- >-
* If set to C(ACTIVE) on all the virtual servers that share the IP address, the appliance injects even
if one virtual server is UP.
- >-
                * If set to C(ACTIVE) on some virtual servers and C(PASSIVE) on the others, the appliance injects even if
one virtual server set to C(ACTIVE) is UP.
authnprofile:
description:
- "Name of the authentication profile to be used when authentication is turned on."
dnsprofilename:
description:
- >-
Name of the DNS profile to be associated with the VServer. DNS profile properties will applied to the
transactions processed by a VServer. This parameter is valid only for DNS and DNS-TCP VServers.
- "Minimum length = 1"
- "Maximum length = 127"
domainname:
description:
- "Domain name for which to change the time to live (TTL) and/or backup service IP address."
- "Minimum length = 1"
ttl:
description:
- "."
- "Minimum value = C(1)"
backupip:
description:
- "."
- "Minimum length = 1"
cookiedomain:
description:
- "."
- "Minimum length = 1"
cookietimeout:
description:
- "."
- "Minimum value = C(0)"
- "Maximum value = C(1440)"
sitedomainttl:
description:
- "."
- "Minimum value = C(1)"
disabled:
description:
- When set to C(yes) the cs vserver will be disabled.
- When set to C(no) the cs vserver will be enabled.
- >-
Note that due to limitations of the underlying NITRO API a C(disabled) state change alone
does not cause the module result to report a changed status.
type: bool
default: 'no'
extends_documentation_fragment: netscaler
requirements:
- nitro python sdk
'''
EXAMPLES = '''
# policy_1 must have been already created with the netscaler_cs_policy module
# lbvserver_1 must have been already created with the netscaler_lb_vserver module
- name: Setup content switching vserver
delegate_to: localhost
netscaler_cs_vserver:
nsip: 172.18.0.2
nitro_user: nsroot
nitro_pass: nsroot
state: present
name: cs_vserver_1
ipv46: 192.168.1.1
port: 80
servicetype: HTTP
policybindings:
- policyname: policy_1
targetlbvserver: lbvserver_1
'''
RETURN = '''
loglines:
description: list of logged messages by the module
returned: always
type: list
sample: ['message 1', 'message 2']
msg:
description: Message detailing the failure reason
returned: failure
type: str
sample: "Action does not exist"
diff:
description: List of differences between the actual configured object and the configuration specified in the module
returned: failure
type: dict
sample: { 'clttimeout': 'difference. ours: (float) 100.0 other: (float) 60.0' }
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.netscaler import (
ConfigProxy,
get_nitro_client,
netscaler_common_arguments,
log,
loglines,
ensure_feature_is_enabled,
get_immutables_intersection
)
try:
from nssrc.com.citrix.netscaler.nitro.resource.config.cs.csvserver import csvserver
from nssrc.com.citrix.netscaler.nitro.resource.config.cs.csvserver_cspolicy_binding import csvserver_cspolicy_binding
from nssrc.com.citrix.netscaler.nitro.resource.config.ssl.sslvserver_sslcertkey_binding import sslvserver_sslcertkey_binding
from nssrc.com.citrix.netscaler.nitro.exception.nitro_exception import nitro_exception
PYTHON_SDK_IMPORTED = True
except ImportError as e:
PYTHON_SDK_IMPORTED = False
def cs_vserver_exists(client, module):
if csvserver.count_filtered(client, 'name:%s' % module.params['name']) > 0:
return True
else:
return False
def cs_vserver_identical(client, module, csvserver_proxy):
csvserver_list = csvserver.get_filtered(client, 'name:%s' % module.params['name'])
diff_dict = csvserver_proxy.diff_object(csvserver_list[0])
if len(diff_dict) == 0:
return True
else:
return False
def get_configured_policybindings(client, module):
    log('Getting configured policy bindings')
bindings = {}
if module.params['policybindings'] is None:
return bindings
for binding in module.params['policybindings']:
binding['name'] = module.params['name']
key = binding['policyname']
binding_proxy = ConfigProxy(
actual=csvserver_cspolicy_binding(),
client=client,
readwrite_attrs=[
'priority',
'bindpoint',
'policyname',
'labelname',
'gotopriorityexpression',
'targetlbvserver',
'name',
'invoke',
'labeltype',
],
readonly_attrs=[],
attribute_values_dict=binding
)
bindings[key] = binding_proxy
return bindings
def get_actual_policybindings(client, module):
    log('Getting actual policy bindings')
bindings = {}
try:
count = csvserver_cspolicy_binding.count(client, name=module.params['name'])
if count == 0:
return bindings
except nitro_exception as e:
if e.errorcode == 258:
return bindings
else:
raise
for binding in csvserver_cspolicy_binding.get(client, name=module.params['name']):
key = binding.policyname
bindings[key] = binding
return bindings
def cs_policybindings_identical(client, module):
log('Checking policy bindings identical')
actual_bindings = get_actual_policybindings(client, module)
configured_bindings = get_configured_policybindings(client, module)
actual_keyset = set(actual_bindings.keys())
configured_keyset = set(configured_bindings.keys())
if len(actual_keyset ^ configured_keyset) > 0:
return False
# Compare item to item
for key in actual_bindings.keys():
configured_binding_proxy = configured_bindings[key]
actual_binding_object = actual_bindings[key]
if not configured_binding_proxy.has_equal_attributes(actual_binding_object):
return False
# Fallthrough to success
return True
def sync_cs_policybindings(client, module):
log('Syncing cs policybindings')
# Delete all actual bindings
for binding in get_actual_policybindings(client, module).values():
log('Deleting binding for policy %s' % binding.policyname)
csvserver_cspolicy_binding.delete(client, binding)
# Add all configured bindings
for binding in get_configured_policybindings(client, module).values():
log('Adding binding for policy %s' % binding.policyname)
binding.add()
def ssl_certkey_bindings_identical(client, module):
log('Checking if ssl cert key bindings are identical')
vservername = module.params['name']
if sslvserver_sslcertkey_binding.count(client, vservername) == 0:
bindings = []
else:
bindings = sslvserver_sslcertkey_binding.get(client, vservername)
if module.params['ssl_certkey'] is None:
if len(bindings) == 0:
return True
else:
return False
else:
certificate_list = [item.certkeyname for item in bindings]
if certificate_list == [module.params['ssl_certkey']]:
return True
else:
return False
def ssl_certkey_bindings_sync(client, module):
log('Syncing certkey bindings')
vservername = module.params['name']
if sslvserver_sslcertkey_binding.count(client, vservername) == 0:
bindings = []
else:
bindings = sslvserver_sslcertkey_binding.get(client, vservername)
# Delete existing bindings
for binding in bindings:
log('Deleting existing binding for certkey %s' % binding.certkeyname)
sslvserver_sslcertkey_binding.delete(client, binding)
# Add binding if appropriate
if module.params['ssl_certkey'] is not None:
log('Adding binding for certkey %s' % module.params['ssl_certkey'])
binding = sslvserver_sslcertkey_binding()
binding.vservername = module.params['name']
binding.certkeyname = module.params['ssl_certkey']
sslvserver_sslcertkey_binding.add(client, binding)
def diff_list(client, module, csvserver_proxy):
csvserver_list = csvserver.get_filtered(client, 'name:%s' % module.params['name'])
return csvserver_proxy.diff_object(csvserver_list[0])
def do_state_change(client, module, csvserver_proxy):
if module.params['disabled']:
log('Disabling cs vserver')
result = csvserver.disable(client, csvserver_proxy.actual)
else:
log('Enabling cs vserver')
result = csvserver.enable(client, csvserver_proxy.actual)
return result
def main():
module_specific_arguments = dict(
name=dict(type='str'),
td=dict(type='float'),
servicetype=dict(
type='str',
choices=[
'HTTP',
'SSL',
'TCP',
'FTP',
'RTSP',
'SSL_TCP',
'UDP',
'DNS',
'SIP_UDP',
'SIP_TCP',
'SIP_SSL',
'ANY',
'RADIUS',
'RDP',
'MYSQL',
'MSSQL',
'DIAMETER',
'SSL_DIAMETER',
'DNS_TCP',
'ORACLE',
'SMPP'
]
),
ipv46=dict(type='str'),
dnsrecordtype=dict(
type='str',
choices=[
'A',
'AAAA',
'CNAME',
'NAPTR',
]
),
ippattern=dict(type='str'),
ipmask=dict(type='str'),
range=dict(type='float'),
port=dict(type='int'),
stateupdate=dict(
type='str',
choices=[
'ENABLED',
'DISABLED',
]
),
cacheable=dict(type='bool'),
redirecturl=dict(type='str'),
clttimeout=dict(type='float'),
precedence=dict(
type='str',
choices=[
'RULE',
'URL',
]
),
casesensitive=dict(type='bool'),
somethod=dict(
type='str',
choices=[
'CONNECTION',
'DYNAMICCONNECTION',
'BANDWIDTH',
'HEALTH',
'NONE',
]
),
sopersistence=dict(
type='str',
choices=[
'ENABLED',
'DISABLED',
]
),
sopersistencetimeout=dict(type='float'),
sothreshold=dict(type='float'),
sobackupaction=dict(
type='str',
choices=[
'DROP',
'ACCEPT',
'REDIRECT',
]
),
redirectportrewrite=dict(
type='str',
choices=[
'ENABLED',
'DISABLED',
]
),
downstateflush=dict(
type='str',
choices=[
'ENABLED',
'DISABLED',
]
),
disableprimaryondown=dict(
type='str',
choices=[
'ENABLED',
'DISABLED',
]
),
insertvserveripport=dict(
type='str',
choices=[
'OFF',
'VIPADDR',
'V6TOV4MAPPING',
]
),
vipheader=dict(type='str'),
rtspnat=dict(type='bool'),
authenticationhost=dict(type='str'),
authentication=dict(type='bool'),
listenpolicy=dict(type='str'),
authn401=dict(type='bool'),
authnvsname=dict(type='str'),
push=dict(
type='str',
choices=[
'ENABLED',
'DISABLED',
]
),
pushvserver=dict(type='str'),
pushlabel=dict(type='str'),
pushmulticlients=dict(type='bool'),
tcpprofilename=dict(type='str'),
httpprofilename=dict(type='str'),
dbprofilename=dict(type='str'),
oracleserverversion=dict(
type='str',
choices=[
'10G',
'11G',
]
),
comment=dict(type='str'),
mssqlserverversion=dict(
type='str',
choices=[
'70',
'2000',
'2000SP1',
'2005',
'2008',
'2008R2',
'2012',
'2014',
]
),
l2conn=dict(type='bool'),
mysqlprotocolversion=dict(type='float'),
mysqlserverversion=dict(type='str'),
mysqlcharacterset=dict(type='float'),
mysqlservercapabilities=dict(type='float'),
appflowlog=dict(
type='str',
choices=[
'ENABLED',
'DISABLED',
]
),
netprofile=dict(type='str'),
icmpvsrresponse=dict(
type='str',
choices=[
'PASSIVE',
'ACTIVE',
]
),
rhistate=dict(
type='str',
choices=[
'PASSIVE',
'ACTIVE',
]
),
authnprofile=dict(type='str'),
dnsprofilename=dict(type='str'),
)
hand_inserted_arguments = dict(
policybindings=dict(type='list'),
ssl_certkey=dict(type='str'),
disabled=dict(
type='bool',
default=False
),
)
argument_spec = dict()
argument_spec.update(netscaler_common_arguments)
argument_spec.update(module_specific_arguments)
argument_spec.update(hand_inserted_arguments)
module = AnsibleModule(
argument_spec=argument_spec,
supports_check_mode=True,
)
module_result = dict(
changed=False,
failed=False,
loglines=loglines,
)
# Fail the module if imports failed
if not PYTHON_SDK_IMPORTED:
module.fail_json(msg='Could not load nitro python sdk')
# Fallthrough to rest of execution
client = get_nitro_client(module)
try:
client.login()
except nitro_exception as e:
msg = "nitro exception during login. errorcode=%s, message=%s" % (str(e.errorcode), e.message)
module.fail_json(msg=msg)
except Exception as e:
if str(type(e)) == "<class 'requests.exceptions.ConnectionError'>":
module.fail_json(msg='Connection error %s' % str(e))
elif str(type(e)) == "<class 'requests.exceptions.SSLError'>":
module.fail_json(msg='SSL Error %s' % str(e))
else:
module.fail_json(msg='Unexpected error during login %s' % str(e))
readwrite_attrs = [
'name',
'td',
'servicetype',
'ipv46',
'dnsrecordtype',
'ippattern',
'ipmask',
'range',
'port',
'stateupdate',
'cacheable',
'redirecturl',
'clttimeout',
'precedence',
'casesensitive',
'somethod',
'sopersistence',
'sopersistencetimeout',
'sothreshold',
'sobackupaction',
'redirectportrewrite',
'downstateflush',
'disableprimaryondown',
'insertvserveripport',
'vipheader',
'rtspnat',
'authenticationhost',
'authentication',
'listenpolicy',
'authn401',
'authnvsname',
'push',
'pushvserver',
'pushlabel',
'pushmulticlients',
'tcpprofilename',
'httpprofilename',
'dbprofilename',
'oracleserverversion',
'comment',
'mssqlserverversion',
'l2conn',
'mysqlprotocolversion',
'mysqlserverversion',
'mysqlcharacterset',
'mysqlservercapabilities',
'appflowlog',
'netprofile',
'icmpvsrresponse',
'rhistate',
'authnprofile',
'dnsprofilename',
]
readonly_attrs = [
'ip',
'value',
'ngname',
'type',
'curstate',
'sc',
'status',
'cachetype',
'redirect',
'homepage',
'dnsvservername',
'domain',
'policyname',
'servicename',
'weight',
'cachevserver',
'targetvserver',
'priority',
'url',
'gotopriorityexpression',
'bindpoint',
'invoke',
'labeltype',
'labelname',
'gt2gb',
'statechangetimesec',
'statechangetimemsec',
'tickssincelaststatechange',
'ruletype',
'lbvserver',
'targetlbvserver',
]
immutable_attrs = [
'name',
'td',
'servicetype',
'ipv46',
'targettype',
'range',
'port',
'state',
'vipheader',
'newname',
]
transforms = {
'cacheable': ['bool_yes_no'],
'rtspnat': ['bool_on_off'],
'authn401': ['bool_on_off'],
'casesensitive': ['bool_on_off'],
'authentication': ['bool_on_off'],
'l2conn': ['bool_on_off'],
'pushmulticlients': ['bool_yes_no'],
}
# Instantiate config proxy
csvserver_proxy = ConfigProxy(
actual=csvserver(),
client=client,
attribute_values_dict=module.params,
readwrite_attrs=readwrite_attrs,
readonly_attrs=readonly_attrs,
immutable_attrs=immutable_attrs,
transforms=transforms,
)
try:
ensure_feature_is_enabled(client, 'CS')
# Apply appropriate state
if module.params['state'] == 'present':
log('Applying actions for state present')
if not cs_vserver_exists(client, module):
if not module.check_mode:
csvserver_proxy.add()
if module.params['save_config']:
client.save_config()
module_result['changed'] = True
elif not cs_vserver_identical(client, module, csvserver_proxy):
# Check if we try to change value of immutable attributes
immutables_changed = get_immutables_intersection(csvserver_proxy, diff_list(client, module, csvserver_proxy).keys())
if immutables_changed != []:
module.fail_json(
msg='Cannot update immutable attributes %s' % (immutables_changed,),
diff=diff_list(client, module, csvserver_proxy),
**module_result
)
if not module.check_mode:
csvserver_proxy.update()
if module.params['save_config']:
client.save_config()
module_result['changed'] = True
else:
module_result['changed'] = False
# Check policybindings
if not cs_policybindings_identical(client, module):
if not module.check_mode:
sync_cs_policybindings(client, module)
if module.params['save_config']:
client.save_config()
module_result['changed'] = True
if module.params['servicetype'] != 'SSL' and module.params['ssl_certkey'] is not None:
module.fail_json(msg='ssl_certkey is applicable only to SSL vservers', **module_result)
# Check ssl certkey bindings
if module.params['servicetype'] == 'SSL':
if not ssl_certkey_bindings_identical(client, module):
if not module.check_mode:
ssl_certkey_bindings_sync(client, module)
module_result['changed'] = True
if not module.check_mode:
res = do_state_change(client, module, csvserver_proxy)
if res.errorcode != 0:
msg = 'Error when setting disabled state. errorcode: %s message: %s' % (res.errorcode, res.message)
module.fail_json(msg=msg, **module_result)
# Sanity check for state
if not module.check_mode:
log('Sanity checks for state present')
if not cs_vserver_exists(client, module):
module.fail_json(msg='CS vserver does not exist', **module_result)
if not cs_vserver_identical(client, module, csvserver_proxy):
module.fail_json(msg='CS vserver differs from configured', diff=diff_list(client, module, csvserver_proxy), **module_result)
if not cs_policybindings_identical(client, module):
module.fail_json(msg='Policy bindings differ')
if module.params['servicetype'] == 'SSL':
if not ssl_certkey_bindings_identical(client, module):
                        module.fail_json(msg='ssl certkey bindings not identical', **module_result)
elif module.params['state'] == 'absent':
log('Applying actions for state absent')
if cs_vserver_exists(client, module):
if not module.check_mode:
csvserver_proxy.delete()
if module.params['save_config']:
client.save_config()
module_result['changed'] = True
else:
module_result['changed'] = False
# Sanity check for state
if not module.check_mode:
log('Sanity checks for state absent')
if cs_vserver_exists(client, module):
module.fail_json(msg='CS vserver still exists', **module_result)
except nitro_exception as e:
msg = "nitro exception errorcode=%s, message=%s" % (str(e.errorcode), e.message)
module.fail_json(msg=msg, **module_result)
client.logout()
module.exit_json(**module_result)
if __name__ == "__main__":
main()
|
mozilla/kitsune
|
refs/heads/master
|
kitsune/dashboards/models.py
|
1
|
import logging
from datetime import date, timedelta
from django.conf import settings
from django.db import connection, close_old_connections
from django.db import models
from django.utils.translation import ugettext_lazy as _lazy
from kitsune.dashboards import LAST_7_DAYS, LAST_30_DAYS, LAST_90_DAYS, ALL_TIME, PERIODS
from kitsune.products.models import Product
from kitsune.sumo.models import ModelBase, LocaleField
from kitsune.sumo import googleanalytics
from kitsune.wiki.models import Document
log = logging.getLogger("k.dashboards")
def period_dates(period):
"""Return when each period begins and ends."""
end = date.today() - timedelta(days=1) # yesterday
if period == LAST_7_DAYS:
start = end - timedelta(days=7)
elif period == LAST_30_DAYS:
start = end - timedelta(days=30)
elif period == LAST_90_DAYS:
start = end - timedelta(days=90)
    elif period == ALL_TIME:
start = settings.GA_START_DATE
return start, end
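# Illustrative sketch (not part of the original module): the window returned by
# period_dates() always ends yesterday. For example, if today were 2021-06-10:
#
#   >>> period_dates(LAST_7_DAYS)
#   (datetime.date(2021, 6, 2), datetime.date(2021, 6, 9))
#   >>> period_dates(ALL_TIME)[0] == settings.GA_START_DATE
#   True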
class WikiDocumentVisits(ModelBase):
"""Web stats for Knowledge Base Documents"""
document = models.ForeignKey(Document, on_delete=models.CASCADE, related_name="visits")
visits = models.IntegerField(db_index=True)
period = models.IntegerField(choices=PERIODS) # indexed by unique_together
class Meta(object):
unique_together = ("period", "document")
@classmethod
def reload_period_from_analytics(cls, period, verbose=False):
"""Replace the stats for the given period from Google Analytics."""
counts = googleanalytics.pageviews_by_document(*period_dates(period), verbose=verbose)
if counts:
# Close any existing connections because our load balancer times
# them out at 5 minutes and the GA calls take forever.
close_old_connections()
# Delete and remake the rows:
# Horribly inefficient until
# http://code.djangoproject.com/ticket/9519 is fixed.
# cls.objects.filter(period=period).delete()
# Instead, we use raw SQL!
cursor = connection.cursor()
cursor.execute(
"DELETE FROM `dashboards_wikidocumentvisits`" " WHERE `period` = %s", [period]
)
# Now we create them again with fresh data.
for doc_id, visits in counts.items():
cls.objects.create(document=Document(pk=doc_id), visits=visits, period=period)
else:
# Don't erase interesting data if there's nothing to replace it:
log.warning("Google Analytics returned no interesting data," " so I kept what I had.")
L10N_TOP20_CODE = "percent_localized_top20"
L10N_TOP100_CODE = "percent_localized_top100"
L10N_ALL_CODE = "percent_localized_all"
L10N_ACTIVE_CONTRIBUTORS_CODE = "active_contributors"
METRIC_CODE_CHOICES = (
(L10N_TOP20_CODE, _lazy("Percent Localized: Top 20")),
(L10N_TOP100_CODE, _lazy("Percent Localized: Top 100")),
(L10N_ALL_CODE, _lazy("Percent Localized: All")),
(L10N_ACTIVE_CONTRIBUTORS_CODE, _lazy("Monthly Active Contributors")),
)
class WikiMetric(ModelBase):
"""A single numeric measurement for a locale, product and date.
For example, the percentage of all FxOS articles localized to Spanish."""
code = models.CharField(db_index=True, max_length=255, choices=METRIC_CODE_CHOICES)
locale = LocaleField(db_index=True, null=True, blank=True)
product = models.ForeignKey(Product, on_delete=models.CASCADE, null=True, blank=True)
date = models.DateField()
value = models.FloatField()
class Meta(object):
unique_together = ("code", "product", "locale", "date")
ordering = ["-date"]
def __str__(self):
return "[{date}][{locale}][{product}] {code}: {value}".format(
date=self.date,
code=self.code,
locale=self.locale,
value=self.value,
product=self.product,
)
|
Jacobinski/SaltBot
|
refs/heads/master
|
website/interface.py
|
1
|
'''
The interface module for SaltBot
'''
from bs4 import BeautifulSoup
import requests
import time
import json
URL_JSON = "http://www.saltybet.com/state.json"
class interface:
def __init__(self, session, request):
# Match session
self.session = session
self.request = request
self.match_json = json.loads(session.get(URL_JSON).content)
# Match Details
soup = BeautifulSoup(request.content, 'html.parser')
self.balance = int(soup.find(id="balance").string.replace(',',''))
#TODO: What does this field do?
def get_alert(self):
return self.match_json['alert']
def get_balance(self):
return self.balance
def get_betting_status(self):
return self.match_json['status']
def get_json(self):
return self.match_json
def get_player1_name(self):
return self.match_json['p1name']
def get_player1_wagers(self):
return str(self.match_json['p1total']).replace(',','')
def get_player2_name(self):
return self.match_json['p2name']
def get_player2_wagers(self):
return str(self.match_json['p2total']).replace(',','')
def get_remaining(self):
return self.match_json['remaining']
def update(self):
# Refresh the request
self.request = self.session.get(self.request.url)
# Check to see if the match status changed
new_match = json.loads(self.session.get(URL_JSON).content)
if (self.match_json != new_match):
soup = BeautifulSoup(self.request.content, 'html.parser')
self.match_json = new_match
self.balance = int(soup.find(id="balance").string.replace(',',''))
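# Illustrative usage sketch (not part of the original module): the interface
# expects an authenticated requests.Session plus an already-fetched page that
# contains the "balance" element, so the login step (omitted here) must happen
# first. The URLs and flow below are assumptions.
#
#   import requests
#   session = requests.Session()
#   # ... POST credentials to saltybet.com using `session` ...
#   page = session.get("http://www.saltybet.com")
#   iface = interface(session, page)
#   print(iface.get_betting_status(), iface.get_balance())
#   iface.update()  # re-fetch the page and the state.json snapshot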
|
tomevans/spectroscopy
|
refs/heads/master
|
setup.py
|
2
|
from distutils.core import setup
setup( name='spectroscopy',
version='0.0.1',
description='A Python package for extracting spectra from single slit and multiple slit spectroscopy frames.',
author='Tom Evans',
author_email='tom.evans@astro.ox.ac.uk',
url='https://github.com/tomevans/spectroscopy',
packages=[ 'spectroscopy' ],
)
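# Illustrative usage note (not part of the original file): with this setup.py a
# source checkout would typically be installed with either of
#
#   python setup.py install
#   pip install .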
|
krkeegan/insteon-mngr
|
refs/heads/master
|
insteon_mngr/sequences/modem.py
|
3
|
from insteon_mngr.trigger import PLMTrigger
from insteon_mngr.sequences.common import WriteALDBRecord
class WriteALDBRecordModem(WriteALDBRecord):
def _perform_write(self):
super()._perform_write()
if self.in_use is True:
self.data1 = self._linked_group.device.dev_cat
self.data2 = self._linked_group.device.sub_cat
self.data3 = self._linked_group.device.firmware
msg = self._group.device.create_message('all_link_manage_rec')
msg_attributes = self._compiled_record()
msg.insert_bytes_into_raw(msg_attributes)
trigger_attributes = {
'plm_cmd': 0x6F,
'ctrl_code': msg_attributes['ctrl_code'],
'link_flags': msg_attributes['link_flags'],
'group': msg_attributes['group'],
'dev_addr_hi': msg_attributes['dev_addr_hi'],
'dev_addr_mid': msg_attributes['dev_addr_mid'],
'dev_addr_low': msg_attributes['dev_addr_low'],
'data_1': msg_attributes['data_1'],
'data_2': msg_attributes['data_2'],
'data_3': msg_attributes['data_3']
}
trigger = PLMTrigger(plm=self._group.device,
attributes=trigger_attributes)
trigger.trigger_function = lambda: self._save_record()
trigger.name = self._group.device.dev_addr_str + 'write_aldb'
trigger.queue()
self._group.device.queue_device_msg(msg)
def _ctrl_code(self, search_bytes):
records = self._group.device.aldb.get_matching_records(search_bytes)
ctrl_code = 0x20
if len(records) == 0 and self.controller is True:
ctrl_code = 0x40
if len(records) == 0 and self.controller is False:
ctrl_code = 0x41
return ctrl_code
def _compiled_record(self):
ret = super()._compiled_record()
del ret['msb']
del ret['lsb']
if not self.in_use:
record = self._group.device.aldb.get_record(self.key)
record_parsed = record.parse_record()
ret['link_flags'] = record_parsed['link_flags']
ret['group'] = record_parsed['group']
ret['dev_addr_hi'] = record_parsed['dev_addr_hi']
ret['dev_addr_mid'] = record_parsed['dev_addr_mid']
ret['dev_addr_low'] = record_parsed['dev_addr_low']
ret['ctrl_code'] = 0x80
else:
search_bytes = {
'link_flags': ret['link_flags'],
'group': ret['group'],
'dev_addr_hi': ret['dev_addr_hi'],
'dev_addr_mid': ret['dev_addr_mid'],
'dev_addr_low': ret['dev_addr_low']
}
ret['ctrl_code'] = self._ctrl_code(search_bytes)
return ret
def _save_record(self):
compiled = self._compiled_record()
aldb_entry = bytearray([
compiled['link_flags'],
compiled['group'],
compiled['dev_addr_hi'],
compiled['dev_addr_mid'],
compiled['dev_addr_low'],
compiled['data_1'],
compiled['data_2'],
compiled['data_3']
])
if self.in_use is False:
aldb_entry = bytearray(8)
record = self._group.device.aldb.get_record(self.key)
record.edit_record(aldb_entry)
self._on_success()
def _write_failure(self):
self._on_failure()
def start(self):
'''Starts the sequence to write the aldb record'''
if self.linked_group is None and self.in_use:
print('error no linked_group defined')
else:
self._perform_write()
|
bhairavmehta95/flashcard-helper-alexa-skill
|
refs/heads/master
|
sqlalchemy/engine/default.py
|
10
|
# engine/default.py
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""Default implementations of per-dialect sqlalchemy.engine classes.
These are semi-private implementation classes which are only of importance
to database dialect authors; dialects will usually use the classes here
as the base class for their own corresponding classes.
"""
import re
import random
from . import reflection, interfaces, result
from ..sql import compiler, expression, schema
from .. import types as sqltypes
from .. import exc, util, pool, processors
import codecs
import weakref
from .. import event
AUTOCOMMIT_REGEXP = re.compile(
r'\s*(?:UPDATE|INSERT|CREATE|DELETE|DROP|ALTER)',
re.I | re.UNICODE)
# When we're handed literal SQL, ensure it's a SELECT query
SERVER_SIDE_CURSOR_RE = re.compile(
r'\s*SELECT',
re.I | re.UNICODE)
class DefaultDialect(interfaces.Dialect):
"""Default implementation of Dialect"""
statement_compiler = compiler.SQLCompiler
ddl_compiler = compiler.DDLCompiler
type_compiler = compiler.GenericTypeCompiler
preparer = compiler.IdentifierPreparer
supports_alter = True
# the first value we'd get for an autoincrement
# column.
default_sequence_base = 1
# most DBAPIs happy with this for execute().
# not cx_oracle.
execute_sequence_format = tuple
supports_views = True
supports_sequences = False
sequences_optional = False
preexecute_autoincrement_sequences = False
postfetch_lastrowid = True
implicit_returning = False
supports_right_nested_joins = True
supports_native_enum = False
supports_native_boolean = False
supports_simple_order_by_label = True
engine_config_types = util.immutabledict([
('convert_unicode', util.bool_or_str('force')),
('pool_timeout', util.asint),
('echo', util.bool_or_str('debug')),
('echo_pool', util.bool_or_str('debug')),
('pool_recycle', util.asint),
('pool_size', util.asint),
('max_overflow', util.asint),
('pool_threadlocal', util.asbool),
])
# if the NUMERIC type
# returns decimal.Decimal.
# *not* the FLOAT type however.
supports_native_decimal = False
if util.py3k:
supports_unicode_statements = True
supports_unicode_binds = True
returns_unicode_strings = True
description_encoding = None
else:
supports_unicode_statements = False
supports_unicode_binds = False
returns_unicode_strings = False
description_encoding = 'use_encoding'
name = 'default'
# length at which to truncate
# any identifier.
max_identifier_length = 9999
# length at which to truncate
# the name of an index.
# Usually None to indicate
# 'use max_identifier_length'.
# thanks to MySQL, sigh
max_index_name_length = None
supports_sane_rowcount = True
supports_sane_multi_rowcount = True
dbapi_type_map = {}
colspecs = {}
default_paramstyle = 'named'
supports_default_values = False
supports_empty_insert = True
supports_multivalues_insert = False
supports_server_side_cursors = False
server_version_info = None
construct_arguments = None
"""Optional set of argument specifiers for various SQLAlchemy
constructs, typically schema items.
To implement, establish as a series of tuples, as in::
construct_arguments = [
(schema.Index, {
"using": False,
"where": None,
"ops": None
})
]
If the above construct is established on the PostgreSQL dialect,
the :class:`.Index` construct will now accept the keyword arguments
    ``postgresql_using``, ``postgresql_where``, and ``postgresql_ops``.
Any other argument specified to the constructor of :class:`.Index`
which is prefixed with ``postgresql_`` will raise :class:`.ArgumentError`.
A dialect which does not include a ``construct_arguments`` member will
not participate in the argument validation system. For such a dialect,
any argument name is accepted by all participating constructs, within
the namespace of arguments prefixed with that dialect name. The rationale
here is so that third-party dialects that haven't yet implemented this
feature continue to function in the old way.
.. versionadded:: 0.9.2
.. seealso::
:class:`.DialectKWArgs` - implementing base class which consumes
:attr:`.DefaultDialect.construct_arguments`
"""
# indicates symbol names are
# UPPERCASEd if they are case insensitive
# within the database.
# if this is True, the methods normalize_name()
# and denormalize_name() must be provided.
requires_name_normalize = False
reflection_options = ()
dbapi_exception_translation_map = util.immutabledict()
"""mapping used in the extremely unusual case that a DBAPI's
published exceptions don't actually have the __name__ that they
are linked towards.
.. versionadded:: 1.0.5
"""
def __init__(self, convert_unicode=False,
encoding='utf-8', paramstyle=None, dbapi=None,
implicit_returning=None,
supports_right_nested_joins=None,
case_sensitive=True,
supports_native_boolean=None,
label_length=None, **kwargs):
if not getattr(self, 'ported_sqla_06', True):
util.warn(
"The %s dialect is not yet ported to the 0.6 format" %
self.name)
self.convert_unicode = convert_unicode
self.encoding = encoding
self.positional = False
self._ischema = None
self.dbapi = dbapi
if paramstyle is not None:
self.paramstyle = paramstyle
elif self.dbapi is not None:
self.paramstyle = self.dbapi.paramstyle
else:
self.paramstyle = self.default_paramstyle
if implicit_returning is not None:
self.implicit_returning = implicit_returning
self.positional = self.paramstyle in ('qmark', 'format', 'numeric')
self.identifier_preparer = self.preparer(self)
self.type_compiler = self.type_compiler(self)
if supports_right_nested_joins is not None:
self.supports_right_nested_joins = supports_right_nested_joins
if supports_native_boolean is not None:
self.supports_native_boolean = supports_native_boolean
self.case_sensitive = case_sensitive
if label_length and label_length > self.max_identifier_length:
raise exc.ArgumentError(
"Label length of %d is greater than this dialect's"
" maximum identifier length of %d" %
(label_length, self.max_identifier_length))
self.label_length = label_length
if self.description_encoding == 'use_encoding':
self._description_decoder = \
processors.to_unicode_processor_factory(
encoding
)
elif self.description_encoding is not None:
self._description_decoder = \
processors.to_unicode_processor_factory(
self.description_encoding
)
self._encoder = codecs.getencoder(self.encoding)
self._decoder = processors.to_unicode_processor_factory(self.encoding)
@util.memoized_property
def _type_memos(self):
return weakref.WeakKeyDictionary()
@property
def dialect_description(self):
return self.name + "+" + self.driver
@classmethod
def get_pool_class(cls, url):
return getattr(cls, 'poolclass', pool.QueuePool)
def initialize(self, connection):
try:
self.server_version_info = \
self._get_server_version_info(connection)
except NotImplementedError:
self.server_version_info = None
try:
self.default_schema_name = \
self._get_default_schema_name(connection)
except NotImplementedError:
self.default_schema_name = None
try:
self.default_isolation_level = \
self.get_isolation_level(connection.connection)
except NotImplementedError:
self.default_isolation_level = None
self.returns_unicode_strings = self._check_unicode_returns(connection)
if self.description_encoding is not None and \
self._check_unicode_description(connection):
self._description_decoder = self.description_encoding = None
self.do_rollback(connection.connection)
def on_connect(self):
"""return a callable which sets up a newly created DBAPI connection.
This is used to set dialect-wide per-connection options such as
isolation modes, unicode modes, etc.
If a callable is returned, it will be assembled into a pool listener
that receives the direct DBAPI connection, with all wrappers removed.
If None is returned, no listener will be generated.
"""
return None
def _check_unicode_returns(self, connection, additional_tests=None):
if util.py2k and not self.supports_unicode_statements:
cast_to = util.binary_type
else:
cast_to = util.text_type
if self.positional:
parameters = self.execute_sequence_format()
else:
parameters = {}
def check_unicode(test):
statement = cast_to(
expression.select([test]).compile(dialect=self))
try:
cursor = connection.connection.cursor()
connection._cursor_execute(cursor, statement, parameters)
row = cursor.fetchone()
cursor.close()
except exc.DBAPIError as de:
# note that _cursor_execute() will have closed the cursor
# if an exception is thrown.
util.warn("Exception attempting to "
"detect unicode returns: %r" % de)
return False
else:
return isinstance(row[0], util.text_type)
tests = [
# detect plain VARCHAR
expression.cast(
expression.literal_column("'test plain returns'"),
sqltypes.VARCHAR(60)
),
# detect if there's an NVARCHAR type with different behavior
# available
expression.cast(
expression.literal_column("'test unicode returns'"),
sqltypes.Unicode(60)
),
]
if additional_tests:
tests += additional_tests
results = set([check_unicode(test) for test in tests])
if results.issuperset([True, False]):
return "conditional"
else:
return results == set([True])
def _check_unicode_description(self, connection):
# all DBAPIs on Py2K return cursor.description as encoded,
# until pypy2.1beta2 with sqlite, so let's just check it -
# it's likely others will start doing this too in Py2k.
if util.py2k and not self.supports_unicode_statements:
cast_to = util.binary_type
else:
cast_to = util.text_type
cursor = connection.connection.cursor()
try:
cursor.execute(
cast_to(
expression.select([
expression.literal_column("'x'").label("some_label")
]).compile(dialect=self)
)
)
return isinstance(cursor.description[0][0], util.text_type)
finally:
cursor.close()
def type_descriptor(self, typeobj):
"""Provide a database-specific :class:`.TypeEngine` object, given
the generic object which comes from the types module.
This method looks for a dictionary called
``colspecs`` as a class or instance-level variable,
and passes on to :func:`.types.adapt_type`.
"""
return sqltypes.adapt_type(typeobj, self.colspecs)
def reflecttable(
self, connection, table, include_columns, exclude_columns):
insp = reflection.Inspector.from_engine(connection)
return insp.reflecttable(table, include_columns, exclude_columns)
def get_pk_constraint(self, conn, table_name, schema=None, **kw):
"""Compatibility method, adapts the result of get_primary_keys()
for those dialects which don't implement get_pk_constraint().
"""
return {
'constrained_columns':
self.get_primary_keys(conn, table_name,
schema=schema, **kw)
}
def validate_identifier(self, ident):
if len(ident) > self.max_identifier_length:
raise exc.IdentifierError(
"Identifier '%s' exceeds maximum length of %d characters" %
(ident, self.max_identifier_length)
)
def connect(self, *cargs, **cparams):
return self.dbapi.connect(*cargs, **cparams)
def create_connect_args(self, url):
opts = url.translate_connect_args()
opts.update(url.query)
return [[], opts]
def set_engine_execution_options(self, engine, opts):
if 'isolation_level' in opts:
isolation_level = opts['isolation_level']
@event.listens_for(engine, "engine_connect")
def set_isolation(connection, branch):
if not branch:
self._set_connection_isolation(connection, isolation_level)
if 'schema_translate_map' in opts:
getter = schema._schema_getter(opts['schema_translate_map'])
engine.schema_for_object = getter
@event.listens_for(engine, "engine_connect")
def set_schema_translate_map(connection, branch):
connection.schema_for_object = getter
def set_connection_execution_options(self, connection, opts):
if 'isolation_level' in opts:
self._set_connection_isolation(connection, opts['isolation_level'])
if 'schema_translate_map' in opts:
getter = schema._schema_getter(opts['schema_translate_map'])
connection.schema_for_object = getter
def _set_connection_isolation(self, connection, level):
if connection.in_transaction():
util.warn(
"Connection is already established with a Transaction; "
"setting isolation_level may implicitly rollback or commit "
"the existing transaction, or have no effect until "
"next transaction")
self.set_isolation_level(connection.connection, level)
connection.connection._connection_record.\
finalize_callback.append(self.reset_isolation_level)
def do_begin(self, dbapi_connection):
pass
def do_rollback(self, dbapi_connection):
dbapi_connection.rollback()
def do_commit(self, dbapi_connection):
dbapi_connection.commit()
def do_close(self, dbapi_connection):
dbapi_connection.close()
def create_xid(self):
"""Create a random two-phase transaction ID.
This id will be passed to do_begin_twophase(), do_rollback_twophase(),
do_commit_twophase(). Its format is unspecified.
"""
return "_sa_%032x" % random.randint(0, 2 ** 128)
def do_savepoint(self, connection, name):
connection.execute(expression.SavepointClause(name))
def do_rollback_to_savepoint(self, connection, name):
connection.execute(expression.RollbackToSavepointClause(name))
def do_release_savepoint(self, connection, name):
connection.execute(expression.ReleaseSavepointClause(name))
def do_executemany(self, cursor, statement, parameters, context=None):
cursor.executemany(statement, parameters)
def do_execute(self, cursor, statement, parameters, context=None):
cursor.execute(statement, parameters)
def do_execute_no_params(self, cursor, statement, context=None):
cursor.execute(statement)
def is_disconnect(self, e, connection, cursor):
return False
def reset_isolation_level(self, dbapi_conn):
# default_isolation_level is read from the first connection
# after the initial set of 'isolation_level', if any, so is
# the configured default of this dialect.
self.set_isolation_level(dbapi_conn, self.default_isolation_level)
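# Illustrative sketch (not part of SQLAlchemy itself): a third-party dialect
# built on DefaultDialect typically customizes per-connection setup by
# overriding on_connect(), as described in its docstring above. The dialect
# name and the statement issued below are hypothetical.
class _ExampleDialect(DefaultDialect):
    name = 'exampledb'

    def on_connect(self):
        def connect(dbapi_connection):
            # runs once for each new DBAPI connection before it is pooled
            cursor = dbapi_connection.cursor()
            cursor.execute("SET example_option = 'on'")
            cursor.close()
        return connect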
class StrCompileDialect(DefaultDialect):
statement_compiler = compiler.StrSQLCompiler
ddl_compiler = compiler.DDLCompiler
type_compiler = compiler.StrSQLTypeCompiler
preparer = compiler.IdentifierPreparer
supports_sequences = True
sequences_optional = True
preexecute_autoincrement_sequences = False
implicit_returning = False
supports_native_boolean = True
supports_simple_order_by_label = True
class DefaultExecutionContext(interfaces.ExecutionContext):
isinsert = False
isupdate = False
isdelete = False
is_crud = False
is_text = False
isddl = False
executemany = False
compiled = None
statement = None
result_column_struct = None
returned_defaults = None
_is_implicit_returning = False
_is_explicit_returning = False
# a hook for SQLite's translation of
# result column names
_translate_colname = None
@classmethod
def _init_ddl(cls, dialect, connection, dbapi_connection, compiled_ddl):
"""Initialize execution context for a DDLElement construct."""
self = cls.__new__(cls)
self.root_connection = connection
self._dbapi_connection = dbapi_connection
self.dialect = connection.dialect
self.compiled = compiled = compiled_ddl
self.isddl = True
self.execution_options = compiled.execution_options
if connection._execution_options:
self.execution_options = dict(self.execution_options)
self.execution_options.update(connection._execution_options)
if not dialect.supports_unicode_statements:
self.unicode_statement = util.text_type(compiled)
self.statement = dialect._encoder(self.unicode_statement)[0]
else:
self.statement = self.unicode_statement = util.text_type(compiled)
self.cursor = self.create_cursor()
self.compiled_parameters = []
if dialect.positional:
self.parameters = [dialect.execute_sequence_format()]
else:
self.parameters = [{}]
return self
@classmethod
def _init_compiled(cls, dialect, connection, dbapi_connection,
compiled, parameters):
"""Initialize execution context for a Compiled construct."""
self = cls.__new__(cls)
self.root_connection = connection
self._dbapi_connection = dbapi_connection
self.dialect = connection.dialect
self.compiled = compiled
# this should be caught in the engine before
# we get here
assert compiled.can_execute
self.execution_options = compiled.execution_options.union(
connection._execution_options)
self.result_column_struct = (
compiled._result_columns, compiled._ordered_columns,
compiled._textual_ordered_columns)
self.unicode_statement = util.text_type(compiled)
if not dialect.supports_unicode_statements:
self.statement = self.unicode_statement.encode(
self.dialect.encoding)
else:
self.statement = self.unicode_statement
self.isinsert = compiled.isinsert
self.isupdate = compiled.isupdate
self.isdelete = compiled.isdelete
self.is_text = compiled.isplaintext
if not parameters:
self.compiled_parameters = [compiled.construct_params()]
else:
self.compiled_parameters = \
[compiled.construct_params(m, _group_number=grp) for
grp, m in enumerate(parameters)]
self.executemany = len(parameters) > 1
self.cursor = self.create_cursor()
if self.isinsert or self.isupdate or self.isdelete:
self.is_crud = True
self._is_explicit_returning = bool(compiled.statement._returning)
self._is_implicit_returning = bool(
compiled.returning and not compiled.statement._returning)
if self.compiled.insert_prefetch or self.compiled.update_prefetch:
if self.executemany:
self._process_executemany_defaults()
else:
self._process_executesingle_defaults()
processors = compiled._bind_processors
# Convert the dictionary of bind parameter values
# into a dict or list to be sent to the DBAPI's
# execute() or executemany() method.
parameters = []
if dialect.positional:
for compiled_params in self.compiled_parameters:
param = []
for key in self.compiled.positiontup:
if key in processors:
param.append(processors[key](compiled_params[key]))
else:
param.append(compiled_params[key])
parameters.append(dialect.execute_sequence_format(param))
else:
encode = not dialect.supports_unicode_statements
for compiled_params in self.compiled_parameters:
if encode:
param = dict(
(
dialect._encoder(key)[0],
processors[key](compiled_params[key])
if key in processors
else compiled_params[key]
)
for key in compiled_params
)
else:
param = dict(
(
key,
processors[key](compiled_params[key])
if key in processors
else compiled_params[key]
)
for key in compiled_params
)
parameters.append(param)
self.parameters = dialect.execute_sequence_format(parameters)
return self
@classmethod
def _init_statement(cls, dialect, connection, dbapi_connection,
statement, parameters):
"""Initialize execution context for a string SQL statement."""
self = cls.__new__(cls)
self.root_connection = connection
self._dbapi_connection = dbapi_connection
self.dialect = connection.dialect
self.is_text = True
# plain text statement
self.execution_options = connection._execution_options
if not parameters:
if self.dialect.positional:
self.parameters = [dialect.execute_sequence_format()]
else:
self.parameters = [{}]
elif isinstance(parameters[0], dialect.execute_sequence_format):
self.parameters = parameters
elif isinstance(parameters[0], dict):
if dialect.supports_unicode_statements:
self.parameters = parameters
else:
self.parameters = [
dict((dialect._encoder(k)[0], d[k]) for k in d)
for d in parameters
] or [{}]
else:
self.parameters = [dialect.execute_sequence_format(p)
for p in parameters]
self.executemany = len(parameters) > 1
if not dialect.supports_unicode_statements and \
isinstance(statement, util.text_type):
self.unicode_statement = statement
self.statement = dialect._encoder(statement)[0]
else:
self.statement = self.unicode_statement = statement
self.cursor = self.create_cursor()
return self
@classmethod
def _init_default(cls, dialect, connection, dbapi_connection):
"""Initialize execution context for a ColumnDefault construct."""
self = cls.__new__(cls)
self.root_connection = connection
self._dbapi_connection = dbapi_connection
self.dialect = connection.dialect
self.execution_options = connection._execution_options
self.cursor = self.create_cursor()
return self
@util.memoized_property
def engine(self):
return self.root_connection.engine
@util.memoized_property
def postfetch_cols(self):
return self.compiled.postfetch
@util.memoized_property
def prefetch_cols(self):
if self.isinsert:
return self.compiled.insert_prefetch
elif self.isupdate:
return self.compiled.update_prefetch
else:
return ()
@util.memoized_property
def returning_cols(self):
return self.compiled.returning
@util.memoized_property
def no_parameters(self):
return self.execution_options.get("no_parameters", False)
@util.memoized_property
def should_autocommit(self):
autocommit = self.execution_options.get('autocommit',
not self.compiled and
self.statement and
expression.PARSE_AUTOCOMMIT
or False)
if autocommit is expression.PARSE_AUTOCOMMIT:
return self.should_autocommit_text(self.unicode_statement)
else:
return autocommit
def _execute_scalar(self, stmt, type_):
"""Execute a string statement on the current cursor, returning a
scalar result.
Used to fire off sequences, default phrases, and "select lastrowid"
types of statements individually or in the context of a parent INSERT
or UPDATE statement.
"""
conn = self.root_connection
if isinstance(stmt, util.text_type) and \
not self.dialect.supports_unicode_statements:
stmt = self.dialect._encoder(stmt)[0]
if self.dialect.positional:
default_params = self.dialect.execute_sequence_format()
else:
default_params = {}
conn._cursor_execute(self.cursor, stmt, default_params, context=self)
r = self.cursor.fetchone()[0]
if type_ is not None:
# apply type post processors to the result
proc = type_._cached_result_processor(
self.dialect,
self.cursor.description[0][1]
)
if proc:
return proc(r)
return r
@property
def connection(self):
return self.root_connection._branch()
def should_autocommit_text(self, statement):
return AUTOCOMMIT_REGEXP.match(statement)
def _use_server_side_cursor(self):
if not self.dialect.supports_server_side_cursors:
return False
if self.dialect.server_side_cursors:
use_server_side = \
self.execution_options.get('stream_results', True) and (
(self.compiled and isinstance(self.compiled.statement,
expression.Selectable)
or
(
(not self.compiled or
isinstance(self.compiled.statement,
expression.TextClause))
and self.statement and SERVER_SIDE_CURSOR_RE.match(
self.statement))
)
)
else:
use_server_side = \
self.execution_options.get('stream_results', False)
return use_server_side
def create_cursor(self):
if self._use_server_side_cursor():
self._is_server_side = True
return self.create_server_side_cursor()
else:
self._is_server_side = False
return self._dbapi_connection.cursor()
def create_server_side_cursor(self):
raise NotImplementedError()
def pre_exec(self):
pass
def post_exec(self):
pass
def get_result_processor(self, type_, colname, coltype):
"""Return a 'result processor' for a given type as present in
cursor.description.
This has a default implementation that dialects can override
for context-sensitive result type handling.
"""
return type_._cached_result_processor(self.dialect, coltype)
def get_lastrowid(self):
"""return self.cursor.lastrowid, or equivalent, after an INSERT.
This may involve calling special cursor functions,
issuing a new SELECT on the cursor (or a new one),
or returning a stored value that was
calculated within post_exec().
This function will only be called for dialects
which support "implicit" primary key generation,
keep preexecute_autoincrement_sequences set to False,
and when no explicit id value was bound to the
statement.
The function is called once, directly after
post_exec() and before the transaction is committed
or ResultProxy is generated. If the post_exec()
method assigns a value to `self._lastrowid`, the
value is used in place of calling get_lastrowid().
Note that this method is *not* equivalent to the
``lastrowid`` method on ``ResultProxy``, which is a
direct proxy to the DBAPI ``lastrowid`` accessor
in all cases.
"""
return self.cursor.lastrowid
def handle_dbapi_exception(self, e):
pass
def get_result_proxy(self):
if self._is_server_side:
return result.BufferedRowResultProxy(self)
else:
return result.ResultProxy(self)
@property
def rowcount(self):
return self.cursor.rowcount
def supports_sane_rowcount(self):
return self.dialect.supports_sane_rowcount
def supports_sane_multi_rowcount(self):
return self.dialect.supports_sane_multi_rowcount
def _setup_crud_result_proxy(self):
if self.isinsert and \
not self.executemany:
if not self._is_implicit_returning and \
not self.compiled.inline and \
self.dialect.postfetch_lastrowid:
self._setup_ins_pk_from_lastrowid()
elif not self._is_implicit_returning:
self._setup_ins_pk_from_empty()
result = self.get_result_proxy()
if self.isinsert:
if self._is_implicit_returning:
row = result.fetchone()
self.returned_defaults = row
self._setup_ins_pk_from_implicit_returning(row)
result._soft_close(_autoclose_connection=False)
result._metadata = None
elif not self._is_explicit_returning:
result._soft_close(_autoclose_connection=False)
result._metadata = None
elif self.isupdate and self._is_implicit_returning:
row = result.fetchone()
self.returned_defaults = row
result._soft_close(_autoclose_connection=False)
result._metadata = None
elif result._metadata is None:
# no results, get rowcount
# (which requires open cursor on some drivers
# such as kinterbasdb, mxodbc)
result.rowcount
result._soft_close(_autoclose_connection=False)
return result
def _setup_ins_pk_from_lastrowid(self):
key_getter = self.compiled._key_getters_for_crud_column[2]
table = self.compiled.statement.table
compiled_params = self.compiled_parameters[0]
lastrowid = self.get_lastrowid()
if lastrowid is not None:
autoinc_col = table._autoincrement_column
if autoinc_col is not None:
# apply type post processors to the lastrowid
proc = autoinc_col.type._cached_result_processor(
self.dialect, None)
if proc is not None:
lastrowid = proc(lastrowid)
self.inserted_primary_key = [
lastrowid if c is autoinc_col else
compiled_params.get(key_getter(c), None)
for c in table.primary_key
]
else:
# don't have a usable lastrowid, so
# do the same as _setup_ins_pk_from_empty
self.inserted_primary_key = [
compiled_params.get(key_getter(c), None)
for c in table.primary_key
]
def _setup_ins_pk_from_empty(self):
key_getter = self.compiled._key_getters_for_crud_column[2]
table = self.compiled.statement.table
compiled_params = self.compiled_parameters[0]
self.inserted_primary_key = [
compiled_params.get(key_getter(c), None)
for c in table.primary_key
]
def _setup_ins_pk_from_implicit_returning(self, row):
if row is None:
self.inserted_primary_key = None
return
key_getter = self.compiled._key_getters_for_crud_column[2]
table = self.compiled.statement.table
compiled_params = self.compiled_parameters[0]
self.inserted_primary_key = [
row[col] if value is None else value
for col, value in [
(col, compiled_params.get(key_getter(col), None))
for col in table.primary_key
]
]
def lastrow_has_defaults(self):
return (self.isinsert or self.isupdate) and \
bool(self.compiled.postfetch)
def set_input_sizes(self, translate=None, exclude_types=None):
"""Given a cursor and ClauseParameters, call the appropriate
style of ``setinputsizes()`` on the cursor, using DB-API types
from the bind parameter's ``TypeEngine`` objects.
This method is only called by those dialects which require it,
currently cx_oracle.
"""
if not hasattr(self.compiled, 'bind_names'):
return
types = dict(
(self.compiled.bind_names[bindparam], bindparam.type)
for bindparam in self.compiled.bind_names)
if self.dialect.positional:
inputsizes = []
for key in self.compiled.positiontup:
typeengine = types[key]
dbtype = typeengine.dialect_impl(self.dialect).\
get_dbapi_type(self.dialect.dbapi)
if dbtype is not None and \
(not exclude_types or dbtype not in exclude_types):
inputsizes.append(dbtype)
try:
self.cursor.setinputsizes(*inputsizes)
except BaseException as e:
self.root_connection._handle_dbapi_exception(
e, None, None, None, self)
else:
inputsizes = {}
for key in self.compiled.bind_names.values():
typeengine = types[key]
dbtype = typeengine.dialect_impl(self.dialect).\
get_dbapi_type(self.dialect.dbapi)
if dbtype is not None and \
(not exclude_types or dbtype not in exclude_types):
if translate:
key = translate.get(key, key)
if not self.dialect.supports_unicode_binds:
key = self.dialect._encoder(key)[0]
inputsizes[key] = dbtype
try:
self.cursor.setinputsizes(**inputsizes)
except BaseException as e:
self.root_connection._handle_dbapi_exception(
e, None, None, None, self)
def _exec_default(self, default, type_):
if default.is_sequence:
return self.fire_sequence(default, type_)
elif default.is_callable:
return default.arg(self)
elif default.is_clause_element:
# TODO: expensive branching here should be
# pulled into _exec_scalar()
conn = self.connection
c = expression.select([default.arg]).compile(bind=conn)
return conn._execute_compiled(c, (), {}).scalar()
else:
return default.arg
def get_insert_default(self, column):
if column.default is None:
return None
else:
return self._exec_default(column.default, column.type)
def get_update_default(self, column):
if column.onupdate is None:
return None
else:
return self._exec_default(column.onupdate, column.type)
def _process_executemany_defaults(self):
key_getter = self.compiled._key_getters_for_crud_column[2]
scalar_defaults = {}
insert_prefetch = self.compiled.insert_prefetch
update_prefetch = self.compiled.update_prefetch
# pre-determine scalar Python-side defaults
# to avoid many calls of get_insert_default()/
# get_update_default()
for c in insert_prefetch:
if c.default and c.default.is_scalar:
scalar_defaults[c] = c.default.arg
for c in update_prefetch:
if c.onupdate and c.onupdate.is_scalar:
scalar_defaults[c] = c.onupdate.arg
for param in self.compiled_parameters:
self.current_parameters = param
for c in insert_prefetch:
if c in scalar_defaults:
val = scalar_defaults[c]
else:
val = self.get_insert_default(c)
if val is not None:
param[key_getter(c)] = val
for c in update_prefetch:
if c in scalar_defaults:
val = scalar_defaults[c]
else:
val = self.get_update_default(c)
if val is not None:
param[key_getter(c)] = val
del self.current_parameters
def _process_executesingle_defaults(self):
key_getter = self.compiled._key_getters_for_crud_column[2]
self.current_parameters = compiled_parameters = \
self.compiled_parameters[0]
for c in self.compiled.insert_prefetch:
if c.default and \
not c.default.is_sequence and c.default.is_scalar:
val = c.default.arg
else:
val = self.get_insert_default(c)
if val is not None:
compiled_parameters[key_getter(c)] = val
for c in self.compiled.update_prefetch:
val = self.get_update_default(c)
if val is not None:
compiled_parameters[key_getter(c)] = val
del self.current_parameters
DefaultDialect.execution_ctx_cls = DefaultExecutionContext
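# Editor's note: the sketch below is an illustrative, standalone rendering of the
# bind-processor pattern used in _init_compiled() above. It is not part of the
# original module; the function name and sample data are hypothetical.
def _apply_bind_processors_sketch(compiled_params, processors, positiontup=None):
    """Apply per-key bind processors, positionally or by name."""
    if positiontup is not None:
        # positional dialects: emit values in positiontup order
        return tuple(
            processors[key](compiled_params[key]) if key in processors
            else compiled_params[key]
            for key in positiontup
        )
    # named dialects: emit a dict, converting each value that has a processor
    return dict(
        (key,
         processors[key](compiled_params[key]) if key in processors
         else compiled_params[key])
        for key in compiled_params
    )
# e.g. _apply_bind_processors_sketch({'x': '5'}, {'x': int}, ['x']) returns (5,)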
|
jicksy/oneanddone_test
|
refs/heads/master
|
vendor-local/lib/python/requests/packages/charade/latin1prober.py
|
50
|
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 2001
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
# Shy Shalom - original C code
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from .charsetprober import CharSetProber
from .constants import eNotMe
from .compat import wrap_ord
FREQ_CAT_NUM = 4
UDF = 0 # undefined
OTH = 1 # other
ASC = 2 # ascii capital letter
ASS = 3 # ascii small letter
ACV = 4 # accent capital vowel
ACO = 5 # accent capital other
ASV = 6 # accent small vowel
ASO = 7 # accent small other
CLASS_NUM = 8 # total classes
Latin1_CharToClass = (
OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 00 - 07
OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 08 - 0F
OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 10 - 17
OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 18 - 1F
OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 20 - 27
OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 28 - 2F
OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 30 - 37
OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 38 - 3F
OTH, ASC, ASC, ASC, ASC, ASC, ASC, ASC, # 40 - 47
ASC, ASC, ASC, ASC, ASC, ASC, ASC, ASC, # 48 - 4F
ASC, ASC, ASC, ASC, ASC, ASC, ASC, ASC, # 50 - 57
ASC, ASC, ASC, OTH, OTH, OTH, OTH, OTH, # 58 - 5F
OTH, ASS, ASS, ASS, ASS, ASS, ASS, ASS, # 60 - 67
ASS, ASS, ASS, ASS, ASS, ASS, ASS, ASS, # 68 - 6F
ASS, ASS, ASS, ASS, ASS, ASS, ASS, ASS, # 70 - 77
ASS, ASS, ASS, OTH, OTH, OTH, OTH, OTH, # 78 - 7F
OTH, UDF, OTH, ASO, OTH, OTH, OTH, OTH, # 80 - 87
OTH, OTH, ACO, OTH, ACO, UDF, ACO, UDF, # 88 - 8F
UDF, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 90 - 97
OTH, OTH, ASO, OTH, ASO, UDF, ASO, ACO, # 98 - 9F
OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # A0 - A7
OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # A8 - AF
OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # B0 - B7
OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # B8 - BF
ACV, ACV, ACV, ACV, ACV, ACV, ACO, ACO, # C0 - C7
ACV, ACV, ACV, ACV, ACV, ACV, ACV, ACV, # C8 - CF
ACO, ACO, ACV, ACV, ACV, ACV, ACV, OTH, # D0 - D7
ACV, ACV, ACV, ACV, ACV, ACO, ACO, ACO, # D8 - DF
ASV, ASV, ASV, ASV, ASV, ASV, ASO, ASO, # E0 - E7
ASV, ASV, ASV, ASV, ASV, ASV, ASV, ASV, # E8 - EF
ASO, ASO, ASV, ASV, ASV, ASV, ASV, OTH, # F0 - F7
ASV, ASV, ASV, ASV, ASV, ASO, ASO, ASO, # F8 - FF
)
# 0 : illegal
# 1 : very unlikely
# 2 : normal
# 3 : very likely
Latin1ClassModel = (
# UDF OTH ASC ASS ACV ACO ASV ASO
0, 0, 0, 0, 0, 0, 0, 0, # UDF
0, 3, 3, 3, 3, 3, 3, 3, # OTH
0, 3, 3, 3, 3, 3, 3, 3, # ASC
0, 3, 3, 3, 1, 1, 3, 3, # ASS
0, 3, 3, 3, 1, 2, 1, 2, # ACV
0, 3, 3, 3, 3, 3, 3, 3, # ACO
0, 3, 1, 3, 1, 1, 1, 3, # ASV
0, 3, 1, 3, 1, 1, 3, 3, # ASO
)
class Latin1Prober(CharSetProber):
def __init__(self):
CharSetProber.__init__(self)
self.reset()
def reset(self):
self._mLastCharClass = OTH
self._mFreqCounter = [0] * FREQ_CAT_NUM
CharSetProber.reset(self)
def get_charset_name(self):
return "windows-1252"
def feed(self, aBuf):
aBuf = self.filter_with_english_letters(aBuf)
for c in aBuf:
charClass = Latin1_CharToClass[wrap_ord(c)]
freq = Latin1ClassModel[(self._mLastCharClass * CLASS_NUM)
+ charClass]
if freq == 0:
self._mState = eNotMe
break
self._mFreqCounter[freq] += 1
self._mLastCharClass = charClass
return self.get_state()
def get_confidence(self):
if self.get_state() == eNotMe:
return 0.01
total = sum(self._mFreqCounter)
if total < 0.01:
confidence = 0.0
else:
confidence = ((float(self._mFreqCounter[3]) / total)
- (self._mFreqCounter[1] * 20.0 / total))
if confidence < 0.0:
confidence = 0.0
# lower the confidence of latin1 so that other more accurate
# detectors can take priority.
confidence = confidence * 0.5
return confidence
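# Editor's note: illustrative usage sketch, not part of the upstream file. It
# assumes the surrounding ``charade`` package is importable; the sample bytes are
# hypothetical windows-1252 text.
def _latin1_prober_demo():
    prober = Latin1Prober()
    prober.feed(b'Fa\xe7ade \xe0 la fran\xe7aise')
    # returns e.g. ('windows-1252', <confidence between 0.0 and 0.5>)
    return prober.get_charset_name(), prober.get_confidence()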
|
Dandandan/wikiprogramming
|
refs/heads/master
|
jsrepl/build/extern/python/closured/lib/python2.7/unittest/runner.py
|
109
|
"""Running tests"""
import sys
import time
from . import result
from .signals import registerResult
__unittest = True
class _WritelnDecorator(object):
"""Used to decorate file-like objects with a handy 'writeln' method"""
def __init__(self,stream):
self.stream = stream
def __getattr__(self, attr):
if attr in ('stream', '__getstate__'):
raise AttributeError(attr)
return getattr(self.stream,attr)
def writeln(self, arg=None):
if arg:
self.write(arg)
self.write('\n') # text-mode streams translate to \r\n if needed
class TextTestResult(result.TestResult):
"""A test result class that can print formatted text results to a stream.
Used by TextTestRunner.
"""
separator1 = '=' * 70
separator2 = '-' * 70
def __init__(self, stream, descriptions, verbosity):
super(TextTestResult, self).__init__()
self.stream = stream
self.showAll = verbosity > 1
self.dots = verbosity == 1
self.descriptions = descriptions
def getDescription(self, test):
doc_first_line = test.shortDescription()
if self.descriptions and doc_first_line:
return '\n'.join((str(test), doc_first_line))
else:
return str(test)
def startTest(self, test):
super(TextTestResult, self).startTest(test)
if self.showAll:
self.stream.write(self.getDescription(test))
self.stream.write(" ... ")
self.stream.flush()
def addSuccess(self, test):
super(TextTestResult, self).addSuccess(test)
if self.showAll:
self.stream.writeln("ok")
elif self.dots:
self.stream.write('.')
self.stream.flush()
def addError(self, test, err):
super(TextTestResult, self).addError(test, err)
if self.showAll:
self.stream.writeln("ERROR")
elif self.dots:
self.stream.write('E')
self.stream.flush()
def addFailure(self, test, err):
super(TextTestResult, self).addFailure(test, err)
if self.showAll:
self.stream.writeln("FAIL")
elif self.dots:
self.stream.write('F')
self.stream.flush()
def addSkip(self, test, reason):
super(TextTestResult, self).addSkip(test, reason)
if self.showAll:
self.stream.writeln("skipped {0!r}".format(reason))
elif self.dots:
self.stream.write("s")
self.stream.flush()
def addExpectedFailure(self, test, err):
super(TextTestResult, self).addExpectedFailure(test, err)
if self.showAll:
self.stream.writeln("expected failure")
elif self.dots:
self.stream.write("x")
self.stream.flush()
def addUnexpectedSuccess(self, test):
super(TextTestResult, self).addUnexpectedSuccess(test)
if self.showAll:
self.stream.writeln("unexpected success")
elif self.dots:
self.stream.write("u")
self.stream.flush()
def printErrors(self):
if self.dots or self.showAll:
self.stream.writeln()
self.printErrorList('ERROR', self.errors)
self.printErrorList('FAIL', self.failures)
def printErrorList(self, flavour, errors):
for test, err in errors:
self.stream.writeln(self.separator1)
self.stream.writeln("%s: %s" % (flavour,self.getDescription(test)))
self.stream.writeln(self.separator2)
self.stream.writeln("%s" % err)
class TextTestRunner(object):
"""A test runner class that displays results in textual form.
It prints out the names of tests as they are run, errors as they
occur, and a summary of the results at the end of the test run.
"""
resultclass = TextTestResult
def __init__(self, stream=sys.stderr, descriptions=True, verbosity=1,
failfast=False, buffer=False, resultclass=None):
self.stream = _WritelnDecorator(stream)
self.descriptions = descriptions
self.verbosity = verbosity
self.failfast = failfast
self.buffer = buffer
if resultclass is not None:
self.resultclass = resultclass
def _makeResult(self):
return self.resultclass(self.stream, self.descriptions, self.verbosity)
def run(self, test):
"Run the given test case or test suite."
result = self._makeResult()
registerResult(result)
result.failfast = self.failfast
result.buffer = self.buffer
startTime = time.time()
startTestRun = getattr(result, 'startTestRun', None)
if startTestRun is not None:
startTestRun()
try:
test(result)
finally:
stopTestRun = getattr(result, 'stopTestRun', None)
if stopTestRun is not None:
stopTestRun()
stopTime = time.time()
timeTaken = stopTime - startTime
result.printErrors()
if hasattr(result, 'separator2'):
self.stream.writeln(result.separator2)
run = result.testsRun
self.stream.writeln("Ran %d test%s in %.3fs" %
(run, run != 1 and "s" or "", timeTaken))
self.stream.writeln()
expectedFails = unexpectedSuccesses = skipped = 0
try:
results = map(len, (result.expectedFailures,
result.unexpectedSuccesses,
result.skipped))
except AttributeError:
pass
else:
expectedFails, unexpectedSuccesses, skipped = results
infos = []
if not result.wasSuccessful():
self.stream.write("FAILED")
failed, errored = map(len, (result.failures, result.errors))
if failed:
infos.append("failures=%d" % failed)
if errored:
infos.append("errors=%d" % errored)
else:
self.stream.write("OK")
if skipped:
infos.append("skipped=%d" % skipped)
if expectedFails:
infos.append("expected failures=%d" % expectedFails)
if unexpectedSuccesses:
infos.append("unexpected successes=%d" % unexpectedSuccesses)
if infos:
self.stream.writeln(" (%s)" % (", ".join(infos),))
else:
self.stream.write("\n")
return result
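# Editor's note: illustrative sketch, not part of the upstream module. It drives a
# tiny, hypothetical test case through the TextTestRunner defined above via the
# public unittest entry points.
def _text_test_runner_demo():
    import unittest
    class _DemoCase(unittest.TestCase):
        def test_addition(self):
            self.assertEqual(1 + 1, 2)
    suite = unittest.TestLoader().loadTestsFromTestCase(_DemoCase)
    # verbosity=1 gives the dot-per-test output produced by TextTestResult
    return unittest.TextTestRunner(verbosity=1).run(suite)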
|
muelli/mysql-proxy-python
|
refs/heads/master
|
lib/rw_splitting.py
|
2
|
# LICENSE BEGIN
#
# Copyright (c) 2010 Ysj.Ray
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
## LICENSE END
#--
#- a flexible statement based load balancer with connection pooling
#-
#- * build a connection pool of min_idle_connections for each backend and maintain
#- its size
#- *
#-
#-
import proxy.commands as commands
import proxy.tokenizer as tokenizer
import proxy.balance as lb
import proxy.auto_config as auto_config
class rwsplit_config(object):
def __init__(self, value=None):
if type(value) == dict:
for k, v in value.items():
setattr(self, k, v)
def check_rwsplit_config(fun):
# Here the first element of args must be proxy object.
def wrapper(*args, **kwds):
proxy = args[0]
if not hasattr(proxy.globals.config, 'rwsplit'):
proxy.globals.config.rwsplit = rwsplit_config({
'min_idle_connections' : 4,
'max_idle_connections' : 8,
'is_debug' : True
})
return fun(*args, **kwds)
return wrapper
#--
#- read/write splitting sends all non-transactional SELECTs to the slaves
#-
#- is_in_transaction tracks the state of the transactions
#- if this was a SELECT SQL_CALC_FOUND_ROWS, stay on the same connection
#--
#- get a connection to a backend
#-
#- as long as we don't have enough connections in the pool, create new connections
#-
@check_rwsplit_config
def connect_server(proxy):
proxy.globals.is_in_transaction = False
proxy.globals.is_in_select_calc_found_rows = False
is_debug = proxy.globals.config.rwsplit.is_debug
#- make sure that we connect to each backend at least once to
#- keep the connections to the servers alive
#-
#- on read_query we can switch the backends again to another backend
if is_debug :
print
print "[connect_server] %s" % proxy.connection.client.src.name
rw_ndx = -1
#- init all backends
for i, s in enumerate(proxy.globals.backends):
pool = s.pool #- we don't have a username yet, try to find a connection which is idling
try:
cur_idle = pool.users[""].cur_idle_connections
except:
cur_idle = 0
pool.min_idle_connections = proxy.globals.config.rwsplit.min_idle_connections
pool.max_idle_connections = proxy.globals.config.rwsplit.max_idle_connections
if is_debug :
print " ["+ str(i) +"].connected_clients = " + str(s.connected_clients)
print " ["+ str(i) +"].pool.cur_idle = " + str(cur_idle)
print " ["+ str(i) +"].pool.max_idle = " + str(pool.max_idle_connections)
print " ["+ str(i) +"].pool.min_idle = " + str(pool.min_idle_connections)
print " ["+ str(i) +"].type = " + str(s.type)
print " ["+ str(i) +"].state = " + str(s.state)
#- prefer connections to the master
if s.type == proxy.BACKEND_TYPE_RW and\
s.state != proxy.BACKEND_STATE_DOWN and\
cur_idle < pool.min_idle_connections :
if is_debug:
print ' found a backend rw, select backend:', i
proxy.connection.backend_ndx = i
break
elif s.type == proxy.BACKEND_TYPE_RO and\
s.state != proxy.BACKEND_STATE_DOWN and\
cur_idle < pool.min_idle_connections :
if is_debug:
print ' found a backend ro, select backend:', i
proxy.connection.backend_ndx = i
break
elif s.type == proxy.BACKEND_TYPE_RW and\
s.state != proxy.BACKEND_STATE_DOWN and\
rw_ndx == -1:
rw_ndx = i
if proxy.connection.backend_ndx == -1:
if is_debug :
print(" backend_ndx==-1, [" + str(rw_ndx) + "] taking master as default")
proxy.connection.backend_ndx = rw_ndx
#- pick a random backend
#- we somehow have to skip DOWN backends
#- ok, did we get a backend?
if proxy.connection.server :
if is_debug :
print " using pooled connection from: %s, stay" % proxy.connection.backend_ndx
#- stay with it
return proxy.PROXY_IGNORE_RESULT
if is_debug :
print " [%s] idle-conns below min-idle" % proxy.connection.backend_ndx
if is_debug :
print " [Finally choose backend_ndx: %s]" % proxy.connection.backend_ndx
#- open a new connection
#--
#- put the successfully authed connection into the connection pool
#-
#- @param auth the context information for the auth
#-
#- auth.packet is the packet
@check_rwsplit_config
def read_auth_result( proxy, auth ):
is_debug = proxy.globals.config.rwsplit.is_debug
if is_debug :
print "[read_auth_result] %s" % proxy.connection.client.src.name
if ord((auth.packet)[0]) == proxy.MYSQLD_PACKET_OK :
#- auth was fine, disconnect from the server
proxy.connection.backend_ndx = -1
elif ord((auth.packet)[0]) == proxy.MYSQLD_PACKET_ERR :
#- auth failed
pass
#--
#- read/write splitting
@check_rwsplit_config
def read_query(proxy, packet):
is_debug = proxy.globals.config.rwsplit.is_debug
cmd = commands.parse(packet)
c = proxy.connection.client
r = auto_config.handle(cmd, proxy)
if r : return r
tokens = None
#norm_query
#- looks like we have to forward this statement to a backend
if is_debug :
print "[read_query] " + proxy.connection.client.src.name
print " current back = %s" % proxy.connection.backend_ndx
print " client default db = " + c.default_db
print " client username = " + c.username
if cmd.type == proxy.COM_QUERY :
print " query = " + cmd.query
if cmd.type == proxy.COM_QUIT :
#- don't send COM_QUIT to the backend. We manage the connection
#- in all aspects.
proxy.response = {
'type' : proxy.MYSQLD_PACKET_OK,
}
if is_debug :
print " (QUIT) current back = %s" % proxy.connection.backend_ndx
return proxy.PROXY_SEND_RESULT
proxy.queries.append(1, packet, True)
#- read/write splitting
#-
#- send all non-transactional SELECTs to a slave
if not proxy.globals.is_in_transaction and cmd.type == proxy.COM_QUERY:
tokens = tokens or tokenizer.tokenize(cmd.query)
stmt = tokenizer.first_stmt_token(tokens)
if stmt.token_name == "TK_SQL_SELECT" :
proxy.globals.is_in_select_calc_found_rows = False
is_insert_id = False
for token in tokens:
#- SQL_CALC_FOUND_ROWS + FOUND_ROWS() have to be executed
#- on the same connection
#- print "token: " + token.token_name
#- print " val: " + token.text
if not proxy.globals.is_in_select_calc_found_rows and \
token.token_name == "TK_SQL_SQL_CALC_FOUND_ROWS" :
proxy.globals.is_in_select_calc_found_rows = True
elif not is_insert_id and token.token_name == "TK_LITERAL" :
utext = token.text.upper()
if utext in ("LAST_INSERT_ID", "@@INSERT_ID"):
is_insert_id = True
#- we found the two special tokens, no need to look further
if is_insert_id and proxy.globals.is_in_select_calc_found_rows :
break
#- if we ask for the last-insert-id we have to ask it on the original
#- connection
if not is_insert_id :
backend_ndx = lb.idle_ro(proxy)
if is_debug:
print ' no is_insert_id, select', backend_ndx
if backend_ndx >= 0 :
proxy.connection.backend_ndx = backend_ndx
else:
print " found a SELECT LAST_INSERT_ID(), staying on the same back"
#- no backend selected yet, pick a master
if proxy.connection.backend_ndx == -1:
#- we don't have a backend right now
#-
#- let's pick a master as a good default
#-
proxy.connection.backend_ndx = lb.idle_failsafe_rw(proxy)
if is_debug:
print ' backend_ndx = -1(2), select', proxy.connection.backend_ndx
#- by now we should have a backend
#-
#- in case the master is down, we have to close the client connections
#- otherwise we can go on
if proxy.connection.backend_ndx == -1:
return proxy.PROXY_SEND_QUERY
s = proxy.connection.server
#- if client and server db don't match, adjust the server-side
#-
#- skip it if we send an INIT_DB anyway
if cmd.type != proxy.COM_INIT_DB and\
c.default_db and c.default_db != s.default_db :
print " server default db: " + s.default_db
print " client default db: " + c.default_db
print " syncronizing"
proxy.queries.prepend(2, chr(proxy.COM_INIT_DB) + c.default_db, True )
#- send to master
if is_debug :
if proxy.connection.backend_ndx >= 0 :
b = proxy.globals.backends[proxy.connection.backend_ndx]
print " sing to backend : " + b.dst.name
print " is_slave : " + str(b.type == proxy.BACKEND_TYPE_RO)
print " server default db: " + s.default_db
print " server username : " + s.username
print " in_trans : " + str(proxy.globals.is_in_transaction)
print " in_calc_found : " + str(proxy.globals.is_in_select_calc_found_rows)
print " COM_QUERY : " + str(cmd.type == proxy.COM_QUERY)
return proxy.PROXY_SEND_QUERY
#--
#- as long as we are in a transaction keep the connection
#- otherwise release it so another client can use it
@check_rwsplit_config
def read_query_result(proxy, inj ):
is_debug = proxy.globals.config.rwsplit.is_debug
res = inj.resultset
flags = res.flags
if inj.id != 1 :
#- ignore the result of the USE <default_db>
#- the DB might not exist on the backend, what to do?
#-
if inj.id == 2 :
#- the injected INIT_DB failed as the slave doesn't have this DB
#- or doesn't have permissions to read from it
if res.query_status == proxy.MYSQLD_PACKET_ERR :
proxy.queries.reset()
proxy.response = {
'type' : proxy.MYSQLD_PACKET_ERR,
'errmsg' : "can't change DB "+ proxy.connection.client.default_db +
" to on slave " + proxy.globals.backends[proxy.connection.backend_ndx].dst.name
}
return proxy.PROXY_SEND_RESULT
return proxy.PROXY_IGNORE_RESULT
proxy.globals.is_in_transaction = flags.in_trans
have_last_insert_id = (res.insert_id and (res.insert_id > 0))
if not proxy.globals.is_in_transaction and\
not proxy.globals.is_in_select_calc_found_rows and\
not have_last_insert_id :
#- release the backend
if is_debug:
print 'released backend: not in a transaction, no SQL_CALC_FOUND_ROWS, no last_insert_id'
proxy.connection.backend_ndx = -1
elif is_debug :
print("(read_query_result) staying on the same back")
print(" in_trans : " + str(proxy.globals.is_in_transaction))
print(" in_calc_found : " + str(proxy.globals.is_in_select_calc_found_rows))
print(" have_insert_id : " + str(have_last_insert_id))
#--
#- close the connections if we have enough connections in the pool
#-
#- @return nil - close connection
#- IGNORE_RESULT - store connection in the pool
@check_rwsplit_config
def disconnect_client(proxy):
is_debug = proxy.globals.config.rwsplit.is_debug
if is_debug :
print("[disconnect_client] " + proxy.connection.client.src.name)
#- make sure we are disconnected from the backend
#- so the connection can move into the pool
if is_debug:
print 'disconnect client, backend_ndx=-1'
proxy.connection.backend_ndx = -1
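# Editor's note: illustrative sketch, not part of the upstream file. It restates
# the backend-selection rule from connect_server() as a pure function over plain
# dicts, so it can be read and tested without the mysql-proxy runtime `proxy`
# object; all field names here are hypothetical.
def _pick_backend_sketch(backends, min_idle_connections):
    rw_fallback = -1
    for i, b in enumerate(backends):
        if b['state'] == 'down':
            continue
        if b['cur_idle'] < min_idle_connections and b['type'] in ('rw', 'ro'):
            return i  # this backend's pool is below min-idle, warm it up first
        if b['type'] == 'rw' and rw_fallback == -1:
            rw_fallback = i  # remember the first usable master
    return rw_fallback  # -1 means no usable backend at all
# e.g. _pick_backend_sketch([{'state': 'up', 'type': 'rw', 'cur_idle': 4}], 4) returns 0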
|
mbernasocchi/inasafe
|
refs/heads/develop
|
safe/gis/processing_tools.py
|
1
|
# coding=utf-8
"""Processing utilities and tools."""
import processing
from qgis.core import (
QgsApplication,
QgsFeatureRequest,
QgsProcessingContext,
QgsProcessingFeedback,
QgsProject)
from qgis.analysis import QgsNativeAlgorithms
__copyright__ = "Copyright 2018, The InaSAFE Project"
__license__ = "GPL version 3"
__email__ = "info@inasafe.org"
__revision__ = '$Format:%H$'
def initialize_processing():
"""
Initializes processing, if it's not already been done
"""
# Required if running from command line
if not QgsApplication.processingRegistry().algorithms():
QgsApplication.processingRegistry().addProvider(QgsNativeAlgorithms())
processing.Processing.initialize()
def create_processing_context(feedback):
"""
Creates a default processing context
:param feedback: Linked processing feedback object
:type feedback: QgsProcessingFeedback
:return: Processing context
:rtype: QgsProcessingContext
"""
context = QgsProcessingContext()
context.setFeedback(feedback)
context.setProject(QgsProject.instance())
# skip Processing geometry checks - Inasafe has its own geometry validation
# routines which have already been used
context.setInvalidGeometryCheck(QgsFeatureRequest.GeometryNoCheck)
return context
def create_processing_feedback():
"""
Creates a default processing feedback object
:return: Processing feedback
:rtype: QgsProcessingFeedback
"""
return QgsProcessingFeedback()
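# Editor's note: illustrative usage sketch, not part of the upstream file. It
# assumes a working QGIS/Processing environment and an already-loaded vector
# layer; the algorithm id and parameter values are hypothetical examples.
def _processing_demo(input_layer):
    initialize_processing()
    feedback = create_processing_feedback()
    context = create_processing_context(feedback)
    return processing.run(
        'native:buffer',
        {'INPUT': input_layer, 'DISTANCE': 10.0, 'OUTPUT': 'memory:'},
        context=context,
        feedback=feedback)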
|
rajashreer7/autotest-client-tests
|
refs/heads/master
|
linux-tools/prelink/prelink.py
|
3
|
#!/usr/bin/env python
import os, subprocess
import logging
from autotest.client import test
from autotest.client.shared import error, software_manager
sm = software_manager.SoftwareManager()
class prelink(test.test):
"""
Autotest module for testing basic functionality
of prelink
@author Athira Rajeev <atrajeev@in.ibm.com>
"""
version = 1
nfail = 0
path = ''
def initialize(self, test_path=''):
"""
Sets the overall failure counter for the test.
"""
self.nfail = 0
for package in ['gcc', 'gcc-c++']:
if not sm.check_installed(package):
logging.debug("%s missing - trying to install", package)
sm.install(package)
ret_val = subprocess.Popen(['make', 'all'], cwd="%s/prelink" %(test_path))
ret_val.communicate()
if ret_val.returncode != 0:
self.nfail += 1
logging.info('\n Test initialized successfully')
def run_once(self, test_path=''):
"""
Trigger test run
"""
try:
os.environ["LTPBIN"] = "%s/shared" %(test_path)
ret_val = subprocess.Popen(['./prelink.sh'], cwd="%s/prelink" %(test_path))
ret_val.communicate()
if ret_val.returncode != 0:
self.nfail += 1
except error.CmdError, e:
self.nfail += 1
logging.error("Test Failed: %s", e)
def postprocess(self):
if self.nfail != 0:
logging.info('\n nfails is non-zero')
raise error.TestError('\nTest failed')
else:
logging.info('\n Test completed successfully ')
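# Editor's note: illustrative sketch, not part of the upstream file. In an autotest
# control file the class above is typically invoked like this; the test_path value
# is a hypothetical placeholder for the checked-out linux-tools directory.
def _control_file_sketch(job):
    # `job` is the object autotest injects into control files
    job.run_test('prelink', test_path='/path/to/autotest-client-tests/linux-tools')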
|
easytaxibr/redash
|
refs/heads/master
|
redash/handlers/queries.py
|
2
|
from flask import request
from flask_restful import abort
from flask_login import login_required
import sqlparse
from funcy import distinct, take
from itertools import chain
from redash.handlers.base import routes, org_scoped_rule, paginate
from redash.handlers.query_results import run_query
from redash import models
from redash.permissions import require_permission, require_access, require_admin_or_owner, not_view_only, view_only, \
require_object_modify_permission, can_modify
from redash.handlers.base import BaseResource, get_object_or_404
from redash.utils import collect_parameters_from_request
@routes.route(org_scoped_rule('/api/queries/format'), methods=['POST'])
@login_required
def format_sql_query(org_slug=None):
arguments = request.get_json(force=True)
query = arguments.get("query", "")
return sqlparse.format(query, reindent=True, keyword_case='upper')
class QuerySearchResource(BaseResource):
@require_permission('view_query')
def get(self):
term = request.args.get('q', '')
return [q.to_dict(with_last_modified_by=False) for q in models.Query.search(term, self.current_user.groups)]
class QueryRecentResource(BaseResource):
@require_permission('view_query')
def get(self):
queries = models.Query.recent(self.current_user.groups, self.current_user.id)
recent = [d.to_dict(with_last_modified_by=False) for d in queries]
global_recent = []
if len(recent) < 10:
global_recent = [d.to_dict(with_last_modified_by=False) for d in models.Query.recent(self.current_user.groups)]
return take(20, distinct(chain(recent, global_recent), key=lambda d: d['id']))
class QueryListResource(BaseResource):
@require_permission('create_query')
def post(self):
query_def = request.get_json(force=True)
data_source = models.DataSource.get_by_id_and_org(query_def.pop('data_source_id'), self.current_org)
require_access(data_source.groups, self.current_user, not_view_only)
for field in ['id', 'created_at', 'api_key', 'visualizations', 'latest_query_data', 'last_modified_by']:
query_def.pop(field, None)
# If we already executed this query, save the query result reference
if 'latest_query_data_id' in query_def:
query_def['latest_query_data'] = query_def.pop('latest_query_data_id')
query_def['user'] = self.current_user
query_def['data_source'] = data_source
query_def['org'] = self.current_org
query = models.Query.create(**query_def)
self.record_event({
'action': 'create',
'object_id': query.id,
'object_type': 'query'
})
return query.to_dict()
@require_permission('view_query')
def get(self):
results = models.Query.all_queries(self.current_user.groups)
page = request.args.get('page', 1, type=int)
page_size = request.args.get('page_size', 25, type=int)
return paginate(results, page, page_size, lambda q: q.to_dict(with_stats=True, with_last_modified_by=False))
class MyQueriesResource(BaseResource):
@require_permission('view_query')
def get(self):
drafts = request.args.get('drafts') is not None
results = models.Query.by_user(self.current_user, drafts)
page = request.args.get('page', 1, type=int)
page_size = request.args.get('page_size', 25, type=int)
return paginate(results, page, page_size, lambda q: q.to_dict(with_stats=True, with_last_modified_by=False))
class QueryResource(BaseResource):
@require_permission('edit_query')
def post(self, query_id):
query = get_object_or_404(models.Query.get_by_id_and_org, query_id, self.current_org)
query_def = request.get_json(force=True)
require_object_modify_permission(query, self.current_user)
for field in ['id', 'created_at', 'api_key', 'visualizations', 'latest_query_data', 'user', 'last_modified_by', 'org']:
query_def.pop(field, None)
if 'latest_query_data_id' in query_def:
query_def['latest_query_data'] = query_def.pop('latest_query_data_id')
if 'data_source_id' in query_def:
query_def['data_source'] = query_def.pop('data_source_id')
query_def['last_modified_by'] = self.current_user
query_def['changed_by'] = self.current_user
try:
query.update_instance(**query_def)
except models.ConflictDetectedError:
abort(409)
# old_query = copy.deepcopy(query.to_dict())
# new_change = query.update_instance_tracked(changing_user=self.current_user, old_object=old_query, **query_def)
# abort(409) # HTTP 'Conflict' status code
result = query.to_dict(with_visualizations=True)
return result
@require_permission('view_query')
def get(self, query_id):
q = get_object_or_404(models.Query.get_by_id_and_org, query_id, self.current_org)
require_access(q.groups, self.current_user, view_only)
result = q.to_dict(with_visualizations=True)
result['can_edit'] = can_modify(q, self.current_user)
return result
# TODO: move to resource of its own? (POST /queries/{id}/archive)
def delete(self, query_id):
query = get_object_or_404(models.Query.get_by_id_and_org, query_id, self.current_org)
require_admin_or_owner(query.user_id)
query.archive(self.current_user)
class QueryRefreshResource(BaseResource):
def post(self, query_id):
query = get_object_or_404(models.Query.get_by_id_and_org, query_id, self.current_org)
require_access(query.groups, self.current_user, not_view_only)
parameter_values = collect_parameters_from_request(request.args)
return run_query(query.data_source, parameter_values, query.query, query.id)
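# Editor's note: illustrative sketch, not part of the upstream file. It shows the
# effect of the sqlparse call behind format_sql_query() above on a hypothetical
# query string, without going through the Flask request machinery.
def _format_sql_demo():
    return sqlparse.format(
        'select id, name from users where id = 1',
        reindent=True, keyword_case='upper')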
|
InfoAgeTech/django-core
|
refs/heads/master
|
django_core/views/mixins/csrf.py
|
1
|
from __future__ import unicode_literals
from django.utils.decorators import method_decorator
from django.views.decorators.csrf import csrf_exempt
class CsrfExemptViewMixin(object):
"""Mixin for the csrf_exempt decorator."""
@method_decorator(csrf_exempt)
def dispatch(self, *args, **kwargs):
return super(CsrfExemptViewMixin, self).dispatch(*args, **kwargs)
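# Editor's note: illustrative usage sketch, not part of the upstream file. The view
# class and response body are hypothetical; it only shows where the mixin sits in
# the MRO of a Django class-based view.
def _csrf_exempt_demo():
    from django.http import HttpResponse
    from django.views.generic import View
    class WebhookView(CsrfExemptViewMixin, View):
        # POST requests reach this handler without CSRF validation because
        # dispatch() is wrapped with csrf_exempt by the mixin above
        def post(self, request, *args, **kwargs):
            return HttpResponse('ok')
    return WebhookView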
|
zhangtao11/scrapy
|
refs/heads/master
|
scrapy/squeue.py
|
144
|
import warnings
from scrapy.exceptions import ScrapyDeprecationWarning
warnings.warn("Module `scrapy.squeue` is deprecated, "
"use `scrapy.squeues` instead",
ScrapyDeprecationWarning, stacklevel=2)
from scrapy.squeues import *
|
showerst/openstates
|
refs/heads/master
|
openstates/az/__init__.py
|
1
|
import datetime
import lxml.html
from billy.utils.fulltext import text_after_line_numbers, pdfdata_to_text
from .bills import AZBillScraper
from .legislators import AZLegislatorScraper
from .committees import AZCommitteeScraper
from .events import AZEventScraper
metadata = dict(
name='Arizona',
abbreviation='az',
legislature_name='Arizona State Legislature',
legislature_url='http://www.azleg.gov/',
capitol_timezone='America/Denver',
chambers = {
'upper': {'name': 'Senate', 'title': 'Senator'},
'lower': {'name': 'House', 'title': 'Representative'},
},
terms = [
{'name': '49',
'sessions': [
'49th-1st-special',
'49th-2nd-special',
'49th-1st-regular',
'49th-3rd-special',
'49th-4th-special',
'49th-5th-special',
'49th-6th-special',
'49th-7th-special',
'49th-8th-special',
'49th-2nd-regular',
'49th-9th-special',
],
'start_year': 2009, 'end_year': 2010
},
{'name': '50',
'sessions': [
'50th-1st-special',
'50th-2nd-special',
'50th-3rd-special',
'50th-4th-special',
'50th-1st-regular',
'50th-2nd-regular',
],
'start_year': 2011, 'end_year': 2012
},
{'name': '51',
'sessions': [
'51st-1st-regular',
'51st-1st-special',
'51st-2nd-regular',
'51st-2nd-special',
],
'start_year': 2013, 'end_year': 2014
},
{
'name': '52',
'sessions': [
'52nd-1st-regular',
'52nd-1st-special',
'52nd-2nd-regular',
],
'start_year': 2015,
'end_year': 2016
},
{
'name': '53',
'sessions': [
'53rd-1st-regular',
],
'start_year': 2017,
'end_year': 2018
},
],
session_details={
'49th-1st-regular':
{'type': 'primary', 'session_id': 87,
'display_name': '49th Legislature, 1st Regular Session (2009)',
'start_date': datetime.date(2009, 1, 12),
'end_date': datetime.date(2009, 7, 1),
'_scraped_name': '2009 - Forty-ninth Legislature - First Regular Session',
},
'49th-1st-special':
{'type': 'special', 'session_id': 89,
'display_name': '49th Legislature, 1st Special Session (2009)',
'start_date': datetime.date(2009, 1, 28),
'end_date': datetime.date(2009, 1, 31),
'_scraped_name': '2009 - Forty-ninth Legislature - First Special Session',
},
'49th-2nd-special':
{'type': 'special', 'session_id': 90,
'display_name': '49th Legislature, 2nd Special Session (2009)',
'start_date': datetime.date(2009, 5, 21),
'end_date': datetime.date(2009, 5, 27),
'_scraped_name': '2009 - Forty-ninth Legislature - Second Special Session',
},
'49th-3rd-special':
{'type': 'special', 'session_id': 91,
'display_name': '49th Legislature, 3rd Special Session (2009)',
'start_date': datetime.date(2009, 7, 6),
'end_date': datetime.date(2009, 8, 25),
'_scraped_name': '2009 - Forty-ninth Legislature - Third Special Session',
},
'49th-4th-special':
{'type': 'special', 'session_id': 92,
'display_name': '49th Legislature, 4th Special Session (2009)',
'start_date': datetime.date(2009, 11, 17),
'end_date': datetime.date(2009, 11, 23),
'_scraped_name': '2009 - Forty-ninth Legislature - Fourth Special Session',
},
'49th-5th-special':
{'type': 'special', 'session_id': 94,
'display_name': '49th Legislature, 5th Special Session (2009)',
'start_date': datetime.date(2009, 12, 17),
'end_date': datetime.date(2009, 12, 19),
'_scraped_name': '2009 - Forty-ninth Legislature - Fifth Special Session',
},
'49th-6th-special':
{'type': 'special', 'session_id': 95,
'display_name': '49th Legislature, 6th Special Session (2010)',
'start_date': datetime.date(2010, 2, 1),
'end_date': datetime.date(2010, 2, 11),
'_scraped_name': '2010 - Forty-ninth Legislature - Sixth Special Session',
},
'49th-7th-special':
{'type': 'special', 'session_id': 96,
'display_name': '49th Legislature, 7th Special Session (2010)',
'start_date': datetime.date(2010, 3, 8),
'end_date': datetime.date(2010, 3, 16),
'_scraped_name': '2010 - Forty-ninth Legislature - Seventh Special Session',
},
'49th-8th-special':
{'type': 'special', 'session_id': 101,
'display_name': '49th Legislature, 8th Special Session (2010)',
'start_date': datetime.date(2010, 3, 29),
'end_date': datetime.date(2010, 4, 1),
'_scraped_name': '2010 - Forty-ninth Legislature - Eighth Special Session',
},
'49th-9th-special':
{'type': 'special', 'session_id': 103,
'display_name': '49th Legislature, 9th Special Session (2010)',
'start_date': datetime.date(2010, 8, 9),
'end_date': datetime.date(2010, 8, 11),
'_scraped_name': '2010 - Forty-ninth Legislature - Ninth Special Session',
},
'49th-2nd-regular':
{'type': 'primary', 'session_id': 93,
'display_name': '49th Legislature, 2nd Regular Session (2010)',
'start_date': datetime.date(2010, 1, 11),
'end_date': datetime.date(2010, 4, 29),
'_scraped_name': '2010 - Forty-ninth Legislature - Second Regular Session',
},
'50th-1st-special':
{'type': 'special', 'session_id': 104,
'display_name': '50th Legislature, 1st Special Session (2011)',
'start_date': datetime.date(2011, 1, 19),
'end_date': datetime.date(2011, 1, 20),
'_scraped_name': '2011 - Fiftieth Legislature - First Special Session',
},
'50th-2nd-special':
{'type': 'special', 'session_id': 105,
'display_name': '50th Legislature, 2nd Special Session (2011)',
'start_date': datetime.date(2011, 2, 14),
'end_date': datetime.date(2011, 2, 16),
'_scraped_name': '2011 - Fiftieth Legislature - Second Special Session',
},
'50th-3rd-special':
{'type': 'special', 'session_id': 106,
'display_name': '50th Legislature, 3rd Special Session (2011)',
'start_date': datetime.date(2011, 6, 10),
'end_date': datetime.date(2011, 6, 13),
'_scraped_name': '2011 - Fiftieth Legislature - Third Special Session',
},
'50th-4th-special':
{'type': 'special', 'session_id': 108,
'display_name': '50th Legislature, 4th Special Session (2011)',
'start_date': datetime.date(2011, 11, 1),
'end_date': datetime.date(2011, 11, 1),
'_scraped_name': '2011 - Fiftieth Legislature - Fourth Special Session',
},
'50th-1st-regular':
{'type': 'primary', 'session_id': 102,
'display_name': '50th Legislature, 1st Regular Session (2011)',
'start_date': datetime.date(2011, 1, 10),
'end_date': datetime.date(2011,4,20),
'_scraped_name': '2011 - Fiftieth Legislature - First Regular Session',
},
'50th-2nd-regular':
{'type': 'primary', 'session_id': 107,
'display_name': '50th Legislature, 2nd Regular Session (2012)',
'_scraped_name': '2012 - Fiftieth Legislature - Second Regular Session',
#'start_date': , 'end_date':
},
'51st-1st-regular':
{'type': 'primary', 'session_id': 110,
'display_name': '51st Legislature - 1st Regular Session (2013)',
'_scraped_name': '2013 - Fifty-first Legislature - First Regular Session'
},
'51st-1st-special':
{'type': 'primary', 'session_id': 111,
'display_name': '51st Legislature - 1st Special Session (2013)',
'_scraped_name': '2013 - Fifty-first Legislature - First Special Session'
},
'51st-2nd-regular':
{'type': 'primary', 'session_id': 112,
'display_name': '51st Legislature - 2nd Regular Session',
'_scraped_name': '2014 - Fifty-first Legislature - Second Regular Session'
},
'51st-2nd-special':
{'type': 'special', 'session_id': 113,
'display_name': '51st Legislature - 2nd Special Session',
'_scraped_name': '2014 - Fifty-first Legislature - Second Special Session'
},
'52nd-1st-regular':
{'type': 'primary', 'session_id': 114,
'display_name': '52nd Legislature - 1st Regular Session',
'_scraped_name': '2015 - Fifty-second Legislature - First Regular Session'
},
'52nd-1st-special': {
'type': 'special',
'session_id': 116, # Yes, this is weirdly out of order.
'display_name': '52nd Legislature - 1st Special Session',
'_scraped_name': '2015 - Fifty-second Legislature - First Special Session',
},
'52nd-2nd-regular': {
'type': 'primary',
'session_id': 115,
'display_name': '52nd Legislature - 2nd Regular Session',
'_scraped_name': '2016 - Fifty-second Legislature - Second Regular Session',
},
'53rd-1st-regular': {
'type': 'primary',
'session_id': 117,
'display_name': '53rd Legislature - 1st Regular Session',
'_scraped_name': '2017 - Fifty-third Legislature - First Regular Session',
},
# get session id from http://www.azleg.gov/SelectSession.asp select
},
_ignored_scraped_sessions=[
'2008 - Forty-eighth Legislature - Second Regular Session',
'2007 - Forty-eighth Legislature - First Regular Session',
'2006 - Forty-seventh Legislature - First Special Session',
'2006 - Forty-seventh Legislature - Second Regular Session',
'2005 - Forty-seventh Legislature - First Regular Session',
'2004 - Forty-sixth Legislature - Second Regular Session',
'2003 - Forty-sixth Legislature - Second Special Session',
'2003 - Forty-sixth Legislature - First Special Session',
'2003 - Forty-sixth Legislature - First Regular Session',
'2002 - Forty-fifth Legislature - Sixth Special Session',
'2002 - Forty-fifth Legislature - Fifth Special Session',
'2002 - Forty-fifth Legislature - Fourth Special Session',
'2002 - Forty-fifth Legislature - Third Special Session',
'2002 - Forty-fifth Legislature - Second Regular Session',
'2001 - Forty-fifth Legislature - Second Special Session',
'2001 - Forty-fifth Legislature - First Special Session',
'2001 - Forty-fifth Legislature - First Regular Session',
'2000 - Forty-fourth Legislature - Seventh Special Session',
'2000 - Forty-fourth Legislature - Sixth Special Session',
'2000 - Forty-fourth Legislature - Fifth Special Session',
'2000 - Forty-fourth Legislature - Fourth Special Session',
'2000 - Forty-fourth Legislature - Second Regular Session',
'1999 - Forty-fourth Legislature - Third Special Session',
'1999 - Forty-fourth Legislature - Second Special Session',
'1999 - Forty-fourth Legislature - First Special Session',
'1999 - Forty-fourth Legislature - First Regular Session',
'1998 - Forty-third Legislature - Sixth Special Session',
'1998 - Forty-third Legislature - Fifth Special Session',
'1998 - Forty-third Legislature - Fourth Special Session',
'1998 - Forty-third Legislature - Third Special Session',
'1998 - Forty-third Legislature - Second Regular Session',
'1997 - Forty-third Legislature - Second Special Session',
'1997 - Forty-third Legislature - First Special Session',
'1997 - Forty-third Legislature - First Regular Session',
'1996 - Forty-second Legislature - Seventh Special Session',
'1996 - Forty-second Legislature - Sixth Special Session',
'1996 - Forty-second Legislature - Fifth Special Session',
'1996 - Forty-second Legislature - Second Regular Session',
'1995 - Forty-second Legislature - Fourth Special Session',
'1995 - Forty-second Legislature - Third Special Session',
'1995 - Forty-Second Legislature - Second Special Session',
'1995 - Forty-Second Legislature - First Special Session',
'1995 - Forty-second Legislature - First Regular Session',
'1994 - Forty-first Legislature - Ninth Special Session',
'1994 - Forty-first Legislature - Eighth Special Session',
'1994 - Forty-first Legislature - Second Regular Session',
'1993 - Forty-first Legislature - Seventh Special Session',
'1993 - Forty-first Legislature - Sixth Special Session',
'1993 - Forty-first Legislature - Fifth Special Session',
'1993 - Forty-first Legislature - Fourth Special Session',
'1993 - Forty-first Legislature - Third Special Session',
'1993 - Forty-first Legislature - Second Special Session',
'1993 - Forty-first Legislature - First Special Session',
'1993 - Forty-first Legislature - First Regular Session',
'1992 - Fortieth Legislature - Ninth Special Session',
'1992 - Fortieth Legislature - Eighth Special Session',
'1992 - Fortieth Legislature - Seventh Special Session',
'1992 - Fortieth Legislature - Fifth Special Session',
'1992 - Fortieth Legislature - Sixth Special Session',
'1992 - Fortieth Legislature - Second Regular Session',
'1991 - Fortieth Legislature - Fourth Special Session',
'1991 - Fortieth Legislature - Third Special Session',
'1991 - Fortieth Legislature - Second Special Session',
'1991 - Fortieth Legislature - First Special Session',
'1991 - Fortieth Legislature - First Regular Session',
'1990 - Thirty-ninth Legislature - Fifth Special Session',
'1990 - Thirty-ninth Legislature - Fourth Special Session',
'1990 - Thirty-ninth Legislature - Third Special Session',
'1990 - Thirty-ninth Legislature - Second Regular Session',
'1989 - Thirty-ninth Legislature - Second Special Session',
'1989 - Thirty-ninth Legislature - First Special Session',
'1989 - Thirty-ninth Legislature - First Regular Session'
],
feature_flags=[ 'events', 'influenceexplorer' ],
)
def session_list():
import re
import requests
session = requests.Session()
data = session.get('http://www.azleg.gov/')
#TODO: JSON at https://apps.azleg.gov/api/Session/
doc = lxml.html.fromstring(data.text)
sessions = doc.xpath('//select/option/text()')
sessions = [re.sub(r'\(.+$', '', x).strip() for x in sessions]
return sessions
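# Editor's note: illustrative sketch, not part of the upstream file. It shows how a
# scraper can resolve the azleg.gov session_id recorded in the metadata dict above;
# the helper name is hypothetical.
def _session_id_for(session_name):
    return metadata['session_details'][session_name]['session_id']
# e.g. _session_id_for('53rd-1st-regular') returns 117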
|
jean/sentry
|
refs/heads/master
|
src/sentry/web/frontend/auth_provider_login.py
|
38
|
from __future__ import absolute_import, print_function
from django.core.urlresolvers import reverse
from sentry.auth.helper import AuthHelper
from sentry.web.frontend.base import BaseView
class AuthProviderLoginView(BaseView):
auth_required = False
def handle(self, request):
helper = AuthHelper.get_for_request(request)
if helper is None:
return self.redirect(reverse('sentry-login'))
if not helper.pipeline_is_valid():
return helper.error('Something unexpected happened during authentication.')
return helper.current_step()
|
fentas/phantomjs
|
refs/heads/master
|
src/qt/qtwebkit/Tools/Scripts/webkitpy/common/net/unittestresults_unittest.py
|
124
|
# Copyright (c) 2012, Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import unittest2 as unittest
from unittestresults import UnitTestResults
class UnitTestResultsTest(unittest.TestCase):
def test_nostring(self):
self.assertIsNone(UnitTestResults.results_from_string(None))
def test_emptystring(self):
self.assertIsNone(UnitTestResults.results_from_string(""))
def test_nofailures(self):
no_failures_xml = """<?xml version="1.0" encoding="UTF-8"?>
<testsuites tests="3" failures="0" disabled="0" errors="0" time="11.35" name="AllTests">
<testsuite name="RenderTableCellDeathTest" tests="3" failures="0" disabled="0" errors="0" time="0.677">
<testcase name="CanSetColumn" status="run" time="0.168" classname="RenderTableCellDeathTest" />
<testcase name="CrashIfSettingUnsetColumnIndex" status="run" time="0.129" classname="RenderTableCellDeathTest" />
<testcase name="CrashIfSettingUnsetRowIndex" status="run" time="0.123" classname="RenderTableCellDeathTest" />
</testsuite>
</testsuites>"""
self.assertEqual([], UnitTestResults.results_from_string(no_failures_xml))
def test_onefailure(self):
one_failure_xml = """<?xml version="1.0" encoding="UTF-8"?>
<testsuites tests="4" failures="1" disabled="0" errors="0" time="11.35" name="AllTests">
<testsuite name="RenderTableCellDeathTest" tests="4" failures="1" disabled="0" errors="0" time="0.677">
<testcase name="CanSetColumn" status="run" time="0.168" classname="RenderTableCellDeathTest" />
<testcase name="CrashIfSettingUnsetColumnIndex" status="run" time="0.129" classname="RenderTableCellDeathTest" />
<testcase name="CrashIfSettingUnsetRowIndex" status="run" time="0.123" classname="RenderTableCellDeathTest" />
<testcase name="FAILS_DivAutoZoomParamsTest" status="run" time="0.02" classname="WebFrameTest">
<failure message="Value of: scale
 Actual: 4
Expected: 1" type=""><![CDATA[../../Source/WebKit/chromium/tests/WebFrameTest.cpp:191
Value of: scale
Actual: 4
Expected: 1]]></failure>
</testcase>
</testsuite>
</testsuites>"""
expected = ["WebFrameTest.FAILS_DivAutoZoomParamsTest"]
self.assertEqual(expected, UnitTestResults.results_from_string(one_failure_xml))
def test_multiple_failures_per_test(self):
multiple_failures_per_test_xml = """<?xml version="1.0" encoding="UTF-8"?>
<testsuites tests="4" failures="2" disabled="0" errors="0" time="11.35" name="AllTests">
<testsuite name="UnitTests" tests="4" failures="2" disable="0" errors="0" time="10.0">
<testcase name="TestOne" status="run" time="0.5" classname="ClassOne">
<failure message="Value of: pi
 Actual: 3
Expected: 3.14" type=""><![CDATA[../../Source/WebKit/chromium/tests/ClassOneTest.cpp:42
Value of: pi
Actual: 3
Expected: 3.14]]></failure>
</testcase>
<testcase name="TestTwo" status="run" time="0.5" classname="ClassTwo">
<failure message="Value of: e
 Actual: 2
Expected: 2.71" type=""><![CDATA[../../Source/WebKit/chromium/tests/ClassTwoTest.cpp:30
Value of: e
Actual: 2
Expected: 2.71]]></failure>
<failure message="Value of: tau
 Actual: 6
Expected: 6.28" type=""><![CDATA[../../Source/WebKit/chromium/tests/ClassTwoTest.cpp:55
Value of: tau
Actual: 6
Expected: 6.28]]></failure>
</testcase>
</testsuite>
</testsuites>"""
expected = ["ClassOne.TestOne", "ClassTwo.TestTwo"]
self.assertEqual(expected, UnitTestResults.results_from_string(multiple_failures_per_test_xml))
|
zeha/multiapt
|
refs/heads/master
|
extlib/paramiko-1.7.3/paramiko/util.py
|
3
|
# Copyright (C) 2003-2007 Robey Pointer <robey@lag.net>
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA.
"""
Useful functions used by the rest of paramiko.
"""
from __future__ import generators
from binascii import hexlify, unhexlify
import sys
import struct
import traceback
import threading
from paramiko.common import *
from paramiko.config import SSHConfig
# Change by RogerB - python < 2.3 doesn't have enumerate so we implement it
if sys.version_info < (2,3):
class enumerate:
def __init__ (self, sequence):
self.sequence = sequence
def __iter__ (self):
count = 0
for item in self.sequence:
yield (count, item)
count += 1
def inflate_long(s, always_positive=False):
"turns a normalized byte string into a long-int (adapted from Crypto.Util.number)"
out = 0L
negative = 0
if not always_positive and (len(s) > 0) and (ord(s[0]) >= 0x80):
negative = 1
if len(s) % 4:
filler = '\x00'
if negative:
filler = '\xff'
s = filler * (4 - len(s) % 4) + s
for i in range(0, len(s), 4):
out = (out << 32) + struct.unpack('>I', s[i:i+4])[0]
if negative:
out -= (1L << (8 * len(s)))
return out
def deflate_long(n, add_sign_padding=True):
"turns a long-int into a normalized byte string (adapted from Crypto.Util.number)"
# after much testing, this algorithm was deemed to be the fastest
s = ''
n = long(n)
while (n != 0) and (n != -1):
s = struct.pack('>I', n & 0xffffffffL) + s
n = n >> 32
# strip off leading zeros, FFs
for i in enumerate(s):
if (n == 0) and (i[1] != '\000'):
break
if (n == -1) and (i[1] != '\xff'):
break
else:
# degenerate case, n was either 0 or -1
i = (0,)
if n == 0:
s = '\000'
else:
s = '\xff'
s = s[i[0]:]
if add_sign_padding:
if (n == 0) and (ord(s[0]) >= 0x80):
s = '\x00' + s
if (n == -1) and (ord(s[0]) < 0x80):
s = '\xff' + s
return s
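# A quick round-trip sanity check for the two helpers above (illustrative
# values only):
#   deflate_long(0xdeadbeef)              -> '\x00\xde\xad\xbe\xef' (sign byte prepended)
#   inflate_long('\x00\xde\xad\xbe\xef')  -> 0xdeadbeefL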
def format_binary_weird(data):
out = ''
for i in enumerate(data):
out += '%02X' % ord(i[1])
if i[0] % 2:
out += ' '
if i[0] % 16 == 15:
out += '\n'
return out
def format_binary(data, prefix=''):
x = 0
out = []
while len(data) > x + 16:
out.append(format_binary_line(data[x:x+16]))
x += 16
if x < len(data):
out.append(format_binary_line(data[x:]))
return [prefix + x for x in out]
def format_binary_line(data):
left = ' '.join(['%02X' % ord(c) for c in data])
right = ''.join([('.%c..' % c)[(ord(c)+63)//95] for c in data])
return '%-50s %s' % (left, right)
def hexify(s):
return hexlify(s).upper()
def unhexify(s):
return unhexlify(s)
def safe_string(s):
out = ''
for c in s:
if (ord(c) >= 32) and (ord(c) <= 127):
out += c
else:
out += '%%%02X' % ord(c)
return out
# ''.join([['%%%02X' % ord(c), c][(ord(c) >= 32) and (ord(c) <= 127)] for c in s])
def bit_length(n):
norm = deflate_long(n, 0)
hbyte = ord(norm[0])
bitlen = len(norm) * 8
while not (hbyte & 0x80):
hbyte <<= 1
bitlen -= 1
return bitlen
def tb_strings():
return ''.join(traceback.format_exception(*sys.exc_info())).split('\n')
def generate_key_bytes(hashclass, salt, key, nbytes):
"""
Given a password, passphrase, or other human-source key, scramble it
through a secure hash into some keyworthy bytes. This specific algorithm
is used for encrypting/decrypting private key files.
@param hashclass: class from L{Crypto.Hash} that can be used as a secure
hashing function (like C{MD5} or C{SHA}).
@type hashclass: L{Crypto.Hash}
@param salt: data to salt the hash with.
@type salt: string
@param key: human-entered password or passphrase.
@type key: string
@param nbytes: number of bytes to generate.
@type nbytes: int
@return: key data
@rtype: string
"""
keydata = ''
digest = ''
if len(salt) > 8:
salt = salt[:8]
while nbytes > 0:
hash_obj = hashclass.new()
if len(digest) > 0:
hash_obj.update(digest)
hash_obj.update(key)
hash_obj.update(salt)
digest = hash_obj.digest()
size = min(nbytes, len(digest))
keydata += digest[:size]
nbytes -= size
return keydata
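# A minimal usage sketch for generate_key_bytes (illustrative only; the salt
# and passphrase below are made up, and 24 bytes is just an example length,
# e.g. enough key material for a 3DES key).
def _generate_key_bytes_example():
    from Crypto.Hash import MD5
    key = generate_key_bytes(MD5, 'saltsalt', 'my passphrase', 24)
    assert len(key) == 24
    return key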
def load_host_keys(filename):
"""
Read a file of known SSH host keys, in the format used by openssh, and
return a compound dict of C{hostname -> keytype ->} L{PKey <paramiko.pkey.PKey>}.
The hostname may be an IP address or DNS name. The keytype will be either
C{"ssh-rsa"} or C{"ssh-dss"}.
This type of file unfortunately doesn't exist on Windows, but on posix,
it will usually be stored in C{os.path.expanduser("~/.ssh/known_hosts")}.
Since 1.5.3, this is just a wrapper around L{HostKeys}.
@param filename: name of the file to read host keys from
@type filename: str
@return: dict of host keys, indexed by hostname and then keytype
@rtype: dict(hostname, dict(keytype, L{PKey <paramiko.pkey.PKey>}))
"""
from paramiko.hostkeys import HostKeys
return HostKeys(filename)
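# Illustrative lookup (the hostname and path are made up; per the docstring
# above, the returned object behaves like a dict of dicts):
#   keys = load_host_keys('/home/user/.ssh/known_hosts')
#   rsa_key = keys['example.com']['ssh-rsa']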
def parse_ssh_config(file_obj):
"""
Provided only as a backward-compatible wrapper around L{SSHConfig}.
"""
config = SSHConfig()
config.parse(file_obj)
return config
def lookup_ssh_host_config(hostname, config):
"""
Provided only as a backward-compatible wrapper around L{SSHConfig}.
"""
return config.lookup(hostname)
def mod_inverse(x, m):
# it's crazy how small python can make this function.
u1, u2, u3 = 1, 0, m
v1, v2, v3 = 0, 1, x
while v3 > 0:
q = u3 // v3
u1, v1 = v1, u1 - v1 * q
u2, v2 = v2, u2 - v2 * q
u3, v3 = v3, u3 - v3 * q
if u2 < 0:
u2 += m
return u2
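# mod_inverse computes the modular inverse via the extended Euclidean
# algorithm. Worked example: mod_inverse(3, 7) == 5, since 3 * 5 == 15 == 2*7 + 1.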
_g_thread_ids = {}
_g_thread_counter = 0
_g_thread_lock = threading.Lock()
def get_thread_id():
global _g_thread_ids, _g_thread_counter, _g_thread_lock
tid = id(threading.currentThread())
try:
return _g_thread_ids[tid]
except KeyError:
_g_thread_lock.acquire()
try:
_g_thread_counter += 1
ret = _g_thread_ids[tid] = _g_thread_counter
finally:
_g_thread_lock.release()
return ret
def log_to_file(filename, level=DEBUG):
"send paramiko logs to a logfile, if they're not already going somewhere"
l = logging.getLogger("paramiko")
if len(l.handlers) > 0:
return
l.setLevel(level)
f = open(filename, 'w')
lh = logging.StreamHandler(f)
lh.setFormatter(logging.Formatter('%(levelname)-.3s [%(asctime)s.%(msecs)03d] thr=%(_threadid)-3d %(name)s: %(message)s',
'%Y%m%d-%H:%M:%S'))
l.addHandler(lh)
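# Illustrative call (the path below is made up):
#   log_to_file('/tmp/paramiko.log')    # level defaults to DEBUG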
# make only one filter object, so it doesn't get applied more than once
class PFilter (object):
def filter(self, record):
record._threadid = get_thread_id()
return True
_pfilter = PFilter()
def get_logger(name):
l = logging.getLogger(name)
l.addFilter(_pfilter)
return l
|
Intel-tensorflow/tensorflow
|
refs/heads/master
|
tensorflow/python/keras/engine/node_test.py
|
5
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for layer graphs construction & handling."""
from tensorflow.python.keras import keras_parameterized
from tensorflow.python.keras.engine import base_layer
from tensorflow.python.keras.engine import node as node_module
from tensorflow.python.platform import test
class DummyTensor:
def __init__(self, shape=None):
self.shape = shape
class DummyLayer(base_layer.Layer):
pass
class NetworkConstructionTest(keras_parameterized.TestCase):
def test_chained_node_construction(self):
# test basics
a = DummyTensor(shape=(None, 32))
b = DummyTensor(shape=(None, 32))
a_layer = DummyLayer()
node = node_module.Node(a_layer, outputs=a)
self.assertEqual(node.outbound_layer, a_layer)
self.assertTrue(node.is_input)
self.assertListEqual(node.inbound_layers, [])
self.assertListEqual(node.input_tensors, [a])
self.assertListEqual(node.input_shapes, [(None, 32)])
self.assertListEqual(node.output_tensors, [a])
self.assertListEqual(node.output_shapes, [(None, 32)])
b_layer = DummyLayer()
node_module.Node(b_layer, outputs=b)
dense = DummyLayer()
a_2 = DummyTensor()
node_a = node_module.Node(layer=dense, call_args=(a,), outputs=a_2)
b_2 = DummyTensor()
node_b = node_module.Node(layer=dense, call_args=(b,), outputs=b_2)
# test the node attributes
self.assertFalse(node_a.is_input)
self.assertFalse(node_b.is_input)
self.assertEqual(node_a.call_args, (a,))
self.assertEqual(node_a.call_kwargs, {})
self.assertEqual(node_a.outputs, a_2)
# Test the layer wiring
self.assertLen(dense._inbound_nodes, 2)
self.assertLen(dense._outbound_nodes, 0)
self.assertEqual(dense._inbound_nodes, [node_a, node_b])
self.assertEqual(dense._inbound_nodes[0].inbound_layers, a_layer)
self.assertEqual(dense._inbound_nodes[0].outbound_layer, dense)
self.assertEqual(dense._inbound_nodes[1].inbound_layers, b_layer)
self.assertEqual(dense._inbound_nodes[1].outbound_layer, dense)
self.assertIs(dense._inbound_nodes[0].input_tensors, a)
self.assertIs(dense._inbound_nodes[1].input_tensors, b)
def test_multi_input_node(self):
# test multi-input layer
a = DummyTensor()
b = DummyTensor()
dense = DummyLayer()
a_2 = DummyTensor()
node_module.Node(layer=dense, call_args=(a,), outputs=a_2)
b_2 = DummyTensor()
node_module.Node(layer=dense, call_args=(b,), outputs=b_2)
concat_layer = DummyLayer()
merged = DummyTensor()
node_module.Node(layer=concat_layer, call_args=([a_2, b_2],),
outputs=merged)
merge_layer, merge_node_index, merge_tensor_index = merged._keras_history
self.assertEqual(merge_node_index, 0)
self.assertEqual(merge_tensor_index, 0)
self.assertLen(merge_layer._inbound_nodes, 1)
self.assertLen(merge_layer._outbound_nodes, 0)
self.assertLen(merge_layer._inbound_nodes[0].input_tensors, 2)
self.assertEqual(merge_layer._inbound_nodes[0].input_tensors, [a_2, b_2])
self.assertLen(merge_layer._inbound_nodes[0].inbound_layers, 2)
def test_arg_and_kwarg_mix(self):
input_layer = DummyLayer()
input_layer_2 = DummyLayer()
a = DummyTensor()
node_a = node_module.Node(layer=input_layer, outputs=a)
b = DummyTensor()
node_b = node_module.Node(layer=input_layer_2, outputs=b)
arg_2 = DummyTensor()
arg_3 = DummyTensor()
node_c = node_module.Node(layer=input_layer, outputs=arg_3)
kwarg_x = DummyTensor()
kwarg_y = DummyTensor()
node_d = node_module.Node(layer=input_layer, outputs=kwarg_y)
merge_layer = DummyLayer()
merged = DummyTensor()
node = node_module.Node(layer=merge_layer,
call_args=([a, b], arg_2, arg_3),
call_kwargs={'x': kwarg_x, 'y': kwarg_y},
outputs=merged)
merge_layer, merge_node_index, merge_tensor_index = merged._keras_history
# Check the saved call args/kwargs
self.assertEqual(([a, b], arg_2, arg_3), node.call_args)
self.assertEqual({'x': kwarg_x, 'y': kwarg_y}, node.call_kwargs)
# Only the inputs that were produced by input nodes should appear in
    # keras_inputs
self.assertEqual({a, b, arg_3, kwarg_y}, set(node.keras_inputs))
self.assertEqual(set(node.parent_nodes), {node_a, node_b, node_c, node_d})
# Check the layer wirings
self.assertEqual(merge_node_index, 0)
self.assertEqual(merge_tensor_index, 0)
self.assertLen(merge_layer._inbound_nodes, 1)
self.assertLen(merge_layer._outbound_nodes, 0)
self.assertLen(input_layer._outbound_nodes, 3)
self.assertLen(input_layer_2._outbound_nodes, 1)
# The 'backwards compatibility' attributes should only check the
# first call argument
self.assertLen(merge_layer._inbound_nodes[0].input_tensors, 2)
self.assertEqual(merge_layer._inbound_nodes[0].input_tensors, [a, b])
self.assertLen(merge_layer._inbound_nodes[0].inbound_layers, 2)
if __name__ == '__main__':
test.main()
|
HKUST-SING/tensorflow
|
refs/heads/master
|
tensorflow/contrib/bayesflow/python/kernel_tests/stochastic_variables_test.py
|
80
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for stochastic graphs."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.contrib import distributions
from tensorflow.contrib.bayesflow.python.ops import stochastic_tensor
from tensorflow.contrib.bayesflow.python.ops import stochastic_variables
from tensorflow.contrib.bayesflow.python.ops import variational_inference_impl
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.ops import variable_scope
from tensorflow.python.ops import variables
from tensorflow.python.platform import test
sv = stochastic_variables
st = stochastic_tensor
vi = variational_inference_impl
dist = distributions
class StochasticVariablesTest(test.TestCase):
def testStochasticVariables(self):
shape = (10, 20)
with variable_scope.variable_scope(
"stochastic_variables",
custom_getter=sv.make_stochastic_variable_getter(
dist_cls=dist.NormalWithSoftplusScale)):
v = variable_scope.get_variable("sv", shape)
self.assertTrue(isinstance(v, st.StochasticTensor))
self.assertTrue(isinstance(v.distribution, dist.NormalWithSoftplusScale))
self.assertEqual(
{"stochastic_variables/sv_loc", "stochastic_variables/sv_scale"},
set([v.op.name for v in variables.global_variables()]))
self.assertEqual(
set(variables.trainable_variables()), set(variables.global_variables()))
v = ops.convert_to_tensor(v)
self.assertEqual(list(shape), v.get_shape().as_list())
with self.test_session() as sess:
sess.run(variables.global_variables_initializer())
self.assertEqual(shape, sess.run(v).shape)
def testStochasticVariablesWithConstantInitializer(self):
shape = (10, 20)
with variable_scope.variable_scope(
"stochastic_variables",
custom_getter=sv.make_stochastic_variable_getter(
dist_cls=dist.NormalWithSoftplusScale,
dist_kwargs={"validate_args": True},
param_initializers={
"loc": np.ones(shape) * 4.,
"scale": np.ones(shape) * 2.
})):
v = variable_scope.get_variable("sv")
for var in variables.global_variables():
if "loc" in var.name:
mu_var = var
if "scale" in var.name:
sigma_var = var
v = ops.convert_to_tensor(v)
with self.test_session() as sess:
sess.run(variables.global_variables_initializer())
self.assertAllEqual(np.ones(shape) * 4., sess.run(mu_var))
self.assertAllEqual(np.ones(shape) * 2., sess.run(sigma_var))
self.assertEqual(shape, sess.run(v).shape)
def testStochasticVariablesWithCallableInitializer(self):
shape = (10, 20)
def sigma_init(shape, dtype, partition_info):
_ = partition_info
return array_ops.ones(shape, dtype=dtype) * 2.
with variable_scope.variable_scope(
"stochastic_variables",
custom_getter=sv.make_stochastic_variable_getter(
dist_cls=dist.NormalWithSoftplusScale,
dist_kwargs={"validate_args": True},
param_initializers={
"loc": np.ones(
shape, dtype=np.float32) * 4.,
"scale": sigma_init
})):
v = variable_scope.get_variable("sv", shape)
for var in variables.global_variables():
if "loc" in var.name:
mu_var = var
if "scale" in var.name:
sigma_var = var
v = ops.convert_to_tensor(v)
with self.test_session() as sess:
sess.run(variables.global_variables_initializer())
self.assertAllEqual(np.ones(shape) * 4., sess.run(mu_var))
self.assertAllEqual(np.ones(shape) * 2., sess.run(sigma_var))
self.assertEqual(shape, sess.run(v).shape)
def testStochasticVariablesWithPrior(self):
shape = (10, 20)
prior = dist.Normal(0., 1.)
with variable_scope.variable_scope(
"stochastic_variables",
custom_getter=sv.make_stochastic_variable_getter(
dist_cls=dist.NormalWithSoftplusScale, prior=prior)):
w = variable_scope.get_variable("weights", shape)
x = random_ops.random_uniform((8, 10))
y = math_ops.matmul(x, w)
prior_map = vi._find_variational_and_priors(y, None)
self.assertEqual(prior_map[w], prior)
elbo = vi.elbo(y, keep_batch_dim=False)
with self.test_session() as sess:
sess.run(variables.global_variables_initializer())
sess.run(elbo)
def testStochasticVariablesWithCallablePriorInitializer(self):
def prior_init(shape, dtype):
return dist.Normal(
array_ops.zeros(shape, dtype), array_ops.ones(shape, dtype))
with variable_scope.variable_scope(
"stochastic_variables",
custom_getter=sv.make_stochastic_variable_getter(
dist_cls=dist.NormalWithSoftplusScale, prior=prior_init)):
w = variable_scope.get_variable("weights", (10, 20))
x = random_ops.random_uniform((8, 10))
y = math_ops.matmul(x, w)
prior_map = vi._find_variational_and_priors(y, None)
self.assertTrue(isinstance(prior_map[w], dist.Normal))
elbo = vi.elbo(y, keep_batch_dim=False)
with self.test_session() as sess:
sess.run(variables.global_variables_initializer())
sess.run(elbo)
if __name__ == "__main__":
test.main()
|
SwordGO/SwordGO_app
|
refs/heads/master
|
example/kivymap/.buildozer/android/app/_applibs/requests/packages/chardet/hebrewprober.py
|
2928
|
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Shy Shalom
# Portions created by the Initial Developer are Copyright (C) 2005
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from .charsetprober import CharSetProber
from .constants import eNotMe, eDetecting
from .compat import wrap_ord
# This prober doesn't actually recognize a language or a charset.
# It is a helper prober for the use of the Hebrew model probers
### General ideas of the Hebrew charset recognition ###
#
# Four main charsets exist in Hebrew:
# "ISO-8859-8" - Visual Hebrew
# "windows-1255" - Logical Hebrew
# "ISO-8859-8-I" - Logical Hebrew
# "x-mac-hebrew" - ?? Logical Hebrew ??
#
# Both "ISO" charsets use a completely identical set of code points, whereas
# "windows-1255" and "x-mac-hebrew" are two different proper supersets of
# these code points. windows-1255 defines additional characters in the range
# 0x80-0x9F as some misc punctuation marks as well as some Hebrew-specific
# diacritics and additional 'Yiddish' ligature letters in the range 0xc0-0xd6.
# x-mac-hebrew defines similar additional code points but with a different
# mapping.
#
# As far as an average Hebrew text with no diacritics is concerned, all four
# charsets are identical with respect to code points. Meaning that for the
# main Hebrew alphabet, all four map the same values to all 27 Hebrew letters
# (including final letters).
#
# The dominant difference between these charsets is their directionality.
# "Visual" directionality means that the text is ordered as if the renderer is
# not aware of a BIDI rendering algorithm. The renderer sees the text and
# draws it from left to right. The text itself when ordered naturally is read
# backwards. A buffer of Visual Hebrew generally looks like so:
# "[last word of first line spelled backwards] [whole line ordered backwards
# and spelled backwards] [first word of first line spelled backwards]
# [end of line] [last word of second line] ... etc' "
# Adding punctuation marks, numbers and English text to visual text is
# naturally also "visual" and from left to right.
#
# "Logical" directionality means the text is ordered "naturally" according to
# the order it is read. It is the responsibility of the renderer to display
# the text from right to left. A BIDI algorithm is used to place general
# punctuation marks, numbers and English text in the text.
#
# Texts in x-mac-hebrew are almost impossible to find on the Internet. From
# what little evidence I could find, it seems that its general directionality
# is Logical.
#
# To sum up all of the above, the Hebrew probing mechanism knows about two
# charsets:
# Visual Hebrew - "ISO-8859-8" - backwards text - Words and sentences are
# backwards while line order is natural. For charset recognition purposes
# the line order is unimportant (In fact, for this implementation, even
# word order is unimportant).
# Logical Hebrew - "windows-1255" - normal, naturally ordered text.
#
# "ISO-8859-8-I" is a subset of windows-1255 and doesn't need to be
# specifically identified.
# "x-mac-hebrew" is also identified as windows-1255. A text in x-mac-hebrew
# that contains special punctuation marks or diacritics is displayed with
# some unconverted characters showing as question marks. This problem might
# be corrected using another model prober for x-mac-hebrew. Due to the fact
# that x-mac-hebrew texts are so rare, writing another model prober isn't
# worth the effort and performance hit.
#
#### The Prober ####
#
# The prober is divided between two SBCharSetProbers and a HebrewProber,
# all of which are managed, created, fed data, inquired and deleted by the
# SBCSGroupProber. The two SBCharSetProbers identify that the text is in
# fact some kind of Hebrew, Logical or Visual. The final decision between the
# two is made by the HebrewProber by combining final-letter scores
# with the scores of the two SBCharSetProbers to produce a final answer.
#
# The SBCSGroupProber is responsible for stripping the original text of HTML
# tags, English characters, numbers, low-ASCII punctuation characters, spaces
# and new lines. It reduces any sequence of such characters to a single space.
# The buffer fed to each prober in the SBCS group prober is pure text in
# high-ASCII.
# The two SBCharSetProbers (model probers) share the same language model:
# Win1255Model.
# The first SBCharSetProber uses the model normally as any other
# SBCharSetProber does, to recognize windows-1255, upon which this model was
# built. The second SBCharSetProber is told to make the pair-of-letter
# lookup in the language model backwards. This in practice exactly simulates
# a visual Hebrew model using the windows-1255 logical Hebrew model.
#
# The HebrewProber is not using any language model. All it does is look for
# final-letter evidence suggesting the text is either logical Hebrew or visual
# Hebrew. Disjointed from the model probers, the results of the HebrewProber
# alone are meaningless. HebrewProber always returns 0.00 as confidence
# since it never identifies a charset by itself. Instead, the pointer to the
# HebrewProber is passed to the model probers as a helper "Name Prober".
# When the Group prober receives a positive identification from any prober,
# it asks for the name of the charset identified. If the prober queried is a
# Hebrew model prober, the model prober forwards the call to the
# HebrewProber to make the final decision. In the HebrewProber, the
# decision is made according to the final-letter scores maintained and both
# model probers' scores. The answer is returned in the form of the name of the
# charset identified, either "windows-1255" or "ISO-8859-8".
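# A minimal sketch of the wiring described above, as the group prober sets it
# up (assuming SBCharSetProber and the Win1255 language model are imported
# from their sibling modules):
#
#   hebrew_prober = HebrewProber()
#   logical_prober = SBCharSetProber(Win1255HebrewModel, False, hebrew_prober)
#   visual_prober = SBCharSetProber(Win1255HebrewModel, True, hebrew_prober)
#   hebrew_prober.set_model_probers(logical_prober, visual_prober)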
# windows-1255 / ISO-8859-8 code points of interest
FINAL_KAF = 0xea
NORMAL_KAF = 0xeb
FINAL_MEM = 0xed
NORMAL_MEM = 0xee
FINAL_NUN = 0xef
NORMAL_NUN = 0xf0
FINAL_PE = 0xf3
NORMAL_PE = 0xf4
FINAL_TSADI = 0xf5
NORMAL_TSADI = 0xf6
# Minimum Visual vs Logical final letter score difference.
# If the difference is below this, don't rely solely on the final letter score
# distance.
MIN_FINAL_CHAR_DISTANCE = 5
# Minimum Visual vs Logical model score difference.
# If the difference is below this, don't rely at all on the model score
# distance.
MIN_MODEL_DISTANCE = 0.01
VISUAL_HEBREW_NAME = "ISO-8859-8"
LOGICAL_HEBREW_NAME = "windows-1255"
class HebrewProber(CharSetProber):
def __init__(self):
CharSetProber.__init__(self)
self._mLogicalProber = None
self._mVisualProber = None
self.reset()
def reset(self):
self._mFinalCharLogicalScore = 0
self._mFinalCharVisualScore = 0
# The two last characters seen in the previous buffer,
# mPrev and mBeforePrev are initialized to space in order to simulate
# a word delimiter at the beginning of the data
self._mPrev = ' '
self._mBeforePrev = ' '
# These probers are owned by the group prober.
def set_model_probers(self, logicalProber, visualProber):
self._mLogicalProber = logicalProber
self._mVisualProber = visualProber
def is_final(self, c):
return wrap_ord(c) in [FINAL_KAF, FINAL_MEM, FINAL_NUN, FINAL_PE,
FINAL_TSADI]
def is_non_final(self, c):
# The normal Tsadi is not a good Non-Final letter due to words like
# 'lechotet' (to chat) containing an apostrophe after the tsadi. This
# apostrophe is converted to a space in FilterWithoutEnglishLetters
# causing the Non-Final tsadi to appear at an end of a word even
# though this is not the case in the original text.
        # The letters Pe and Kaf occasionally show a related problem of not
        # being good Non-Final letters: words like 'Pop', 'Winamp' and 'Mubarak',
        # for example, legitimately end with a Non-Final Pe or Kaf. However, the
        # benefit of treating these letters as Non-Final outweighs the damage,
        # since such words are quite rare.
return wrap_ord(c) in [NORMAL_KAF, NORMAL_MEM, NORMAL_NUN, NORMAL_PE]
def feed(self, aBuf):
# Final letter analysis for logical-visual decision.
# Look for evidence that the received buffer is either logical Hebrew
# or visual Hebrew.
# The following cases are checked:
# 1) A word longer than 1 letter, ending with a final letter. This is
# an indication that the text is laid out "naturally" since the
# final letter really appears at the end. +1 for logical score.
# 2) A word longer than 1 letter, ending with a Non-Final letter. In
# normal Hebrew, words ending with Kaf, Mem, Nun, Pe or Tsadi,
# should not end with the Non-Final form of that letter. Exceptions
        # to this rule are mentioned above in is_non_final(). This is an
# indication that the text is laid out backwards. +1 for visual
# score
# 3) A word longer than 1 letter, starting with a final letter. Final
# letters should not appear at the beginning of a word. This is an
# indication that the text is laid out backwards. +1 for visual
# score.
#
# The visual score and logical score are accumulated throughout the
# text and are finally checked against each other in GetCharSetName().
# No checking for final letters in the middle of words is done since
# that case is not an indication for either Logical or Visual text.
#
# We automatically filter out all 7-bit characters (replace them with
# spaces) so the word boundary detection works properly. [MAP]
if self.get_state() == eNotMe:
# Both model probers say it's not them. No reason to continue.
return eNotMe
aBuf = self.filter_high_bit_only(aBuf)
for cur in aBuf:
if cur == ' ':
# We stand on a space - a word just ended
if self._mBeforePrev != ' ':
# next-to-last char was not a space so self._mPrev is not a
# 1 letter word
if self.is_final(self._mPrev):
# case (1) [-2:not space][-1:final letter][cur:space]
self._mFinalCharLogicalScore += 1
elif self.is_non_final(self._mPrev):
# case (2) [-2:not space][-1:Non-Final letter][
# cur:space]
self._mFinalCharVisualScore += 1
else:
# Not standing on a space
if ((self._mBeforePrev == ' ') and
(self.is_final(self._mPrev)) and (cur != ' ')):
# case (3) [-2:space][-1:final letter][cur:not space]
self._mFinalCharVisualScore += 1
self._mBeforePrev = self._mPrev
self._mPrev = cur
# Forever detecting, till the end or until both model probers return
# eNotMe (handled above)
return eDetecting
def get_charset_name(self):
# Make the decision: is it Logical or Visual?
# If the final letter score distance is dominant enough, rely on it.
finalsub = self._mFinalCharLogicalScore - self._mFinalCharVisualScore
if finalsub >= MIN_FINAL_CHAR_DISTANCE:
return LOGICAL_HEBREW_NAME
if finalsub <= -MIN_FINAL_CHAR_DISTANCE:
return VISUAL_HEBREW_NAME
# It's not dominant enough, try to rely on the model scores instead.
modelsub = (self._mLogicalProber.get_confidence()
- self._mVisualProber.get_confidence())
if modelsub > MIN_MODEL_DISTANCE:
return LOGICAL_HEBREW_NAME
if modelsub < -MIN_MODEL_DISTANCE:
return VISUAL_HEBREW_NAME
# Still no good, back to final letter distance, maybe it'll save the
# day.
if finalsub < 0.0:
return VISUAL_HEBREW_NAME
# (finalsub > 0 - Logical) or (don't know what to do) default to
# Logical.
return LOGICAL_HEBREW_NAME
def get_state(self):
# Remain active as long as any of the model probers are active.
if (self._mLogicalProber.get_state() == eNotMe) and \
(self._mVisualProber.get_state() == eNotMe):
return eNotMe
return eDetecting
|
russellb/nova
|
refs/heads/master
|
nova/tests/api/openstack/compute/test_images.py
|
1
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Tests of the new image services, both as a service layer,
and as a WSGI layer
"""
import urlparse
from lxml import etree
import stubout
import webob
from nova import flags
from nova.api.openstack.compute import images
from nova.api.openstack.compute.views import images as images_view
from nova.api.openstack import xmlutil
from nova import test
from nova import utils
from nova.tests.api.openstack import fakes
FLAGS = flags.FLAGS
NS = "{http://docs.openstack.org/compute/api/v1.1}"
ATOMNS = "{http://www.w3.org/2005/Atom}"
NOW_API_FORMAT = "2010-10-11T10:30:22Z"
class ImagesControllerTest(test.TestCase):
"""
Test of the OpenStack API /images application controller w/Glance.
"""
def setUp(self):
"""Run before each test."""
super(ImagesControllerTest, self).setUp()
self.maxDiff = None
self.stubs = stubout.StubOutForTesting()
fakes.stub_out_networking(self.stubs)
fakes.stub_out_rate_limiting(self.stubs)
fakes.stub_out_key_pair_funcs(self.stubs)
fakes.stub_out_compute_api_snapshot(self.stubs)
fakes.stub_out_compute_api_backup(self.stubs)
fakes.stub_out_glance(self.stubs)
self.controller = images.Controller()
def tearDown(self):
"""Run after each test."""
self.stubs.UnsetAll()
super(ImagesControllerTest, self).tearDown()
def test_get_image(self):
fake_req = fakes.HTTPRequest.blank('/v2/fake/images/123')
actual_image = self.controller.show(fake_req, '124')
href = "http://localhost/v2/fake/images/124"
bookmark = "http://localhost/fake/images/124"
alternate = "%s/fake/images/124" % utils.generate_glance_url()
server_uuid = "aa640691-d1a7-4a67-9d3c-d35ee6b3cc74"
server_href = "http://localhost/v2/servers/" + server_uuid
server_bookmark = "http://localhost/servers/" + server_uuid
expected_image = {
"image": {
"id": "124",
"name": "queued snapshot",
"updated": NOW_API_FORMAT,
"created": NOW_API_FORMAT,
"status": "SAVING",
"progress": 25,
"minDisk": 0,
"minRam": 0,
'server': {
'id': server_uuid,
"links": [{
"rel": "self",
"href": server_href,
},
{
"rel": "bookmark",
"href": server_bookmark,
}],
},
"metadata": {
"instance_uuid": server_uuid,
"user_id": "fake",
},
"links": [{
"rel": "self",
"href": href,
},
{
"rel": "bookmark",
"href": bookmark,
},
{
"rel": "alternate",
"type": "application/vnd.openstack.image",
"href": alternate
}],
},
}
self.assertDictMatch(expected_image, actual_image)
def test_get_image_with_custom_prefix(self):
self.flags(osapi_compute_link_prefix='http://zoo.com:42',
osapi_glance_link_prefix='http://circus.com:34')
fake_req = fakes.HTTPRequest.blank('/v2/fake/images/123')
actual_image = self.controller.show(fake_req, '124')
href = "http://zoo.com:42/v2/fake/images/124"
bookmark = "http://zoo.com:42/fake/images/124"
alternate = "http://circus.com:34/fake/images/124"
server_uuid = "aa640691-d1a7-4a67-9d3c-d35ee6b3cc74"
server_href = "http://localhost/v2/servers/" + server_uuid
server_bookmark = "http://localhost/servers/" + server_uuid
expected_image = {
"image": {
"id": "124",
"name": "queued snapshot",
"updated": NOW_API_FORMAT,
"created": NOW_API_FORMAT,
"status": "SAVING",
"progress": 25,
"minDisk": 0,
"minRam": 0,
'server': {
'id': server_uuid,
"links": [{
"rel": "self",
"href": server_href,
},
{
"rel": "bookmark",
"href": server_bookmark,
}],
},
"metadata": {
"instance_uuid": server_uuid,
"user_id": "fake",
},
"links": [{
"rel": "self",
"href": href,
},
{
"rel": "bookmark",
"href": bookmark,
},
{
"rel": "alternate",
"type": "application/vnd.openstack.image",
"href": alternate
}],
},
}
self.assertDictMatch(expected_image, actual_image)
def test_get_image_404(self):
fake_req = fakes.HTTPRequest.blank('/v2/fake/images/unknown')
self.assertRaises(webob.exc.HTTPNotFound,
self.controller.show, fake_req, 'unknown')
def test_get_image_index(self):
fake_req = fakes.HTTPRequest.blank('/v2/fake/images')
response_list = self.controller.index(fake_req)['images']
expected_images = [
{
"id": "123",
"name": "public image",
"links": [
{
"rel": "self",
"href": "http://localhost/v2/fake/images/123",
},
{
"rel": "bookmark",
"href": "http://localhost/fake/images/123",
},
{
"rel": "alternate",
"type": "application/vnd.openstack.image",
"href": "%s/fake/images/123" %
utils.generate_glance_url()
},
],
},
{
"id": "124",
"name": "queued snapshot",
"links": [
{
"rel": "self",
"href": "http://localhost/v2/fake/images/124",
},
{
"rel": "bookmark",
"href": "http://localhost/fake/images/124",
},
{
"rel": "alternate",
"type": "application/vnd.openstack.image",
"href": "%s/fake/images/124" %
utils.generate_glance_url()
},
],
},
{
"id": "125",
"name": "saving snapshot",
"links": [
{
"rel": "self",
"href": "http://localhost/v2/fake/images/125",
},
{
"rel": "bookmark",
"href": "http://localhost/fake/images/125",
},
{
"rel": "alternate",
"type": "application/vnd.openstack.image",
"href": "%s/fake/images/125" %
utils.generate_glance_url()
},
],
},
{
"id": "126",
"name": "active snapshot",
"links": [
{
"rel": "self",
"href": "http://localhost/v2/fake/images/126",
},
{
"rel": "bookmark",
"href": "http://localhost/fake/images/126",
},
{
"rel": "alternate",
"type": "application/vnd.openstack.image",
"href": "%s/fake/images/126" %
utils.generate_glance_url()
},
],
},
{
"id": "127",
"name": "killed snapshot",
"links": [
{
"rel": "self",
"href": "http://localhost/v2/fake/images/127",
},
{
"rel": "bookmark",
"href": "http://localhost/fake/images/127",
},
{
"rel": "alternate",
"type": "application/vnd.openstack.image",
"href": "%s/fake/images/127" %
utils.generate_glance_url()
},
],
},
{
"id": "128",
"name": "deleted snapshot",
"links": [
{
"rel": "self",
"href": "http://localhost/v2/fake/images/128",
},
{
"rel": "bookmark",
"href": "http://localhost/fake/images/128",
},
{
"rel": "alternate",
"type": "application/vnd.openstack.image",
"href": "%s/fake/images/128" %
utils.generate_glance_url()
},
],
},
{
"id": "129",
"name": "pending_delete snapshot",
"links": [
{
"rel": "self",
"href": "http://localhost/v2/fake/images/129",
},
{
"rel": "bookmark",
"href": "http://localhost/fake/images/129",
},
{
"rel": "alternate",
"type": "application/vnd.openstack.image",
"href": "%s/fake/images/129" %
utils.generate_glance_url()
},
],
},
{
"id": "130",
"name": None,
"links": [
{
"rel": "self",
"href": "http://localhost/v2/fake/images/130",
},
{
"rel": "bookmark",
"href": "http://localhost/fake/images/130",
},
{
"rel": "alternate",
"type": "application/vnd.openstack.image",
"href": "%s/fake/images/130" %
utils.generate_glance_url()
},
],
},
]
self.assertDictListMatch(response_list, expected_images)
def test_get_image_index_with_limit(self):
request = fakes.HTTPRequest.blank('/v2/fake/images?limit=3')
response = self.controller.index(request)
response_list = response["images"]
response_links = response["images_links"]
alternate = "%s/fake/images/%s"
expected_images = [
{
"id": "123",
"name": "public image",
"links": [
{
"rel": "self",
"href": "http://localhost/v2/fake/images/123",
},
{
"rel": "bookmark",
"href": "http://localhost/fake/images/123",
},
{
"rel": "alternate",
"type": "application/vnd.openstack.image",
"href": alternate % (utils.generate_glance_url(), 123),
},
],
},
{
"id": "124",
"name": "queued snapshot",
"links": [
{
"rel": "self",
"href": "http://localhost/v2/fake/images/124",
},
{
"rel": "bookmark",
"href": "http://localhost/fake/images/124",
},
{
"rel": "alternate",
"type": "application/vnd.openstack.image",
"href": alternate % (utils.generate_glance_url(), 124),
},
],
},
{
"id": "125",
"name": "saving snapshot",
"links": [
{
"rel": "self",
"href": "http://localhost/v2/fake/images/125",
},
{
"rel": "bookmark",
"href": "http://localhost/fake/images/125",
},
{
"rel": "alternate",
"type": "application/vnd.openstack.image",
"href": alternate % (utils.generate_glance_url(), 125),
},
],
},
]
self.assertDictListMatch(response_list, expected_images)
self.assertEqual(response_links[0]['rel'], 'next')
href_parts = urlparse.urlparse(response_links[0]['href'])
self.assertEqual('/v2/fake/images', href_parts.path)
params = urlparse.parse_qs(href_parts.query)
self.assertDictMatch({'limit': ['3'], 'marker': ['125']}, params)
def test_get_image_index_with_limit_and_extra_params(self):
request = fakes.HTTPRequest.blank('/v2/fake/images?limit=3&extra=bo')
response = self.controller.index(request)
response_links = response["images_links"]
self.assertEqual(response_links[0]['rel'], 'next')
href_parts = urlparse.urlparse(response_links[0]['href'])
self.assertEqual('/v2/fake/images', href_parts.path)
params = urlparse.parse_qs(href_parts.query)
self.assertDictMatch(
{'limit': ['3'], 'marker': ['125'], 'extra': ['bo']},
params)
def test_get_image_index_with_big_limit(self):
"""
Make sure we don't get images_links if limit is set
and the number of images returned is < limit
"""
request = fakes.HTTPRequest.blank('/v2/fake/images?limit=30')
response = self.controller.index(request)
self.assertEqual(response.keys(), ['images'])
self.assertEqual(len(response['images']), 8)
def test_get_image_details(self):
request = fakes.HTTPRequest.blank('/v2/fake/images/detail')
response = self.controller.detail(request)
response_list = response["images"]
server_uuid = "aa640691-d1a7-4a67-9d3c-d35ee6b3cc74"
server_href = "http://localhost/v2/servers/" + server_uuid
server_bookmark = "http://localhost/servers/" + server_uuid
alternate = "%s/fake/images/%s"
expected = [{
'id': '123',
'name': 'public image',
'metadata': {'key1': 'value1'},
'updated': NOW_API_FORMAT,
'created': NOW_API_FORMAT,
'status': 'ACTIVE',
'progress': 100,
'minDisk': 10,
'minRam': 128,
"links": [{
"rel": "self",
"href": "http://localhost/v2/fake/images/123",
},
{
"rel": "bookmark",
"href": "http://localhost/fake/images/123",
},
{
"rel": "alternate",
"type": "application/vnd.openstack.image",
"href": alternate % (utils.generate_glance_url(), 123),
}],
},
{
'id': '124',
'name': 'queued snapshot',
'metadata': {
u'instance_uuid': server_uuid,
u'user_id': u'fake',
},
'updated': NOW_API_FORMAT,
'created': NOW_API_FORMAT,
'status': 'SAVING',
'progress': 25,
'minDisk': 0,
'minRam': 0,
'server': {
'id': server_uuid,
"links": [{
"rel": "self",
"href": server_href,
},
{
"rel": "bookmark",
"href": server_bookmark,
}],
},
"links": [{
"rel": "self",
"href": "http://localhost/v2/fake/images/124",
},
{
"rel": "bookmark",
"href": "http://localhost/fake/images/124",
},
{
"rel": "alternate",
"type": "application/vnd.openstack.image",
"href": alternate % (utils.generate_glance_url(), 124),
}],
},
{
'id': '125',
'name': 'saving snapshot',
'metadata': {
u'instance_uuid': server_uuid,
u'user_id': u'fake',
},
'updated': NOW_API_FORMAT,
'created': NOW_API_FORMAT,
'status': 'SAVING',
'progress': 50,
'minDisk': 0,
'minRam': 0,
'server': {
'id': server_uuid,
"links": [{
"rel": "self",
"href": server_href,
},
{
"rel": "bookmark",
"href": server_bookmark,
}],
},
"links": [{
"rel": "self",
"href": "http://localhost/v2/fake/images/125",
},
{
"rel": "bookmark",
"href": "http://localhost/fake/images/125",
},
{
"rel": "alternate",
"type": "application/vnd.openstack.image",
"href": "%s/fake/images/125" % utils.generate_glance_url()
}],
},
{
'id': '126',
'name': 'active snapshot',
'metadata': {
u'instance_uuid': server_uuid,
u'user_id': u'fake',
},
'updated': NOW_API_FORMAT,
'created': NOW_API_FORMAT,
'status': 'ACTIVE',
'progress': 100,
'minDisk': 0,
'minRam': 0,
'server': {
'id': server_uuid,
"links": [{
"rel": "self",
"href": server_href,
},
{
"rel": "bookmark",
"href": server_bookmark,
}],
},
"links": [{
"rel": "self",
"href": "http://localhost/v2/fake/images/126",
},
{
"rel": "bookmark",
"href": "http://localhost/fake/images/126",
},
{
"rel": "alternate",
"type": "application/vnd.openstack.image",
"href": "%s/fake/images/126" % utils.generate_glance_url()
}],
},
{
'id': '127',
'name': 'killed snapshot',
'metadata': {
u'instance_uuid': server_uuid,
u'user_id': u'fake',
},
'updated': NOW_API_FORMAT,
'created': NOW_API_FORMAT,
'status': 'ERROR',
'progress': 0,
'minDisk': 0,
'minRam': 0,
'server': {
'id': server_uuid,
"links": [{
"rel": "self",
"href": server_href,
},
{
"rel": "bookmark",
"href": server_bookmark,
}],
},
"links": [{
"rel": "self",
"href": "http://localhost/v2/fake/images/127",
},
{
"rel": "bookmark",
"href": "http://localhost/fake/images/127",
},
{
"rel": "alternate",
"type": "application/vnd.openstack.image",
"href": "%s/fake/images/127" % utils.generate_glance_url()
}],
},
{
'id': '128',
'name': 'deleted snapshot',
'metadata': {
u'instance_uuid': server_uuid,
u'user_id': u'fake',
},
'updated': NOW_API_FORMAT,
'created': NOW_API_FORMAT,
'status': 'DELETED',
'progress': 0,
'minDisk': 0,
'minRam': 0,
'server': {
'id': server_uuid,
"links": [{
"rel": "self",
"href": server_href,
},
{
"rel": "bookmark",
"href": server_bookmark,
}],
},
"links": [{
"rel": "self",
"href": "http://localhost/v2/fake/images/128",
},
{
"rel": "bookmark",
"href": "http://localhost/fake/images/128",
},
{
"rel": "alternate",
"type": "application/vnd.openstack.image",
"href": "%s/fake/images/128" % utils.generate_glance_url()
}],
},
{
'id': '129',
'name': 'pending_delete snapshot',
'metadata': {
u'instance_uuid': server_uuid,
u'user_id': u'fake',
},
'updated': NOW_API_FORMAT,
'created': NOW_API_FORMAT,
'status': 'DELETED',
'progress': 0,
'minDisk': 0,
'minRam': 0,
'server': {
'id': server_uuid,
"links": [{
"rel": "self",
"href": server_href,
},
{
"rel": "bookmark",
"href": server_bookmark,
}],
},
"links": [{
"rel": "self",
"href": "http://localhost/v2/fake/images/129",
},
{
"rel": "bookmark",
"href": "http://localhost/fake/images/129",
},
{
"rel": "alternate",
"type": "application/vnd.openstack.image",
"href": "%s/fake/images/129" % utils.generate_glance_url()
}],
},
{
'id': '130',
'name': None,
'metadata': {},
'updated': NOW_API_FORMAT,
'created': NOW_API_FORMAT,
'status': 'ACTIVE',
'progress': 100,
'minDisk': 0,
'minRam': 0,
"links": [{
"rel": "self",
"href": "http://localhost/v2/fake/images/130",
},
{
"rel": "bookmark",
"href": "http://localhost/fake/images/130",
},
{
"rel": "alternate",
"type": "application/vnd.openstack.image",
"href": "%s/fake/images/130" % utils.generate_glance_url()
}],
},
]
self.assertDictListMatch(expected, response_list)
def test_get_image_details_with_limit(self):
request = fakes.HTTPRequest.blank('/v2/fake/images/detail?limit=2')
response = self.controller.detail(request)
response_list = response["images"]
response_links = response["images_links"]
server_uuid = "aa640691-d1a7-4a67-9d3c-d35ee6b3cc74"
server_href = "http://localhost/v2/servers/" + server_uuid
server_bookmark = "http://localhost/servers/" + server_uuid
alternate = "%s/fake/images/%s"
expected = [{
'id': '123',
'name': 'public image',
'metadata': {'key1': 'value1'},
'updated': NOW_API_FORMAT,
'created': NOW_API_FORMAT,
'status': 'ACTIVE',
'minDisk': 10,
'progress': 100,
'minRam': 128,
"links": [{
"rel": "self",
"href": "http://localhost/v2/fake/images/123",
},
{
"rel": "bookmark",
"href": "http://localhost/fake/images/123",
},
{
"rel": "alternate",
"type": "application/vnd.openstack.image",
"href": alternate % (utils.generate_glance_url(), 123),
}],
},
{
'id': '124',
'name': 'queued snapshot',
'metadata': {
u'instance_uuid': server_uuid,
u'user_id': u'fake',
},
'updated': NOW_API_FORMAT,
'created': NOW_API_FORMAT,
'status': 'SAVING',
'minDisk': 0,
'progress': 25,
'minRam': 0,
'server': {
'id': server_uuid,
"links": [{
"rel": "self",
"href": server_href,
},
{
"rel": "bookmark",
"href": server_bookmark,
}],
},
"links": [{
"rel": "self",
"href": "http://localhost/v2/fake/images/124",
},
{
"rel": "bookmark",
"href": "http://localhost/fake/images/124",
},
{
"rel": "alternate",
"type": "application/vnd.openstack.image",
"href": alternate % (utils.generate_glance_url(), 124),
}],
}]
self.assertDictListMatch(expected, response_list)
href_parts = urlparse.urlparse(response_links[0]['href'])
self.assertEqual('/v2/fake/images', href_parts.path)
params = urlparse.parse_qs(href_parts.query)
self.assertDictMatch({'limit': ['2'], 'marker': ['124']}, params)
def test_image_filter_with_name(self):
image_service = self.mox.CreateMockAnything()
filters = {'name': 'testname'}
request = fakes.HTTPRequest.blank('/v2/images?name=testname')
context = request.environ['nova.context']
image_service.index(context, filters=filters).AndReturn([])
self.mox.ReplayAll()
controller = images.Controller(image_service=image_service)
controller.index(request)
self.mox.VerifyAll()
def test_image_filter_with_min_ram(self):
image_service = self.mox.CreateMockAnything()
filters = {'min_ram': '0'}
request = fakes.HTTPRequest.blank('/v2/images?minRam=0')
context = request.environ['nova.context']
image_service.index(context, filters=filters).AndReturn([])
self.mox.ReplayAll()
controller = images.Controller(image_service=image_service)
controller.index(request)
self.mox.VerifyAll()
def test_image_filter_with_min_disk(self):
image_service = self.mox.CreateMockAnything()
filters = {'min_disk': '7'}
request = fakes.HTTPRequest.blank('/v2/images?minDisk=7')
context = request.environ['nova.context']
image_service.index(context, filters=filters).AndReturn([])
self.mox.ReplayAll()
controller = images.Controller(image_service=image_service)
controller.index(request)
self.mox.VerifyAll()
def test_image_filter_with_status(self):
image_service = self.mox.CreateMockAnything()
filters = {'status': 'ACTIVE'}
request = fakes.HTTPRequest.blank('/v2/images?status=ACTIVE')
context = request.environ['nova.context']
image_service.index(context, filters=filters).AndReturn([])
self.mox.ReplayAll()
controller = images.Controller(image_service=image_service)
controller.index(request)
self.mox.VerifyAll()
def test_image_filter_with_property(self):
image_service = self.mox.CreateMockAnything()
filters = {'property-test': '3'}
request = fakes.HTTPRequest.blank('/v2/images?property-test=3')
context = request.environ['nova.context']
image_service.index(context, filters=filters).AndReturn([])
self.mox.ReplayAll()
controller = images.Controller(image_service=image_service)
controller.index(request)
self.mox.VerifyAll()
def test_image_filter_server(self):
image_service = self.mox.CreateMockAnything()
uuid = 'fa95aaf5-ab3b-4cd8-88c0-2be7dd051aaf'
ref = 'http://localhost:8774/servers/' + uuid
filters = {'property-instance_uuid': uuid}
request = fakes.HTTPRequest.blank('/v2/images?server=' + ref)
context = request.environ['nova.context']
image_service.index(context, filters=filters).AndReturn([])
self.mox.ReplayAll()
controller = images.Controller(image_service=image_service)
controller.index(request)
self.mox.VerifyAll()
def test_image_filter_changes_since(self):
image_service = self.mox.CreateMockAnything()
filters = {'changes-since': '2011-01-24T17:08Z'}
request = fakes.HTTPRequest.blank('/v2/images?changes-since='
'2011-01-24T17:08Z')
context = request.environ['nova.context']
image_service.index(context, filters=filters).AndReturn([])
self.mox.ReplayAll()
controller = images.Controller(image_service=image_service)
controller.index(request)
self.mox.VerifyAll()
def test_image_filter_with_type(self):
image_service = self.mox.CreateMockAnything()
filters = {'property-image_type': 'BASE'}
request = fakes.HTTPRequest.blank('/v2/images?type=BASE')
context = request.environ['nova.context']
image_service.index(context, filters=filters).AndReturn([])
self.mox.ReplayAll()
controller = images.Controller(image_service=image_service)
controller.index(request)
self.mox.VerifyAll()
def test_image_filter_not_supported(self):
image_service = self.mox.CreateMockAnything()
filters = {'status': 'ACTIVE'}
request = fakes.HTTPRequest.blank('/v2/images?status=ACTIVE&'
'UNSUPPORTEDFILTER=testname')
context = request.environ['nova.context']
image_service.detail(context, filters=filters).AndReturn([])
self.mox.ReplayAll()
controller = images.Controller(image_service=image_service)
controller.detail(request)
self.mox.VerifyAll()
def test_image_no_filters(self):
image_service = self.mox.CreateMockAnything()
filters = {}
request = fakes.HTTPRequest.blank('/v2/images')
context = request.environ['nova.context']
image_service.index(context, filters=filters).AndReturn([])
self.mox.ReplayAll()
controller = images.Controller(image_service=image_service)
controller.index(request)
self.mox.VerifyAll()
def test_image_detail_filter_with_name(self):
image_service = self.mox.CreateMockAnything()
filters = {'name': 'testname'}
request = fakes.HTTPRequest.blank('/v2/fake/images/detail'
'?name=testname')
context = request.environ['nova.context']
image_service.detail(context, filters=filters).AndReturn([])
self.mox.ReplayAll()
controller = images.Controller(image_service=image_service)
controller.detail(request)
self.mox.VerifyAll()
def test_image_detail_filter_with_status(self):
image_service = self.mox.CreateMockAnything()
filters = {'status': 'ACTIVE'}
request = fakes.HTTPRequest.blank('/v2/fake/images/detail'
'?status=ACTIVE')
context = request.environ['nova.context']
image_service.detail(context, filters=filters).AndReturn([])
self.mox.ReplayAll()
controller = images.Controller(image_service=image_service)
controller.detail(request)
self.mox.VerifyAll()
def test_image_detail_filter_with_property(self):
image_service = self.mox.CreateMockAnything()
filters = {'property-test': '3'}
request = fakes.HTTPRequest.blank('/v2/fake/images/detail'
'?property-test=3')
context = request.environ['nova.context']
image_service.detail(context, filters=filters).AndReturn([])
self.mox.ReplayAll()
controller = images.Controller(image_service=image_service)
controller.detail(request)
self.mox.VerifyAll()
def test_image_detail_filter_server_href(self):
image_service = self.mox.CreateMockAnything()
uuid = 'fa95aaf5-ab3b-4cd8-88c0-2be7dd051aaf'
ref = 'http://localhost:8774/servers/' + uuid
url = '/v2/fake/images/detail?server=' + ref
filters = {'property-instance_uuid': uuid}
request = fakes.HTTPRequest.blank(url)
context = request.environ['nova.context']
image_service.index(context, filters=filters).AndReturn([])
self.mox.ReplayAll()
controller = images.Controller(image_service=image_service)
controller.index(request)
self.mox.VerifyAll()
def test_image_detail_filter_server_uuid(self):
image_service = self.mox.CreateMockAnything()
uuid = 'fa95aaf5-ab3b-4cd8-88c0-2be7dd051aaf'
url = '/v2/fake/images/detail?server=' + uuid
filters = {'property-instance_uuid': uuid}
request = fakes.HTTPRequest.blank(url)
context = request.environ['nova.context']
image_service.index(context, filters=filters).AndReturn([])
self.mox.ReplayAll()
controller = images.Controller(image_service=image_service)
controller.index(request)
self.mox.VerifyAll()
def test_image_detail_filter_changes_since(self):
image_service = self.mox.CreateMockAnything()
filters = {'changes-since': '2011-01-24T17:08Z'}
request = fakes.HTTPRequest.blank('/v2/fake/images/detail'
'?changes-since=2011-01-24T17:08Z')
context = request.environ['nova.context']
image_service.index(context, filters=filters).AndReturn([])
self.mox.ReplayAll()
controller = images.Controller(image_service=image_service)
controller.index(request)
self.mox.VerifyAll()
def test_image_detail_filter_with_type(self):
image_service = self.mox.CreateMockAnything()
filters = {'property-image_type': 'BASE'}
request = fakes.HTTPRequest.blank('/v2/fake/images/detail?type=BASE')
context = request.environ['nova.context']
image_service.index(context, filters=filters).AndReturn([])
self.mox.ReplayAll()
controller = images.Controller(image_service=image_service)
controller.index(request)
self.mox.VerifyAll()
def test_image_detail_filter_not_supported(self):
image_service = self.mox.CreateMockAnything()
filters = {'status': 'ACTIVE'}
request = fakes.HTTPRequest.blank('/v2/fake/images/detail?status='
'ACTIVE&UNSUPPORTEDFILTER=testname')
context = request.environ['nova.context']
image_service.detail(context, filters=filters).AndReturn([])
self.mox.ReplayAll()
controller = images.Controller(image_service=image_service)
controller.detail(request)
self.mox.VerifyAll()
def test_image_detail_no_filters(self):
image_service = self.mox.CreateMockAnything()
filters = {}
request = fakes.HTTPRequest.blank('/v2/fake/images/detail')
context = request.environ['nova.context']
image_service.detail(context, filters=filters).AndReturn([])
self.mox.ReplayAll()
controller = images.Controller(image_service=image_service)
controller.detail(request)
self.mox.VerifyAll()
def test_generate_alternate_link(self):
view = images_view.ViewBuilder()
request = fakes.HTTPRequest.blank('/v2/fake/images/1')
generated_url = view._get_alternate_link(request, 1)
actual_url = "%s/fake/images/1" % utils.generate_glance_url()
self.assertEqual(generated_url, actual_url)
def test_delete_image(self):
request = fakes.HTTPRequest.blank('/v2/fake/images/124')
request.method = 'DELETE'
response = self.controller.delete(request, '124')
self.assertEqual(response.status_int, 204)
def test_delete_image_not_found(self):
request = fakes.HTTPRequest.blank('/v2/fake/images/300')
request.method = 'DELETE'
self.assertRaises(webob.exc.HTTPNotFound,
self.controller.delete, request, '300')
class ImageXMLSerializationTest(test.TestCase):
TIMESTAMP = "2010-10-11T10:30:22Z"
SERVER_UUID = 'aa640691-d1a7-4a67-9d3c-d35ee6b3cc74'
SERVER_HREF = 'http://localhost/v2/servers/' + SERVER_UUID
SERVER_BOOKMARK = 'http://localhost/servers/' + SERVER_UUID
IMAGE_HREF = 'http://localhost/v2/fake/images/%s'
IMAGE_NEXT = 'http://localhost/v2/fake/images?limit=%s&marker=%s'
IMAGE_BOOKMARK = 'http://localhost/fake/images/%s'
def test_xml_declaration(self):
serializer = images.ImageTemplate()
fixture = {
'image': {
'id': 1,
'name': 'Image1',
'created': self.TIMESTAMP,
'updated': self.TIMESTAMP,
'status': 'ACTIVE',
'progress': 80,
'server': {
'id': self.SERVER_UUID,
'links': [
{
'href': self.SERVER_HREF,
'rel': 'self',
},
{
'href': self.SERVER_BOOKMARK,
'rel': 'bookmark',
},
],
},
'metadata': {
'key1': 'value1',
},
'links': [
{
'href': self.IMAGE_HREF % 1,
'rel': 'self',
},
{
'href': self.IMAGE_BOOKMARK % 1,
'rel': 'bookmark',
},
],
},
}
output = serializer.serialize(fixture)
has_dec = output.startswith("<?xml version='1.0' encoding='UTF-8'?>")
self.assertTrue(has_dec)
def test_show(self):
serializer = images.ImageTemplate()
fixture = {
'image': {
'id': 1,
'name': 'Image1',
'created': self.TIMESTAMP,
'updated': self.TIMESTAMP,
'status': 'ACTIVE',
'progress': 80,
'minRam': 10,
'minDisk': 100,
'server': {
'id': self.SERVER_UUID,
'links': [
{
'href': self.SERVER_HREF,
'rel': 'self',
},
{
'href': self.SERVER_BOOKMARK,
'rel': 'bookmark',
},
],
},
'metadata': {
'key1': 'value1',
},
'links': [
{
'href': self.IMAGE_HREF % 1,
'rel': 'self',
},
{
'href': self.IMAGE_BOOKMARK % 1,
'rel': 'bookmark',
},
],
},
}
output = serializer.serialize(fixture)
root = etree.XML(output)
xmlutil.validate_schema(root, 'image')
image_dict = fixture['image']
for key in ['name', 'id', 'updated', 'created', 'status', 'progress']:
self.assertEqual(root.get(key), str(image_dict[key]))
link_nodes = root.findall('{0}link'.format(ATOMNS))
self.assertEqual(len(link_nodes), 2)
for i, link in enumerate(image_dict['links']):
for key, value in link.items():
self.assertEqual(link_nodes[i].get(key), value)
metadata_root = root.find('{0}metadata'.format(NS))
metadata_elems = metadata_root.findall('{0}meta'.format(NS))
self.assertEqual(len(metadata_elems), 1)
for i, metadata_elem in enumerate(metadata_elems):
(meta_key, meta_value) = image_dict['metadata'].items()[i]
self.assertEqual(str(metadata_elem.get('key')), str(meta_key))
self.assertEqual(str(metadata_elem.text).strip(), str(meta_value))
server_root = root.find('{0}server'.format(NS))
self.assertEqual(server_root.get('id'), image_dict['server']['id'])
link_nodes = server_root.findall('{0}link'.format(ATOMNS))
self.assertEqual(len(link_nodes), 2)
for i, link in enumerate(image_dict['server']['links']):
for key, value in link.items():
self.assertEqual(link_nodes[i].get(key), value)
def test_show_zero_metadata(self):
serializer = images.ImageTemplate()
fixture = {
'image': {
'id': 1,
'name': 'Image1',
'created': self.TIMESTAMP,
'updated': self.TIMESTAMP,
'status': 'ACTIVE',
'server': {
'id': self.SERVER_UUID,
'links': [
{
'href': self.SERVER_HREF,
'rel': 'self',
},
{
'href': self.SERVER_BOOKMARK,
'rel': 'bookmark',
},
],
},
'metadata': {},
'links': [
{
'href': self.IMAGE_HREF % 1,
'rel': 'self',
},
{
'href': self.IMAGE_BOOKMARK % 1,
'rel': 'bookmark',
},
],
},
}
output = serializer.serialize(fixture)
root = etree.XML(output)
xmlutil.validate_schema(root, 'image')
image_dict = fixture['image']
for key in ['name', 'id', 'updated', 'created', 'status']:
self.assertEqual(root.get(key), str(image_dict[key]))
link_nodes = root.findall('{0}link'.format(ATOMNS))
self.assertEqual(len(link_nodes), 2)
for i, link in enumerate(image_dict['links']):
for key, value in link.items():
self.assertEqual(link_nodes[i].get(key), value)
meta_nodes = root.findall('{0}meta'.format(ATOMNS))
self.assertEqual(len(meta_nodes), 0)
server_root = root.find('{0}server'.format(NS))
self.assertEqual(server_root.get('id'), image_dict['server']['id'])
link_nodes = server_root.findall('{0}link'.format(ATOMNS))
self.assertEqual(len(link_nodes), 2)
for i, link in enumerate(image_dict['server']['links']):
for key, value in link.items():
self.assertEqual(link_nodes[i].get(key), value)
def test_show_image_no_metadata_key(self):
serializer = images.ImageTemplate()
fixture = {
'image': {
'id': 1,
'name': 'Image1',
'created': self.TIMESTAMP,
'updated': self.TIMESTAMP,
'status': 'ACTIVE',
'server': {
'id': self.SERVER_UUID,
'links': [
{
'href': self.SERVER_HREF,
'rel': 'self',
},
{
'href': self.SERVER_BOOKMARK,
'rel': 'bookmark',
},
],
},
'links': [
{
'href': self.IMAGE_HREF % 1,
'rel': 'self',
},
{
'href': self.IMAGE_BOOKMARK % 1,
'rel': 'bookmark',
},
],
},
}
output = serializer.serialize(fixture)
root = etree.XML(output)
xmlutil.validate_schema(root, 'image')
image_dict = fixture['image']
for key in ['name', 'id', 'updated', 'created', 'status']:
self.assertEqual(root.get(key), str(image_dict[key]))
link_nodes = root.findall('{0}link'.format(ATOMNS))
self.assertEqual(len(link_nodes), 2)
for i, link in enumerate(image_dict['links']):
for key, value in link.items():
self.assertEqual(link_nodes[i].get(key), value)
meta_nodes = root.findall('{0}meta'.format(ATOMNS))
self.assertEqual(len(meta_nodes), 0)
server_root = root.find('{0}server'.format(NS))
self.assertEqual(server_root.get('id'), image_dict['server']['id'])
link_nodes = server_root.findall('{0}link'.format(ATOMNS))
self.assertEqual(len(link_nodes), 2)
for i, link in enumerate(image_dict['server']['links']):
for key, value in link.items():
self.assertEqual(link_nodes[i].get(key), value)
def test_show_no_server(self):
serializer = images.ImageTemplate()
fixture = {
'image': {
'id': 1,
'name': 'Image1',
'created': self.TIMESTAMP,
'updated': self.TIMESTAMP,
'status': 'ACTIVE',
'metadata': {
'key1': 'value1',
},
'links': [
{
'href': self.IMAGE_HREF % 1,
'rel': 'self',
},
{
'href': self.IMAGE_BOOKMARK % 1,
'rel': 'bookmark',
},
],
},
}
output = serializer.serialize(fixture)
root = etree.XML(output)
xmlutil.validate_schema(root, 'image')
image_dict = fixture['image']
for key in ['name', 'id', 'updated', 'created', 'status']:
self.assertEqual(root.get(key), str(image_dict[key]))
link_nodes = root.findall('{0}link'.format(ATOMNS))
self.assertEqual(len(link_nodes), 2)
for i, link in enumerate(image_dict['links']):
for key, value in link.items():
self.assertEqual(link_nodes[i].get(key), value)
metadata_root = root.find('{0}metadata'.format(NS))
metadata_elems = metadata_root.findall('{0}meta'.format(NS))
self.assertEqual(len(metadata_elems), 1)
for i, metadata_elem in enumerate(metadata_elems):
(meta_key, meta_value) = image_dict['metadata'].items()[i]
self.assertEqual(str(metadata_elem.get('key')), str(meta_key))
self.assertEqual(str(metadata_elem.text).strip(), str(meta_value))
server_root = root.find('{0}server'.format(NS))
self.assertEqual(server_root, None)
def test_show_with_min_ram(self):
serializer = images.ImageTemplate()
fixture = {
'image': {
'id': 1,
'name': 'Image1',
'created': self.TIMESTAMP,
'updated': self.TIMESTAMP,
'status': 'ACTIVE',
'progress': 80,
'minRam': 256,
'server': {
'id': self.SERVER_UUID,
'links': [
{
'href': self.SERVER_HREF,
'rel': 'self',
},
{
'href': self.SERVER_BOOKMARK,
'rel': 'bookmark',
},
],
},
'metadata': {
'key1': 'value1',
},
'links': [
{
'href': self.IMAGE_HREF % 1,
'rel': 'self',
},
{
'href': self.IMAGE_BOOKMARK % 1,
'rel': 'bookmark',
},
],
},
}
output = serializer.serialize(fixture)
root = etree.XML(output)
xmlutil.validate_schema(root, 'image')
image_dict = fixture['image']
for key in ['name', 'id', 'updated', 'created', 'status', 'progress',
'minRam']:
self.assertEqual(root.get(key), str(image_dict[key]))
link_nodes = root.findall('{0}link'.format(ATOMNS))
self.assertEqual(len(link_nodes), 2)
for i, link in enumerate(image_dict['links']):
for key, value in link.items():
self.assertEqual(link_nodes[i].get(key), value)
metadata_root = root.find('{0}metadata'.format(NS))
metadata_elems = metadata_root.findall('{0}meta'.format(NS))
self.assertEqual(len(metadata_elems), 1)
for i, metadata_elem in enumerate(metadata_elems):
(meta_key, meta_value) = image_dict['metadata'].items()[i]
self.assertEqual(str(metadata_elem.get('key')), str(meta_key))
self.assertEqual(str(metadata_elem.text).strip(), str(meta_value))
server_root = root.find('{0}server'.format(NS))
self.assertEqual(server_root.get('id'), image_dict['server']['id'])
link_nodes = server_root.findall('{0}link'.format(ATOMNS))
self.assertEqual(len(link_nodes), 2)
for i, link in enumerate(image_dict['server']['links']):
for key, value in link.items():
self.assertEqual(link_nodes[i].get(key), value)
def test_show_with_min_disk(self):
serializer = images.ImageTemplate()
fixture = {
'image': {
'id': 1,
'name': 'Image1',
'created': self.TIMESTAMP,
'updated': self.TIMESTAMP,
'status': 'ACTIVE',
'progress': 80,
'minDisk': 5,
'server': {
'id': self.SERVER_UUID,
'links': [
{
'href': self.SERVER_HREF,
'rel': 'self',
},
{
'href': self.SERVER_BOOKMARK,
'rel': 'bookmark',
},
],
},
'metadata': {
'key1': 'value1',
},
'links': [
{
'href': self.IMAGE_HREF % 1,
'rel': 'self',
},
{
'href': self.IMAGE_BOOKMARK % 1,
'rel': 'bookmark',
},
],
},
}
output = serializer.serialize(fixture)
root = etree.XML(output)
xmlutil.validate_schema(root, 'image')
image_dict = fixture['image']
for key in ['name', 'id', 'updated', 'created', 'status', 'progress',
'minDisk']:
self.assertEqual(root.get(key), str(image_dict[key]))
link_nodes = root.findall('{0}link'.format(ATOMNS))
self.assertEqual(len(link_nodes), 2)
for i, link in enumerate(image_dict['links']):
for key, value in link.items():
self.assertEqual(link_nodes[i].get(key), value)
metadata_root = root.find('{0}metadata'.format(NS))
metadata_elems = metadata_root.findall('{0}meta'.format(NS))
self.assertEqual(len(metadata_elems), 1)
for i, metadata_elem in enumerate(metadata_elems):
(meta_key, meta_value) = image_dict['metadata'].items()[i]
self.assertEqual(str(metadata_elem.get('key')), str(meta_key))
self.assertEqual(str(metadata_elem.text).strip(), str(meta_value))
server_root = root.find('{0}server'.format(NS))
self.assertEqual(server_root.get('id'), image_dict['server']['id'])
link_nodes = server_root.findall('{0}link'.format(ATOMNS))
self.assertEqual(len(link_nodes), 2)
for i, link in enumerate(image_dict['server']['links']):
for key, value in link.items():
self.assertEqual(link_nodes[i].get(key), value)
def test_index(self):
serializer = images.MinimalImagesTemplate()
fixture = {
'images': [
{
'id': 1,
'name': 'Image1',
'links': [
{
'href': self.IMAGE_HREF % 1,
'rel': 'self',
},
{
'href': self.IMAGE_BOOKMARK % 1,
'rel': 'bookmark',
},
],
},
{
'id': 2,
'name': 'Image2',
'links': [
{
'href': self.IMAGE_HREF % 2,
'rel': 'self',
},
{
'href': self.IMAGE_BOOKMARK % 2,
'rel': 'bookmark',
},
],
},
]
}
output = serializer.serialize(fixture)
root = etree.XML(output)
xmlutil.validate_schema(root, 'images_index')
image_elems = root.findall('{0}image'.format(NS))
self.assertEqual(len(image_elems), 2)
for i, image_elem in enumerate(image_elems):
image_dict = fixture['images'][i]
for key in ['name', 'id']:
self.assertEqual(image_elem.get(key), str(image_dict[key]))
link_nodes = image_elem.findall('{0}link'.format(ATOMNS))
self.assertEqual(len(link_nodes), 2)
for i, link in enumerate(image_dict['links']):
for key, value in link.items():
self.assertEqual(link_nodes[i].get(key), value)
def test_index_with_links(self):
serializer = images.MinimalImagesTemplate()
fixture = {
'images': [
{
'id': 1,
'name': 'Image1',
'links': [
{
'href': self.IMAGE_HREF % 1,
'rel': 'self',
},
{
'href': self.IMAGE_BOOKMARK % 1,
'rel': 'bookmark',
},
],
},
{
'id': 2,
'name': 'Image2',
'links': [
{
'href': self.IMAGE_HREF % 2,
'rel': 'self',
},
{
'href': self.IMAGE_BOOKMARK % 2,
'rel': 'bookmark',
},
],
},
],
'images_links': [
{
'rel': 'next',
'href': self.IMAGE_NEXT % (2, 2),
}
],
}
output = serializer.serialize(fixture)
root = etree.XML(output)
xmlutil.validate_schema(root, 'images_index')
image_elems = root.findall('{0}image'.format(NS))
self.assertEqual(len(image_elems), 2)
for i, image_elem in enumerate(image_elems):
image_dict = fixture['images'][i]
for key in ['name', 'id']:
self.assertEqual(image_elem.get(key), str(image_dict[key]))
link_nodes = image_elem.findall('{0}link'.format(ATOMNS))
self.assertEqual(len(link_nodes), 2)
for i, link in enumerate(image_dict['links']):
for key, value in link.items():
self.assertEqual(link_nodes[i].get(key), value)
# Check images_links
images_links = root.findall('{0}link'.format(ATOMNS))
for i, link in enumerate(fixture['images_links']):
for key, value in link.items():
self.assertEqual(images_links[i].get(key), value)
def test_index_zero_images(self):
serializer = images.MinimalImagesTemplate()
fixtures = {
'images': [],
}
output = serializer.serialize(fixtures)
root = etree.XML(output)
xmlutil.validate_schema(root, 'images_index')
image_elems = root.findall('{0}image'.format(NS))
self.assertEqual(len(image_elems), 0)
def test_detail(self):
serializer = images.ImagesTemplate()
fixture = {
'images': [
{
'id': 1,
'name': 'Image1',
'created': self.TIMESTAMP,
'updated': self.TIMESTAMP,
'status': 'ACTIVE',
'server': {
'id': self.SERVER_UUID,
'links': [
{
'href': self.SERVER_HREF,
'rel': 'self',
},
{
'href': self.SERVER_BOOKMARK,
'rel': 'bookmark',
},
],
},
'links': [
{
'href': self.IMAGE_HREF % 1,
'rel': 'self',
},
{
'href': self.IMAGE_BOOKMARK % 1,
'rel': 'bookmark',
},
],
},
{
'id': '2',
'name': 'Image2',
'created': self.TIMESTAMP,
'updated': self.TIMESTAMP,
'status': 'SAVING',
'progress': 80,
'metadata': {
'key1': 'value1',
},
'links': [
{
'href': self.IMAGE_HREF % 2,
'rel': 'self',
},
{
'href': self.IMAGE_BOOKMARK % 2,
'rel': 'bookmark',
},
],
},
]
}
output = serializer.serialize(fixture)
root = etree.XML(output)
xmlutil.validate_schema(root, 'images')
image_elems = root.findall('{0}image'.format(NS))
self.assertEqual(len(image_elems), 2)
for i, image_elem in enumerate(image_elems):
image_dict = fixture['images'][i]
for key in ['name', 'id', 'updated', 'created', 'status']:
self.assertEqual(image_elem.get(key), str(image_dict[key]))
link_nodes = image_elem.findall('{0}link'.format(ATOMNS))
self.assertEqual(len(link_nodes), 2)
for i, link in enumerate(image_dict['links']):
for key, value in link.items():
self.assertEqual(link_nodes[i].get(key), value)
|
kkdd/arangodb
|
refs/heads/devel
|
3rdParty/V8-4.3.61/third_party/python_26/Tools/scripts/linktree.py
|
101
|
#! /usr/bin/env python
# linktree
#
# Make a copy of a directory tree with symbolic links to all files in the
# original tree.
# All symbolic links go to a special symbolic link at the top, so you
# can easily fix things if the original source tree moves.
# See also "mkreal".
#
# usage: mklinks oldtree newtree [linkto]
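#
# Illustrative example (not part of the original script): given a source
# tree "src" containing src/a.py and src/lib/b.py, running
#
#     linktree.py src src-links
#
# creates src-links/.LINK -> ../src, a real directory src-links/lib, and the
# symlinks src-links/a.py -> .LINK/a.py and
# src-links/lib/b.py -> ../.LINK/lib/b.py, so if "src" is later moved only
# the single .LINK symlink has to be repointed.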
import sys, os
LINK = '.LINK' # Name of special symlink at the top.
debug = 0
def main():
if not 3 <= len(sys.argv) <= 4:
print 'usage:', sys.argv[0], 'oldtree newtree [linkto]'
return 2
oldtree, newtree = sys.argv[1], sys.argv[2]
if len(sys.argv) > 3:
link = sys.argv[3]
link_may_fail = 1
else:
link = LINK
link_may_fail = 0
if not os.path.isdir(oldtree):
print oldtree + ': not a directory'
return 1
try:
os.mkdir(newtree, 0777)
except os.error, msg:
print newtree + ': cannot mkdir:', msg
return 1
linkname = os.path.join(newtree, link)
try:
os.symlink(os.path.join(os.pardir, oldtree), linkname)
except os.error, msg:
if not link_may_fail:
print linkname + ': cannot symlink:', msg
return 1
else:
print linkname + ': warning: cannot symlink:', msg
linknames(oldtree, newtree, link)
return 0
def linknames(old, new, link):
if debug: print 'linknames', (old, new, link)
try:
names = os.listdir(old)
except os.error, msg:
print old + ': warning: cannot listdir:', msg
return
for name in names:
if name not in (os.curdir, os.pardir):
oldname = os.path.join(old, name)
linkname = os.path.join(link, name)
newname = os.path.join(new, name)
if debug > 1: print oldname, newname, linkname
if os.path.isdir(oldname) and \
not os.path.islink(oldname):
try:
os.mkdir(newname, 0777)
ok = 1
                except os.error, msg:
print newname + \
': warning: cannot mkdir:', msg
ok = 0
if ok:
linkname = os.path.join(os.pardir,
linkname)
linknames(oldname, newname, linkname)
else:
os.symlink(linkname, newname)
if __name__ == '__main__':
sys.exit(main())
|
haosdent/rbtools
|
refs/heads/master
|
rbtools/clients/tests.py
|
2
|
from __future__ import print_function, unicode_literals
import os
import re
import sys
import time
from hashlib import md5
from functools import wraps
from random import randint
from tempfile import mktemp
from textwrap import dedent
from nose import SkipTest
from six.moves import cStringIO as StringIO
from rbtools.api.capabilities import Capabilities
from rbtools.clients import RepositoryInfo
from rbtools.clients.bazaar import BazaarClient
from rbtools.clients.errors import (InvalidRevisionSpecError,
TooManyRevisionsError)
from rbtools.clients.git import GitClient
from rbtools.clients.mercurial import MercurialClient
from rbtools.clients.perforce import PerforceClient, P4Wrapper
from rbtools.clients.svn import SVNRepositoryInfo, SVNClient
from rbtools.tests import OptionsStub
from rbtools.utils.checks import is_valid_version
from rbtools.utils.filesystem import is_exe_in_path, load_config, make_tempfile
from rbtools.utils.process import execute
from rbtools.utils.testbase import RBTestBase
class SCMClientTests(RBTestBase):
def setUp(self):
super(SCMClientTests, self).setUp()
self.options = OptionsStub()
self.clients_dir = os.path.dirname(__file__)
class GitClientTests(SCMClientTests):
TESTSERVER = "http://127.0.0.1:8080"
def _run_git(self, command):
return execute(['git'] + command, env=None, split_lines=False,
ignore_errors=False, extra_ignore_errors=(),
translate_newlines=True)
def _git_add_file_commit(self, file, data, msg):
"""Add a file to a git repository with the content of data and commit with msg."""
foo = open(file, 'w')
foo.write(data)
foo.close()
self._run_git(['add', file])
self._run_git(['commit', '-m', msg])
def _git_get_head(self):
return self._run_git(['rev-parse', 'HEAD']).strip()
def setUp(self):
super(GitClientTests, self).setUp()
if not is_exe_in_path('git'):
raise SkipTest('git not found in path')
self.set_user_home(
os.path.join(self.clients_dir, 'testdata', 'homedir'))
self.git_dir = os.path.join(self.clients_dir, 'testdata', 'git-repo')
self.clone_dir = self.chdir_tmp()
self._run_git(['clone', self.git_dir, self.clone_dir])
self.client = GitClient(options=self.options)
self.options.parent_branch = None
def test_get_repository_info_simple(self):
"""Testing GitClient get_repository_info, simple case"""
ri = self.client.get_repository_info()
self.assertTrue(isinstance(ri, RepositoryInfo))
self.assertEqual(ri.base_path, '')
self.assertEqual(ri.path.rstrip("/.git"), self.git_dir)
self.assertTrue(ri.supports_parent_diffs)
self.assertFalse(ri.supports_changesets)
def test_scan_for_server_simple(self):
"""Testing GitClient scan_for_server, simple case"""
ri = self.client.get_repository_info()
server = self.client.scan_for_server(ri)
self.assertTrue(server is None)
def test_scan_for_server_reviewboardrc(self):
"""Testing GitClient scan_for_server, .reviewboardrc case"""
rc = open(os.path.join(self.clone_dir, '.reviewboardrc'), 'w')
rc.write('REVIEWBOARD_URL = "%s"' % self.TESTSERVER)
rc.close()
self.client.config = load_config()
ri = self.client.get_repository_info()
server = self.client.scan_for_server(ri)
self.assertEqual(server, self.TESTSERVER)
def test_scan_for_server_property(self):
"""Testing GitClient scan_for_server using repo property"""
self._run_git(['config', 'reviewboard.url', self.TESTSERVER])
ri = self.client.get_repository_info()
self.assertEqual(self.client.scan_for_server(ri), self.TESTSERVER)
def test_diff_simple(self):
"""Testing GitClient simple diff case"""
self.client.get_repository_info()
base_commit_id = self._git_get_head()
self._git_add_file_commit('foo.txt', FOO1, 'delete and modify stuff')
commit_id = self._git_get_head()
revisions = self.client.parse_revision_spec([])
result = self.client.diff(revisions)
self.assertTrue(isinstance(result, dict))
self.assertEqual(len(result), 4)
self.assertTrue('diff' in result)
self.assertTrue('parent_diff' in result)
self.assertTrue('base_commit_id' in result)
self.assertTrue('commit_id' in result)
self.assertEqual(md5(result['diff']).hexdigest(),
'69d4616cf985f6b10571036db744e2d8')
self.assertEqual(result['parent_diff'], None)
self.assertEqual(result['base_commit_id'], base_commit_id)
self.assertEqual(result['commit_id'], commit_id)
def test_too_many_revisions(self):
"""Testing GitClient parse_revision_spec with too many revisions"""
self.assertRaises(TooManyRevisionsError,
self.client.parse_revision_spec,
[1, 2, 3])
def test_diff_simple_multiple(self):
"""Testing GitClient simple diff with multiple commits case"""
self.client.get_repository_info()
base_commit_id = self._git_get_head()
self._git_add_file_commit('foo.txt', FOO1, 'commit 1')
self._git_add_file_commit('foo.txt', FOO2, 'commit 1')
self._git_add_file_commit('foo.txt', FOO3, 'commit 1')
commit_id = self._git_get_head()
revisions = self.client.parse_revision_spec([])
result = self.client.diff(revisions)
self.assertTrue(isinstance(result, dict))
self.assertEqual(len(result), 4)
self.assertTrue('diff' in result)
self.assertTrue('parent_diff' in result)
self.assertTrue('base_commit_id' in result)
self.assertTrue('commit_id' in result)
self.assertEqual(md5(result['diff']).hexdigest(),
'c9a31264f773406edff57a8ed10d9acc')
self.assertEqual(result['parent_diff'], None)
self.assertEqual(result['base_commit_id'], base_commit_id)
self.assertEqual(result['commit_id'], commit_id)
def test_diff_exclude(self):
"""Testing GitClient simple diff with file exclusion."""
self.client.get_repository_info()
base_commit_id = self._git_get_head()
self._git_add_file_commit('foo.txt', FOO1, 'commit 1')
self._git_add_file_commit('exclude.txt', FOO2, 'commit 2')
commit_id = self._git_get_head()
revisions = self.client.parse_revision_spec([])
result = self.client.diff(revisions, exclude_patterns=['exclude.txt'])
self.assertTrue(isinstance(result, dict))
self.assertEqual(len(result), 4)
self.assertTrue('diff' in result)
self.assertTrue('parent_diff' in result)
self.assertTrue('base_commit_id' in result)
self.assertEqual(md5(result['diff']).hexdigest(),
'69d4616cf985f6b10571036db744e2d8')
self.assertEqual(result['parent_diff'], None)
self.assertEqual(result['base_commit_id'], base_commit_id)
self.assertEqual(result['commit_id'], commit_id)
def test_diff_exclude_in_subdir(self):
"""Testing GitClient simple diff with file exclusion in a subdir"""
base_commit_id = self._git_get_head()
os.mkdir('subdir')
self._git_add_file_commit('foo.txt', FOO1, 'commit 1')
os.chdir('subdir')
self._git_add_file_commit('exclude.txt', FOO2, 'commit 2')
self.client.get_repository_info()
commit_id = self._git_get_head()
revisions = self.client.parse_revision_spec([])
result = self.client.diff(revisions,
exclude_patterns=['exclude.txt'])
self.assertTrue(isinstance(result, dict))
self.assertEqual(len(result), 4)
self.assertTrue('diff' in result)
self.assertTrue('parent_diff' in result)
self.assertTrue('base_commit_id' in result)
self.assertEqual(md5(result['diff']).hexdigest(),
'69d4616cf985f6b10571036db744e2d8')
self.assertEqual(result['parent_diff'], None)
self.assertEqual(result['base_commit_id'], base_commit_id)
self.assertEqual(result['commit_id'], commit_id)
def test_diff_exclude_root_pattern_in_subdir(self):
"""Testing GitClient diff with file exclusion in the repo root."""
base_commit_id = self._git_get_head()
os.mkdir('subdir')
self._git_add_file_commit('foo.txt', FOO1, 'commit 1')
self._git_add_file_commit('exclude.txt', FOO2, 'commit 2')
os.chdir('subdir')
self.client.get_repository_info()
commit_id = self._git_get_head()
revisions = self.client.parse_revision_spec([])
result = self.client.diff(
revisions,
exclude_patterns=[os.path.sep + 'exclude.txt'])
self.assertTrue(isinstance(result, dict))
self.assertEqual(len(result), 4)
self.assertTrue('diff' in result)
self.assertTrue('parent_diff' in result)
self.assertTrue('base_commit_id' in result)
self.assertEqual(md5(result['diff']).hexdigest(),
'69d4616cf985f6b10571036db744e2d8')
self.assertEqual(result['parent_diff'], None)
self.assertEqual(result['base_commit_id'], base_commit_id)
self.assertEqual(result['commit_id'], commit_id)
def test_diff_branch_diverge(self):
"""Testing GitClient diff with divergent branches"""
self._git_add_file_commit('foo.txt', FOO1, 'commit 1')
self._run_git(['checkout', '-b', 'mybranch', '--track',
'origin/master'])
base_commit_id = self._git_get_head()
self._git_add_file_commit('foo.txt', FOO2, 'commit 2')
commit_id = self._git_get_head()
self.client.get_repository_info()
revisions = self.client.parse_revision_spec([])
result = self.client.diff(revisions)
self.assertTrue(isinstance(result, dict))
self.assertEqual(len(result), 4)
self.assertTrue('diff' in result)
self.assertTrue('parent_diff' in result)
self.assertTrue('base_commit_id' in result)
self.assertTrue('commit_id' in result)
self.assertEqual(md5(result['diff']).hexdigest(),
'cfb79a46f7a35b07e21765608a7852f7')
self.assertEqual(result['parent_diff'], None)
self.assertEqual(result['base_commit_id'], base_commit_id)
self.assertEqual(result['commit_id'], commit_id)
self._run_git(['checkout', 'master'])
self.client.get_repository_info()
commit_id = self._git_get_head()
revisions = self.client.parse_revision_spec([])
result = self.client.diff(revisions)
self.assertTrue(isinstance(result, dict))
self.assertEqual(len(result), 4)
self.assertTrue('diff' in result)
self.assertTrue('parent_diff' in result)
self.assertTrue('base_commit_id' in result)
self.assertTrue('commit_id' in result)
self.assertEqual(md5(result['diff']).hexdigest(),
'69d4616cf985f6b10571036db744e2d8')
self.assertEqual(result['parent_diff'], None)
self.assertEqual(result['base_commit_id'], base_commit_id)
self.assertEqual(result['commit_id'], commit_id)
def test_diff_tracking_no_origin(self):
"""Testing GitClient diff with a tracking branch, but no origin remote"""
self._run_git(['remote', 'add', 'quux', self.git_dir])
self._run_git(['fetch', 'quux'])
self._run_git(['checkout', '-b', 'mybranch', '--track', 'quux/master'])
base_commit_id = self._git_get_head()
self._git_add_file_commit('foo.txt', FOO1, 'delete and modify stuff')
commit_id = self._git_get_head()
self.client.get_repository_info()
revisions = self.client.parse_revision_spec([])
result = self.client.diff(revisions)
self.assertTrue(isinstance(result, dict))
self.assertEqual(len(result), 4)
self.assertTrue('diff' in result)
self.assertTrue('parent_diff' in result)
self.assertTrue('base_commit_id' in result)
self.assertTrue('commit_id' in result)
self.assertEqual(md5(result['diff']).hexdigest(),
'69d4616cf985f6b10571036db744e2d8')
self.assertEqual(result['parent_diff'], None)
self.assertEqual(result['base_commit_id'], base_commit_id)
self.assertEqual(result['commit_id'], commit_id)
def test_diff_local_tracking(self):
"""Testing GitClient diff with a local tracking branch"""
base_commit_id = self._git_get_head()
self._git_add_file_commit('foo.txt', FOO1, 'commit 1')
self._run_git(['checkout', '-b', 'mybranch', '--track', 'master'])
self._git_add_file_commit('foo.txt', FOO2, 'commit 2')
commit_id = self._git_get_head()
self.client.get_repository_info()
revisions = self.client.parse_revision_spec([])
result = self.client.diff(revisions)
self.assertTrue(isinstance(result, dict))
self.assertEqual(len(result), 4)
self.assertTrue('diff' in result)
self.assertTrue('parent_diff' in result)
self.assertTrue('base_commit_id' in result)
self.assertTrue('commit_id' in result)
self.assertEqual(md5(result['diff']).hexdigest(),
'cfb79a46f7a35b07e21765608a7852f7')
self.assertEqual(result['parent_diff'], None)
self.assertEqual(result['base_commit_id'], base_commit_id)
self.assertEqual(result['commit_id'], commit_id)
def test_diff_tracking_override(self):
"""Testing GitClient diff with option override for tracking branch"""
self.options.tracking = 'origin/master'
self._run_git(['remote', 'add', 'bad', self.git_dir])
self._run_git(['fetch', 'bad'])
self._run_git(['checkout', '-b', 'mybranch', '--track', 'bad/master'])
base_commit_id = self._git_get_head()
self._git_add_file_commit('foo.txt', FOO1, 'commit 1')
commit_id = self._git_get_head()
self.client.get_repository_info()
revisions = self.client.parse_revision_spec([])
result = self.client.diff(revisions)
self.assertTrue(isinstance(result, dict))
self.assertEqual(len(result), 4)
self.assertTrue('diff' in result)
self.assertTrue('parent_diff' in result)
self.assertTrue('base_commit_id' in result)
self.assertTrue('commit_id' in result)
self.assertEqual(md5(result['diff']).hexdigest(),
'69d4616cf985f6b10571036db744e2d8')
self.assertEqual(result['parent_diff'], None)
self.assertEqual(result['base_commit_id'], base_commit_id)
self.assertEqual(result['commit_id'], commit_id)
def test_diff_slash_tracking(self):
"""Testing GitClient diff with tracking branch that has slash in its name."""
self._run_git(['fetch', 'origin'])
self._run_git(['checkout', '-b', 'my/branch', '--track',
'origin/not-master'])
base_commit_id = self._git_get_head()
self._git_add_file_commit('foo.txt', FOO2, 'commit 2')
commit_id = self._git_get_head()
self.client.get_repository_info()
revisions = self.client.parse_revision_spec([])
result = self.client.diff(revisions)
self.assertTrue(isinstance(result, dict))
self.assertEqual(len(result), 4)
self.assertTrue('diff' in result)
self.assertTrue('parent_diff' in result)
self.assertTrue('base_commit_id' in result)
self.assertTrue('commit_id' in result)
self.assertEqual(md5(result['diff']).hexdigest(),
'd2015ff5fd0297fd7f1210612f87b6b3')
self.assertEqual(result['parent_diff'], None)
self.assertEqual(result['base_commit_id'], base_commit_id)
self.assertEqual(result['commit_id'], commit_id)
def test_parse_revision_spec_no_args(self):
"""Testing GitClient.parse_revision_spec with no specified revisions"""
base_commit_id = self._git_get_head()
self._git_add_file_commit('foo.txt', FOO2, 'Commit 2')
tip_commit_id = self._git_get_head()
self.client.get_repository_info()
revisions = self.client.parse_revision_spec()
self.assertTrue(isinstance(revisions, dict))
self.assertTrue('base' in revisions)
self.assertTrue('tip' in revisions)
self.assertTrue('parent_base' not in revisions)
self.assertEqual(revisions['base'], base_commit_id)
self.assertEqual(revisions['tip'], tip_commit_id)
def test_parse_revision_spec_no_args_parent(self):
"""Testing GitClient.parse_revision_spec with no specified revisions and a parent diff"""
parent_base_commit_id = self._git_get_head()
self._run_git(['fetch', 'origin'])
self._run_git(['checkout', '-b', 'parent-branch', '--track',
'origin/not-master'])
base_commit_id = self._git_get_head()
self._run_git(['checkout', '-b', 'topic-branch'])
self._git_add_file_commit('foo.txt', FOO2, 'Commit 2')
tip_commit_id = self._git_get_head()
self.options.parent_branch = 'parent-branch'
self.client.get_repository_info()
revisions = self.client.parse_revision_spec()
self.assertTrue(isinstance(revisions, dict))
self.assertTrue('base' in revisions)
self.assertTrue('tip' in revisions)
self.assertTrue('parent_base' in revisions)
self.assertEqual(revisions['parent_base'], parent_base_commit_id)
self.assertEqual(revisions['base'], base_commit_id)
self.assertEqual(revisions['tip'], tip_commit_id)
def test_parse_revision_spec_one_arg(self):
"""Testing GitClient.parse_revision_spec with one specified revision"""
base_commit_id = self._git_get_head()
self._git_add_file_commit('foo.txt', FOO2, 'Commit 2')
tip_commit_id = self._git_get_head()
self.client.get_repository_info()
revisions = self.client.parse_revision_spec([tip_commit_id])
self.assertTrue(isinstance(revisions, dict))
self.assertTrue('base' in revisions)
self.assertTrue('tip' in revisions)
self.assertTrue('parent_base' not in revisions)
self.assertEqual(revisions['base'], base_commit_id)
self.assertEqual(revisions['tip'], tip_commit_id)
def test_parse_revision_spec_one_arg_parent(self):
"""Testing GitClient.parse_revision_spec with one specified revision and a parent diff"""
parent_base_commit_id = self._git_get_head()
self._git_add_file_commit('foo.txt', FOO2, 'Commit 2')
base_commit_id = self._git_get_head()
self._git_add_file_commit('foo.txt', FOO3, 'Commit 3')
tip_commit_id = self._git_get_head()
self.client.get_repository_info()
revisions = self.client.parse_revision_spec([tip_commit_id])
self.assertTrue(isinstance(revisions, dict))
self.assertTrue('base' in revisions)
self.assertTrue('tip' in revisions)
self.assertTrue('parent_base' in revisions)
self.assertEqual(revisions['parent_base'], parent_base_commit_id)
self.assertEqual(revisions['base'], base_commit_id)
self.assertEqual(revisions['tip'], tip_commit_id)
def test_parse_revision_spec_two_args(self):
"""Testing GitClient.parse_revision_spec with two specified revisions"""
base_commit_id = self._git_get_head()
self._run_git(['checkout', '-b', 'topic-branch'])
self._git_add_file_commit('foo.txt', FOO2, 'Commit 2')
tip_commit_id = self._git_get_head()
self.client.get_repository_info()
revisions = self.client.parse_revision_spec(['master', 'topic-branch'])
self.assertTrue(isinstance(revisions, dict))
self.assertTrue('base' in revisions)
self.assertTrue('tip' in revisions)
self.assertTrue('parent_base' not in revisions)
self.assertEqual(revisions['base'], base_commit_id)
self.assertEqual(revisions['tip'], tip_commit_id)
def test_parse_revision_spec_one_arg_two_revs(self):
"""Testing GitClient.parse_revision_spec with diff-since syntax"""
base_commit_id = self._git_get_head()
self._run_git(['checkout', '-b', 'topic-branch'])
self._git_add_file_commit('foo.txt', FOO2, 'Commit 2')
tip_commit_id = self._git_get_head()
self.client.get_repository_info()
revisions = self.client.parse_revision_spec(['master..topic-branch'])
self.assertTrue(isinstance(revisions, dict))
self.assertTrue('base' in revisions)
self.assertTrue('tip' in revisions)
self.assertTrue('parent_base' not in revisions)
self.assertEqual(revisions['base'], base_commit_id)
self.assertEqual(revisions['tip'], tip_commit_id)
def test_parse_revision_spec_one_arg_since_merge(self):
"""Testing GitClient.parse_revision_spec with diff-since-merge syntax"""
base_commit_id = self._git_get_head()
self._run_git(['checkout', '-b', 'topic-branch'])
self._git_add_file_commit('foo.txt', FOO2, 'Commit 2')
tip_commit_id = self._git_get_head()
self.client.get_repository_info()
revisions = self.client.parse_revision_spec(['master...topic-branch'])
self.assertTrue(isinstance(revisions, dict))
self.assertTrue('base' in revisions)
self.assertTrue('tip' in revisions)
self.assertTrue('parent_base' not in revisions)
self.assertEqual(revisions['base'], base_commit_id)
self.assertEqual(revisions['tip'], tip_commit_id)
def test_get_raw_commit_message(self):
"""Testing GitClient.get_raw_commit_message"""
self._git_add_file_commit('foo.txt', FOO2, 'Commit 2')
self.client.get_repository_info()
revisions = self.client.parse_revision_spec()
self.assertEqual(self.client.get_raw_commit_message(revisions),
'Commit 2')
class MercurialTestBase(SCMClientTests):
def setUp(self):
super(MercurialTestBase, self).setUp()
self._hg_env = {}
def _run_hg(self, command, ignore_errors=False, extra_ignore_errors=()):
# We're *not* doing `env = env or {}` here because
# we want the caller to be able to *enable* reading
# of user and system-level hgrc configuration.
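        # (For example, MercurialSubversionClientTests.setUp below sets
        # self._hg_env = {'FOO': 'BAR'}; any non-empty dict here skips the
        # HGRCPATH/HGPLAIN defaults, so hg reads the test home's .hgrc.)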
env = self._hg_env.copy()
if not env:
env = {
'HGRCPATH': os.devnull,
'HGPLAIN': '1',
}
return execute(['hg'] + command, env, split_lines=False,
ignore_errors=ignore_errors,
extra_ignore_errors=extra_ignore_errors,
translate_newlines=True)
def _hg_add_file_commit(self, filename, data, msg, branch=None):
outfile = open(filename, 'w')
outfile.write(data)
outfile.close()
if branch:
self._run_hg(['branch', branch])
self._run_hg(['add', filename])
self._run_hg(['commit', '-m', msg])
class MercurialClientTests(MercurialTestBase):
TESTSERVER = 'http://127.0.0.1:8080'
CLONE_HGRC = dedent("""
[paths]
default = %(hg_dir)s
cloned = %(clone_dir)s
[reviewboard]
url = %(test_server)s
[diff]
git = true
""").rstrip()
def setUp(self):
super(MercurialClientTests, self).setUp()
if not is_exe_in_path('hg'):
raise SkipTest('hg not found in path')
self.hg_dir = os.path.join(self.clients_dir, 'testdata', 'hg-repo')
self.clone_dir = self.chdir_tmp()
self._run_hg(['clone', self.hg_dir, self.clone_dir])
self.client = MercurialClient(options=self.options)
clone_hgrc = open(self.clone_hgrc_path, 'wb')
clone_hgrc.write(self.CLONE_HGRC % {
'hg_dir': self.hg_dir,
'clone_dir': self.clone_dir,
'test_server': self.TESTSERVER,
})
clone_hgrc.close()
self.options.parent_branch = None
def _hg_get_tip(self):
return self._run_hg(['identify']).split()[0]
@property
def clone_hgrc_path(self):
return os.path.join(self.clone_dir, '.hg', 'hgrc')
def test_get_repository_info_simple(self):
"""Testing MercurialClient get_repository_info, simple case"""
ri = self.client.get_repository_info()
self.assertTrue(isinstance(ri, RepositoryInfo))
self.assertEqual('', ri.base_path)
hgpath = ri.path
if os.path.basename(hgpath) == '.hg':
hgpath = os.path.dirname(hgpath)
self.assertEqual(self.hg_dir, hgpath)
self.assertTrue(ri.supports_parent_diffs)
self.assertFalse(ri.supports_changesets)
def test_scan_for_server_simple(self):
"""Testing MercurialClient scan_for_server, simple case"""
os.rename(self.clone_hgrc_path,
os.path.join(self.clone_dir, '._disabled_hgrc'))
self.client.hgrc = {}
self.client._load_hgrc()
ri = self.client.get_repository_info()
server = self.client.scan_for_server(ri)
self.assertTrue(server is None)
def test_scan_for_server_when_present_in_hgrc(self):
"""Testing MercurialClient scan_for_server when present in hgrc"""
ri = self.client.get_repository_info()
server = self.client.scan_for_server(ri)
self.assertEqual(self.TESTSERVER, server)
def test_scan_for_server_reviewboardrc(self):
"""Testing MercurialClient scan_for_server when in .reviewboardrc"""
rc = open(os.path.join(self.clone_dir, '.reviewboardrc'), 'w')
rc.write('REVIEWBOARD_URL = "%s"' % self.TESTSERVER)
rc.close()
self.client.config = load_config()
ri = self.client.get_repository_info()
server = self.client.scan_for_server(ri)
self.assertEqual(self.TESTSERVER, server)
def test_diff_simple(self):
"""Testing MercurialClient diff, simple case"""
self._hg_add_file_commit('foo.txt', FOO1, 'delete and modify stuff')
revisions = self.client.parse_revision_spec([])
result = self.client.diff(revisions)
self.assertTrue(isinstance(result, dict))
self.assertTrue('diff' in result)
self.assertEqual(md5(result['diff']).hexdigest(),
'68c2bdccf52a4f0baddd0ac9f2ecb7d2')
def test_diff_simple_multiple(self):
"""Testing MercurialClient diff with multiple commits"""
self._hg_add_file_commit('foo.txt', FOO1, 'commit 1')
self._hg_add_file_commit('foo.txt', FOO2, 'commit 2')
self._hg_add_file_commit('foo.txt', FOO3, 'commit 3')
revisions = self.client.parse_revision_spec([])
result = self.client.diff(revisions)
self.assertTrue(isinstance(result, dict))
self.assertTrue('diff' in result)
self.assertEqual(md5(result['diff']).hexdigest(),
'9c8796936646be5c7349973b0fceacbd')
def test_diff_exclude(self):
"""Testing MercurialClient diff with file exclusion."""
self._hg_add_file_commit('foo.txt', FOO1, 'commit 1')
self._hg_add_file_commit('exclude.txt', FOO2, 'commit 2')
revisions = self.client.parse_revision_spec([])
result = self.client.diff(revisions, exclude_patterns=['exclude.txt'])
self.assertTrue(isinstance(result, dict))
self.assertTrue('diff' in result)
self.assertEqual(md5(result['diff']).hexdigest(),
'68c2bdccf52a4f0baddd0ac9f2ecb7d2')
def test_diff_exclude_empty(self):
"""Testing MercurialClient diff with empty file exclusion."""
self._hg_add_file_commit('foo.txt', FOO1, 'commit 1')
self._hg_add_file_commit('empty.txt', '', 'commit 2')
revisions = self.client.parse_revision_spec([])
result = self.client.diff(revisions, exclude_patterns=['empty.txt'])
        self.assertTrue(isinstance(result, dict))
self.assertTrue('diff' in result)
self.assertEqual(md5(result['diff']).hexdigest(),
'68c2bdccf52a4f0baddd0ac9f2ecb7d2')
def test_diff_branch_diverge(self):
"""Testing MercurialClient diff with diverged branch"""
self._hg_add_file_commit('foo.txt', FOO1, 'commit 1')
self._run_hg(['branch', 'diverged'])
self._hg_add_file_commit('foo.txt', FOO2, 'commit 2')
revisions = self.client.parse_revision_spec([])
result = self.client.diff(revisions)
self.assertTrue(isinstance(result, dict))
self.assertTrue('diff' in result)
self.assertEqual(md5(result['diff']).hexdigest(),
'6b12723baab97f346aa938005bc4da4d')
self._run_hg(['update', '-C', 'default'])
revisions = self.client.parse_revision_spec([])
result = self.client.diff(revisions)
self.assertTrue(isinstance(result, dict))
self.assertTrue('diff' in result)
self.assertEqual(md5(result['diff']).hexdigest(),
'68c2bdccf52a4f0baddd0ac9f2ecb7d2')
def test_diff_parent_diff_simple(self):
"""Testing MercurialClient parent diffs with a simple case"""
self._hg_add_file_commit('foo.txt', FOO1, 'commit 1')
self._hg_add_file_commit('foo.txt', FOO2, 'commit 2')
self._hg_add_file_commit('foo.txt', FOO3, 'commit 3')
revisions = self.client.parse_revision_spec(['2', '3'])
result = self.client.diff(revisions)
self.assertTrue(isinstance(result, dict))
self.assertTrue('parent_diff' in result)
self.assertEqual(md5(result['diff']).hexdigest(),
'7a897f68a9dc034fc1e42fe7a33bb808')
self.assertEqual(md5(result['parent_diff']).hexdigest(),
'5cacbd79800a9145f982dcc0908b6068')
def test_diff_parent_diff_branch_diverge(self):
"""Testing MercurialClient parent diffs with a diverged branch"""
# This test is very similar to test_diff_parent_diff_simple except
# we throw a branch into the mix.
self._hg_add_file_commit('foo.txt', FOO1, 'commit 1')
self._run_hg(['branch', 'diverged'])
self._hg_add_file_commit('foo.txt', FOO2, 'commit 2')
self._hg_add_file_commit('foo.txt', FOO3, 'commit 3')
revisions = self.client.parse_revision_spec(['2', '3'])
result = self.client.diff(revisions)
self.assertTrue('parent_diff' in result)
self.assertEqual(md5(result['diff']).hexdigest(),
'7a897f68a9dc034fc1e42fe7a33bb808')
self.assertEqual(md5(result['parent_diff']).hexdigest(),
'5cacbd79800a9145f982dcc0908b6068')
def test_diff_parent_diff_simple_with_arg(self):
"""Testing MercurialClient parent diffs with a diverged branch and --parent option"""
# This test is very similar to test_diff_parent_diff_simple except
# we use the --parent option to post without explicit revisions
self._hg_add_file_commit('foo.txt', FOO1, 'commit 1')
self._hg_add_file_commit('foo.txt', FOO2, 'commit 2')
self._hg_add_file_commit('foo.txt', FOO3, 'commit 3')
self.options.parent_branch = '2'
revisions = self.client.parse_revision_spec([])
result = self.client.diff(revisions)
self.assertTrue(isinstance(result, dict))
self.assertTrue('parent_diff' in result)
self.assertEqual(md5(result['diff']).hexdigest(),
'7a897f68a9dc034fc1e42fe7a33bb808')
self.assertEqual(md5(result['parent_diff']).hexdigest(),
'5cacbd79800a9145f982dcc0908b6068')
def test_parse_revision_spec_no_args(self):
"""Testing MercurialClient.parse_revision_spec with no arguments"""
base = self._hg_get_tip()
self._hg_add_file_commit('foo.txt', FOO1, 'commit 1')
self._hg_add_file_commit('foo.txt', FOO2, 'commit 2')
tip = self._hg_get_tip()
revisions = self.client.parse_revision_spec([])
self.assertTrue(isinstance(revisions, dict))
self.assertTrue('base' in revisions)
self.assertTrue('tip' in revisions)
self.assertTrue('parent_base' not in revisions)
self.assertEqual(revisions['base'], base)
self.assertEqual(revisions['tip'], tip)
def test_parse_revision_spec_one_arg_periods(self):
"""Testing MercurialClient.parse_revision_spec with r1..r2 syntax"""
base = self._hg_get_tip()
self._hg_add_file_commit('foo.txt', FOO1, 'commit 1')
tip = self._hg_get_tip()
revisions = self.client.parse_revision_spec(['0..1'])
self.assertTrue(isinstance(revisions, dict))
self.assertTrue('base' in revisions)
self.assertTrue('tip' in revisions)
self.assertTrue('parent_base' not in revisions)
self.assertEqual(revisions['base'], base)
self.assertEqual(revisions['tip'], tip)
def test_parse_revision_spec_one_arg_colons(self):
"""Testing MercurialClient.parse_revision_spec with r1::r2 syntax"""
base = self._hg_get_tip()
self._hg_add_file_commit('foo.txt', FOO1, 'commit 1')
tip = self._hg_get_tip()
revisions = self.client.parse_revision_spec(['0..1'])
self.assertTrue(isinstance(revisions, dict))
self.assertTrue('base' in revisions)
self.assertTrue('tip' in revisions)
self.assertTrue('parent_base' not in revisions)
self.assertEqual(revisions['base'], base)
self.assertEqual(revisions['tip'], tip)
def test_parse_revision_spec_one_arg(self):
"""Testing MercurialClient.parse_revision_spec with one revision"""
base = self._hg_get_tip()
self._hg_add_file_commit('foo.txt', FOO1, 'commit 1')
tip = self._hg_get_tip()
self._hg_add_file_commit('foo.txt', FOO2, 'commit 2')
revisions = self.client.parse_revision_spec(['1'])
self.assertTrue(isinstance(revisions, dict))
self.assertTrue('base' in revisions)
self.assertTrue('tip' in revisions)
self.assertTrue('parent_base' not in revisions)
self.assertEqual(revisions['base'], base)
self.assertEqual(revisions['tip'], tip)
def test_parse_revision_spec_two_args(self):
"""Testing MercurialClient.parse_revision_spec with two revisions"""
base = self._hg_get_tip()
self._hg_add_file_commit('foo.txt', FOO1, 'commit 1')
self._hg_add_file_commit('foo.txt', FOO2, 'commit 2')
tip = self._hg_get_tip()
revisions = self.client.parse_revision_spec(['0', '2'])
self.assertTrue(isinstance(revisions, dict))
self.assertTrue('base' in revisions)
self.assertTrue('tip' in revisions)
self.assertTrue('parent_base' not in revisions)
self.assertEqual(revisions['base'], base)
self.assertEqual(revisions['tip'], tip)
def test_parse_revision_spec_parent_base(self):
"""Testing MercurialClient.parse_revision_spec with parent base"""
start_base = self._hg_get_tip()
self._hg_add_file_commit('foo.txt', FOO1, 'commit 1')
commit1 = self._hg_get_tip()
self._hg_add_file_commit('foo.txt', FOO2, 'commit 2')
commit2 = self._hg_get_tip()
self._hg_add_file_commit('foo.txt', FOO3, 'commit 3')
commit3 = self._hg_get_tip()
self._hg_add_file_commit('foo.txt', FOO4, 'commit 4')
commit4 = self._hg_get_tip()
self._hg_add_file_commit('foo.txt', FOO5, 'commit 5')
self.assertEqual(
self.client.parse_revision_spec(['1', '2']),
dict(base=commit1, tip=commit2, parent_base=start_base))
self.assertEqual(
self.client.parse_revision_spec(['4']),
dict(base=commit3, tip=commit4, parent_base=start_base,
commit_id=commit4))
self.assertEqual(
self.client.parse_revision_spec(['2', '4']),
dict(base=commit2, tip=commit4, parent_base=start_base))
def test_guess_summary_description_one(self):
"""Testing MercurialClient guess summary & description 1 commit."""
self.options.guess_summary = True
self.options.guess_description = True
self._hg_add_file_commit('foo.txt', FOO1, 'commit 1')
revisions = self.client.parse_revision_spec([])
commit_message = self.client.get_commit_message(revisions)
self.assertEqual(commit_message['summary'], 'commit 1')
def test_guess_summary_description_two(self):
"""Testing MercurialClient guess summary & description 2 commits."""
self.options.guess_summary = True
self.options.guess_description = True
self._hg_add_file_commit('foo.txt', FOO1, 'summary 1\n\nbody 1')
self._hg_add_file_commit('foo.txt', FOO2, 'summary 2\n\nbody 2')
revisions = self.client.parse_revision_spec([])
commit_message = self.client.get_commit_message(revisions)
self.assertEqual(commit_message['summary'], 'summary 1')
self.assertEqual(commit_message['description'],
'body 1\n\nsummary 2\n\nbody 2')
def test_guess_summary_description_three(self):
"""Testing MercurialClient guess summary & description 3 commits."""
self.options.guess_summary = True
self.options.guess_description = True
self._hg_add_file_commit('foo.txt', FOO1, 'commit 1\n\ndesc1')
self._hg_add_file_commit('foo.txt', FOO2, 'commit 2\n\ndesc2')
self._hg_add_file_commit('foo.txt', FOO3, 'commit 3\n\ndesc3')
revisions = self.client.parse_revision_spec([])
commit_message = self.client.get_commit_message(revisions)
self.assertEqual(commit_message['summary'], 'commit 1')
self.assertEqual(commit_message['description'],
'desc1\n\ncommit 2\n\ndesc2\n\ncommit 3\n\ndesc3')
def test_guess_summary_description_one_middle(self):
"""Testing MercurialClient guess summary & description middle commit commit."""
self.options.guess_summary = True
self.options.guess_description = True
self._hg_add_file_commit('foo.txt', FOO1, 'commit 1\n\ndesc1')
self._hg_add_file_commit('foo.txt', FOO2, 'commit 2\n\ndesc2')
tip = self._hg_get_tip()
self._hg_add_file_commit('foo.txt', FOO3, 'commit 3\n\ndesc3')
revisions = self.client.parse_revision_spec([tip])
commit_message = self.client.get_commit_message(revisions)
self.assertEqual(commit_message['summary'], 'commit 2')
self.assertEqual(commit_message['description'], 'desc2')
class MercurialSubversionClientTests(MercurialTestBase):
TESTSERVER = "http://127.0.0.1:8080"
def __init__(self, *args, **kwargs):
self._tmpbase = ''
self.clone_dir = ''
self.svn_repo = ''
self.svn_checkout = ''
self.client = None
self._svnserve_pid = 0
self._max_svnserve_pid_tries = 12
self._svnserve_port = os.environ.get('SVNSERVE_PORT')
self._required_exes = ('svnadmin', 'svnserve', 'svn')
MercurialTestBase.__init__(self, *args, **kwargs)
def setUp(self):
super(MercurialSubversionClientTests, self).setUp()
self._hg_env = {'FOO': 'BAR'}
# Make sure hgsubversion is enabled.
#
# This will modify the .hgrc in the temp home directory created
# for these tests.
#
# The "hgsubversion =" tells Mercurial to check for hgsubversion
# in the default PYTHONPATH.
fp = open('%s/.hgrc' % os.environ['HOME'], 'w')
fp.write('[extensions]\n')
fp.write('hgsubversion =\n')
fp.close()
for exe in self._required_exes:
if not is_exe_in_path(exe):
raise SkipTest('missing svn stuff! giving up!')
if not self._has_hgsubversion():
raise SkipTest('unable to use `hgsubversion` extension! '
'giving up!')
if not self._tmpbase:
self._tmpbase = self.create_tmp_dir()
self._create_svn_repo()
self._fire_up_svnserve()
self._fill_in_svn_repo()
try:
self._get_testing_clone()
except (OSError, IOError):
msg = 'could not clone from svn repo! skipping...'
raise SkipTest(msg).with_traceback(sys.exc_info()[2])
self._spin_up_client()
self._stub_in_config_and_options()
def _has_hgsubversion(self):
try:
output = self._run_hg(['svn', '--help'], ignore_errors=True,
                                  extra_ignore_errors=(255,))
except OSError:
return False
return not re.search("unknown command ['\"]svn['\"]", output, re.I)
def tearDown(self):
super(MercurialSubversionClientTests, self).tearDown()
os.kill(self._svnserve_pid, 9)
def _svn_add_file_commit(self, filename, data, msg, add_file=True):
outfile = open(filename, 'w')
outfile.write(data)
outfile.close()
if add_file:
execute(['svn', 'add', filename], ignore_errors=True)
execute(['svn', 'commit', '-m', msg])
def _create_svn_repo(self):
self.svn_repo = os.path.join(self._tmpbase, 'svnrepo')
execute(['svnadmin', 'create', self.svn_repo])
def _fire_up_svnserve(self):
if not self._svnserve_port:
self._svnserve_port = str(randint(30000, 40000))
pid_file = os.path.join(self._tmpbase, 'svnserve.pid')
execute(['svnserve', '--pid-file', pid_file, '-d',
'--listen-port', self._svnserve_port, '-r', self._tmpbase])
for i in range(0, self._max_svnserve_pid_tries):
try:
self._svnserve_pid = int(open(pid_file).read().strip())
return
except (IOError, OSError):
time.sleep(0.25)
# This will re-raise the last exception, which will be either
# IOError or OSError if the above fails and this branch is reached
raise
def _fill_in_svn_repo(self):
self.svn_checkout = os.path.join(self._tmpbase, 'checkout.svn')
execute(['svn', 'checkout', 'file://%s' % self.svn_repo,
self.svn_checkout])
os.chdir(self.svn_checkout)
for subtree in ('trunk', 'branches', 'tags'):
execute(['svn', 'mkdir', subtree])
execute(['svn', 'commit', '-m', 'filling in T/b/t'])
os.chdir(os.path.join(self.svn_checkout, 'trunk'))
for i, data in enumerate([FOO, FOO1, FOO2]):
self._svn_add_file_commit('foo.txt', data, 'foo commit %s' % i,
add_file=(i == 0))
def _get_testing_clone(self):
self.clone_dir = os.path.join(self._tmpbase, 'checkout.hg')
self._run_hg([
'clone', 'svn://127.0.0.1:%s/svnrepo' % self._svnserve_port,
self.clone_dir,
])
def _spin_up_client(self):
os.chdir(self.clone_dir)
self.client = MercurialClient(options=self.options)
def _stub_in_config_and_options(self):
self.options.parent_branch = None
def testGetRepositoryInfoSimple(self):
"""Testing MercurialClient (+svn) get_repository_info, simple case"""
ri = self.client.get_repository_info()
self.assertEqual('svn', self.client._type)
self.assertEqual('/trunk', ri.base_path)
self.assertEqual('svn://127.0.0.1:%s/svnrepo' % self._svnserve_port,
ri.path)
def testCalculateRepositoryInfo(self):
"""Testing MercurialClient (+svn) _calculate_hgsubversion_repository_info properly determines repository and base paths."""
info = (
"URL: svn+ssh://testuser@svn.example.net/repo/trunk\n"
"Repository Root: svn+ssh://testuser@svn.example.net/repo\n"
"Repository UUID: bfddb570-5023-0410-9bc8-bc1659bf7c01\n"
"Revision: 9999\n"
"Node Kind: directory\n"
"Last Changed Author: user\n"
"Last Changed Rev: 9999\n"
"Last Changed Date: 2012-09-05 18:04:28 +0000 (Wed, 05 Sep 2012)")
repo_info = self.client._calculate_hgsubversion_repository_info(info)
self.assertEqual(repo_info.path, "svn+ssh://svn.example.net/repo")
self.assertEqual(repo_info.base_path, "/trunk")
def testScanForServerSimple(self):
"""Testing MercurialClient (+svn) scan_for_server, simple case"""
ri = self.client.get_repository_info()
server = self.client.scan_for_server(ri)
self.assertTrue(server is None)
def testScanForServerReviewboardrc(self):
"""Testing MercurialClient (+svn) scan_for_server in .reviewboardrc"""
rc_filename = os.path.join(self.clone_dir, '.reviewboardrc')
rc = open(rc_filename, 'w')
rc.write('REVIEWBOARD_URL = "%s"' % self.TESTSERVER)
rc.close()
self.client.config = load_config()
ri = self.client.get_repository_info()
server = self.client.scan_for_server(ri)
self.assertEqual(self.TESTSERVER, server)
def testScanForServerProperty(self):
"""Testing MercurialClient (+svn) scan_for_server in svn property"""
os.chdir(self.svn_checkout)
execute(['svn', 'update'])
execute(['svn', 'propset', 'reviewboard:url', self.TESTSERVER,
self.svn_checkout])
execute(['svn', 'commit', '-m', 'adding reviewboard:url property'])
os.chdir(self.clone_dir)
self._run_hg(['pull'])
self._run_hg(['update', '-C'])
ri = self.client.get_repository_info()
self.assertEqual(self.TESTSERVER, self.client.scan_for_server(ri))
def testDiffSimple(self):
"""Testing MercurialClient (+svn) diff, simple case"""
self.client.get_repository_info()
self._hg_add_file_commit('foo.txt', FOO4, 'edit 4')
revisions = self.client.parse_revision_spec([])
result = self.client.diff(revisions)
self.assertTrue(isinstance(result, dict))
self.assertTrue('diff' in result)
self.assertEqual(md5(result['diff']).hexdigest(),
'2eb0a5f2149232c43a1745d90949fcd5')
self.assertEqual(result['parent_diff'], None)
def testDiffSimpleMultiple(self):
"""Testing MercurialClient (+svn) diff with multiple commits"""
self.client.get_repository_info()
self._hg_add_file_commit('foo.txt', FOO4, 'edit 4')
self._hg_add_file_commit('foo.txt', FOO5, 'edit 5')
self._hg_add_file_commit('foo.txt', FOO6, 'edit 6')
revisions = self.client.parse_revision_spec([])
result = self.client.diff(revisions)
self.assertTrue(isinstance(result, dict))
self.assertTrue('diff' in result)
self.assertEqual(md5(result['diff']).hexdigest(),
'3d007394de3831d61e477cbcfe60ece8')
self.assertEqual(result['parent_diff'], None)
def testDiffOfRevision(self):
"""Testing MercurialClient (+svn) diff specifying a revision."""
self.client.get_repository_info()
self._hg_add_file_commit('foo.txt', FOO4, 'edit 4', branch='b')
self._hg_add_file_commit('foo.txt', FOO5, 'edit 5', branch='b')
self._hg_add_file_commit('foo.txt', FOO6, 'edit 6', branch='b')
self._hg_add_file_commit('foo.txt', FOO4, 'edit 7', branch='b')
revisions = self.client.parse_revision_spec(['3'])
result = self.client.diff(revisions)
self.assertTrue(isinstance(result, dict))
self.assertTrue('diff' in result)
self.assertEqual(md5(result['diff']).hexdigest(),
'2eb0a5f2149232c43a1745d90949fcd5')
self.assertEqual(result['parent_diff'], None)
def svn_version_set_hash(svn16_hash, svn17_hash):
"""Pass the appropriate hash to the wrapped function.
SVN 1.6 and 1.7+ will generate slightly different output for ``svn diff``
when generating the diff with a working copy. This works around that by
checking the installed SVN version and passing the appropriate hash.
"""
def decorator(f):
@wraps(f)
def wrapped(self):
self.client.get_repository_info()
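            # get_repository_info() is expected to populate
            # subversion_client_version before the version check below.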
if self.client.subversion_client_version < (1, 7):
return f(self, svn16_hash)
else:
return f(self, svn17_hash)
return wrapped
return decorator
class SVNClientTests(SCMClientTests):
def setUp(self):
super(SVNClientTests, self).setUp()
if not is_exe_in_path('svn'):
raise SkipTest('svn not found in path')
self.svn_dir = os.path.join(self.clients_dir, 'testdata', 'svn-repo')
self.clone_dir = self.chdir_tmp()
self._run_svn(['co', 'file://' + self.svn_dir, 'svn-repo'])
os.chdir(os.path.join(self.clone_dir, 'svn-repo'))
self.client = SVNClient(options=self.options)
self.options.svn_show_copies_as_adds = None
def _run_svn(self, command):
return execute(['svn'] + command, env=None, split_lines=False,
ignore_errors=False, extra_ignore_errors=(),
translate_newlines=True)
def _svn_add_file(self, filename, data, changelist=None):
"""Add a file to the test repo."""
is_new = not os.path.exists(filename)
f = open(filename, 'w')
f.write(data)
f.close()
if is_new:
self._run_svn(['add', filename])
if changelist:
self._run_svn(['changelist', changelist, filename])
def _svn_add_dir(self, dirname):
"""Add a directory to the test repo."""
if not os.path.exists(dirname):
os.mkdir(dirname)
self._run_svn(['add', dirname])
def test_relative_paths(self):
"""Testing SVNRepositoryInfo._get_relative_path"""
info = SVNRepositoryInfo('http://svn.example.com/svn/', '/', '')
self.assertEqual(info._get_relative_path('/foo', '/bar'), None)
self.assertEqual(info._get_relative_path('/', '/trunk/myproject'),
None)
self.assertEqual(info._get_relative_path('/trunk/myproject', '/'),
'/trunk/myproject')
self.assertEqual(
info._get_relative_path('/trunk/myproject', ''),
'/trunk/myproject')
self.assertEqual(
info._get_relative_path('/trunk/myproject', '/trunk'),
'/myproject')
self.assertEqual(
info._get_relative_path('/trunk/myproject', '/trunk/myproject'),
'/')
def test_parse_revision_spec_no_args(self):
"""Testing SVNClient.parse_revision_spec with no specified revisions"""
revisions = self.client.parse_revision_spec()
self.assertTrue(isinstance(revisions, dict))
self.assertTrue('base' in revisions)
self.assertTrue('tip' in revisions)
self.assertTrue('parent_base' not in revisions)
self.assertEqual(revisions['base'], 'BASE')
self.assertEqual(revisions['tip'], '--rbtools-working-copy')
def test_parse_revision_spec_one_revision(self):
"""Testing SVNClient.parse_revision_spec with one specified numeric revision"""
revisions = self.client.parse_revision_spec(['3'])
self.assertTrue(isinstance(revisions, dict))
self.assertTrue('base' in revisions)
self.assertTrue('tip' in revisions)
self.assertTrue('parent_base' not in revisions)
self.assertEqual(revisions['base'], 2)
self.assertEqual(revisions['tip'], 3)
def test_parse_revision_spec_one_revision_changelist(self):
"""Testing SVNClient.parse_revision_spec with one specified changelist revision"""
self._svn_add_file('foo.txt', FOO3, 'my-change')
revisions = self.client.parse_revision_spec(['my-change'])
self.assertTrue(isinstance(revisions, dict))
self.assertTrue('base' in revisions)
self.assertTrue('tip' in revisions)
self.assertTrue('parent_base' not in revisions)
self.assertEqual(revisions['base'], 'BASE')
self.assertEqual(revisions['tip'],
SVNClient.REVISION_CHANGELIST_PREFIX + 'my-change')
def test_parse_revision_spec_one_revision_nonexistant_changelist(self):
"""Testing SVNClient.parse_revision_spec with one specified invalid changelist revision"""
self._svn_add_file('foo.txt', FOO3, 'my-change')
self.assertRaises(
InvalidRevisionSpecError,
lambda: self.client.parse_revision_spec(['not-my-change']))
def test_parse_revision_spec_one_arg_two_revisions(self):
"""Testing SVNClient.parse_revision_spec with R1:R2 syntax"""
revisions = self.client.parse_revision_spec(['1:3'])
self.assertTrue(isinstance(revisions, dict))
self.assertTrue('base' in revisions)
self.assertTrue('tip' in revisions)
self.assertTrue('parent_base' not in revisions)
self.assertEqual(revisions['base'], 1)
self.assertEqual(revisions['tip'], 3)
def test_parse_revision_spec_two_arguments(self):
"""Testing SVNClient.parse_revision_spec with two revisions"""
revisions = self.client.parse_revision_spec(['1', '3'])
self.assertTrue(isinstance(revisions, dict))
self.assertTrue('base' in revisions)
self.assertTrue('tip' in revisions)
self.assertTrue('parent_base' not in revisions)
self.assertEqual(revisions['base'], 1)
self.assertEqual(revisions['tip'], 3)
def test_parse_revision_spec_one_revision_url(self):
"""Testing SVNClient.parse_revision_spec with one revision and a repository URL"""
self.options.repository_url = \
'http://svn.apache.org/repos/asf/subversion/trunk'
revisions = self.client.parse_revision_spec(['1549823'])
self.assertTrue(isinstance(revisions, dict))
self.assertTrue('base' in revisions)
self.assertTrue('tip' in revisions)
self.assertTrue('parent_base' not in revisions)
self.assertEqual(revisions['base'], 1549822)
self.assertEqual(revisions['tip'], 1549823)
def test_parse_revision_spec_two_revisions_url(self):
"""Testing SVNClient.parse_revision_spec with R1:R2 syntax and a repository URL"""
self.options.repository_url = \
'http://svn.apache.org/repos/asf/subversion/trunk'
revisions = self.client.parse_revision_spec(['1549823:1550211'])
self.assertTrue(isinstance(revisions, dict))
self.assertTrue('base' in revisions)
self.assertTrue('tip' in revisions)
self.assertTrue('parent_base' not in revisions)
self.assertEqual(revisions['base'], 1549823)
self.assertEqual(revisions['tip'], 1550211)
def test_parse_revision_spec_invalid_spec(self):
"""Testing SVNClient.parse_revision_spec with invalid specifications"""
self.assertRaises(InvalidRevisionSpecError,
self.client.parse_revision_spec,
['aoeu'])
self.assertRaises(InvalidRevisionSpecError,
self.client.parse_revision_spec,
['aoeu', '1234'])
self.assertRaises(TooManyRevisionsError,
self.client.parse_revision_spec,
['1', '2', '3'])
def test_parse_revision_spec_non_unicode_log(self):
"""Testing SVNClient.parse_revision_spec with a non-utf8 log entry"""
# Note: the svn log entry for commit r2 contains one non-utf8 character
revisions = self.client.parse_revision_spec(['2'])
self.assertTrue(isinstance(revisions, dict))
self.assertTrue('base' in revisions)
self.assertTrue('tip' in revisions)
self.assertTrue('parent_base' not in revisions)
self.assertEqual(revisions['base'], 1)
self.assertEqual(revisions['tip'], 2)
def test_diff_exclude(self):
"""Testing SVNClient diff with file exclude patterns"""
self._svn_add_file('foo.txt', FOO1)
self._svn_add_file('exclude.txt', FOO2)
revisions = self.client.parse_revision_spec([])
result = self.client.diff(revisions,
exclude_patterns=['exclude.txt'])
self.assertTrue(isinstance(result, dict))
self.assertTrue('diff' in result)
self.assertEqual(md5(result['diff']).hexdigest(),
'1d2b00abce632d104127a2d3673770a1')
def test_diff_exclude_in_subdir(self):
"""Testing SVNClient diff with exclude patterns in a subdir"""
self._svn_add_file('foo.txt', FOO1)
self._svn_add_dir('subdir')
self._svn_add_file(os.path.join('subdir', 'exclude.txt'), FOO2)
os.chdir('subdir')
revisions = self.client.parse_revision_spec([])
result = self.client.diff(
revisions,
exclude_patterns=['exclude.txt'])
self.assertTrue(isinstance(result, dict))
self.assertTrue('diff' in result)
self.assertEqual(result['diff'], '')
def test_diff_exclude_root_pattern_in_subdir(self):
"""Testing SVNClient diff with repo exclude patterns in a subdir"""
self._svn_add_file('exclude.txt', FOO1)
self._svn_add_dir('subdir')
os.chdir('subdir')
revisions = self.client.parse_revision_spec([])
result = self.client.diff(
revisions,
exclude_patterns=[os.path.join(os.path.sep, 'exclude.txt'),
'.'])
self.assertTrue(isinstance(result, dict))
self.assertTrue('diff' in result)
self.assertEqual(result['diff'], '')
@svn_version_set_hash('043befc507b8177a0f010dc2cecc4205',
'1b68063237c584d38a9a3ddbdf1f72a2')
def test_same_diff_multiple_methods(self, md5_sum):
"""Testing SVNClient identical diff generated from root, subdirectory,
and via target"""
# Test diff generation for a single file, where 'svn diff' is invoked
# from three different locations. This should result in an identical
# diff for all three cases. Add a new subdirectory and file
# (dir1/A.txt) which will be the lone change captured in the diff.
# Cases:
# 1) Invoke 'svn diff' from checkout root.
# 2) Invoke 'svn diff' from dir1/ subdirectory.
# 3) Create dir2/ subdirectory parallel to dir1/. Invoke 'svn diff'
# from dir2/ where '../dir1/A.txt' is provided as a specific
# target.
#
# This test is inspired by #3749 which broke cases 2 and 3.
self._svn_add_dir('dir1')
self._svn_add_file('dir1/A.txt', FOO3)
# Case 1: Generate diff from checkout root.
revisions = self.client.parse_revision_spec()
result = self.client.diff(revisions)
self.assertTrue(isinstance(result, dict))
self.assertTrue('diff' in result)
self.assertEqual(md5(result['diff']).hexdigest(), md5_sum)
# Case 2: Generate diff from dir1 subdirectory.
os.chdir('dir1')
result = self.client.diff(revisions)
self.assertTrue(isinstance(result, dict))
self.assertTrue('diff' in result)
self.assertEqual(md5(result['diff']).hexdigest(), md5_sum)
# Case 3: Generate diff from dir2 subdirectory, but explicitly target
# only ../dir1/A.txt.
os.chdir('..')
self._svn_add_dir('dir2')
os.chdir('dir2')
result = self.client.diff(revisions, ['../dir1/A.txt'])
self.assertTrue(isinstance(result, dict))
self.assertTrue('diff' in result)
self.assertEqual(md5(result['diff']).hexdigest(), md5_sum)
@svn_version_set_hash('902d662a110400f7470294b2d9e72d36',
'13803373ded9af750384a4601d5173ce')
def test_diff_non_unicode_characters(self, md5_sum):
"""Testing SVNClient diff with a non-utf8 file"""
self._svn_add_file('A.txt', '\xe2'.encode('iso-8859-1'))
self._run_svn(['propset', 'svn:mime-type', 'text/plain', 'A.txt'])
revisions = self.client.parse_revision_spec()
result = self.client.diff(revisions)
self.assertTrue(isinstance(result, dict))
self.assertTrue('diff' in result)
self.assertEqual(md5(result['diff']).hexdigest(), md5_sum)
@svn_version_set_hash('79cbd5c4974f97d173ee87c50fa9cff2',
'bfa99e54b8c23b97b1dee23d2763c4fd')
def test_diff_non_unicode_filename(self, md5_sum):
"""Testing SVNClient diff with a non-utf8 filename"""
self.options.svn_show_copies_as_adds = 'y'
filename = '\xe2'
self._run_svn(['copy', 'foo.txt', filename])
self._run_svn(['propset', 'svn:mime-type', 'text/plain', filename])
# Generate identical diff from checkout root and via changelist.
revisions = self.client.parse_revision_spec()
result = self.client.diff(revisions)
self.assertTrue(isinstance(result, dict))
self.assertTrue('diff' in result)
self.assertEqual(md5(result['diff']).hexdigest(), md5_sum)
self._run_svn(['changelist', 'cl1', filename])
revisions = self.client.parse_revision_spec(['cl1'])
result = self.client.diff(revisions)
self.assertTrue(isinstance(result, dict))
self.assertTrue('diff' in result)
self.assertEqual(md5(result['diff']).hexdigest(), md5_sum)
def test_show_copies_as_adds_enabled(self):
"""Testing SVNClient with --show-copies-as-adds functionality
enabled"""
self.check_show_copies_as_adds('y', 'ac1835240ec86ee14ddccf1f2236c442')
def test_show_copies_as_adds_disabled(self):
"""Testing SVNClient with --show-copies-as-adds functionality
disabled"""
self.check_show_copies_as_adds('n', 'd41d8cd98f00b204e9800998ecf8427e')
def check_show_copies_as_adds(self, state, md5str):
"""Helper function to evaluate --show-copies as adds"""
self.client.get_repository_info()
# Ensure valid SVN client version.
if not is_valid_version(self.client.subversion_client_version,
self.client.SHOW_COPIES_AS_ADDS_MIN_VERSION):
raise SkipTest('Subversion client is too old to test '
'--show-copies-as-adds.')
self.options.svn_show_copies_as_adds = state
self._svn_add_dir('dir1')
self._svn_add_dir('dir2')
self._run_svn(['copy', 'foo.txt', 'dir1'])
# Generate identical diff from checkout root, via changelist, and via
# explicit include target.
revisions = self.client.parse_revision_spec()
result = self.client.diff(revisions)
self.assertTrue(isinstance(result, dict))
self.assertTrue('diff' in result)
self.assertEqual(md5(result['diff']).hexdigest(), md5str)
self._run_svn(['changelist', 'cl1', 'dir1/foo.txt'])
revisions = self.client.parse_revision_spec(['cl1'])
result = self.client.diff(revisions)
self.assertTrue(isinstance(result, dict))
self.assertTrue('diff' in result)
self.assertEqual(md5(result['diff']).hexdigest(), md5str)
self._run_svn(['changelist', '--remove', 'dir1/foo.txt'])
os.chdir('dir2')
revisions = self.client.parse_revision_spec()
result = self.client.diff(revisions, ['../dir1'])
self.assertTrue(isinstance(result, dict))
self.assertTrue('diff' in result)
self.assertEqual(md5(result['diff']).hexdigest(), md5str)
def test_history_scheduled_with_commit_nominal(self):
"""Testing SVNClient.history_scheduled_with_commit nominal cases"""
self.client.get_repository_info()
# Ensure valid SVN client version.
if not is_valid_version(self.client.subversion_client_version,
self.client.SHOW_COPIES_AS_ADDS_MIN_VERSION):
raise SkipTest('Subversion client is too old to test '
'history_scheduled_with_commit().')
self._svn_add_dir('dir1')
self._svn_add_dir('dir2')
self._run_svn(['copy', 'foo.txt', 'dir1'])
# Squash stderr to prevent error message in test output.
sys.stderr = StringIO()
# Ensure SystemExit is raised when attempting to generate diff from
# checkout root, via changelist, and via explicit include target.
revisions = self.client.parse_revision_spec()
self.assertRaises(SystemExit, self.client.diff, revisions)
self._run_svn(['changelist', 'cl1', 'dir1/foo.txt'])
revisions = self.client.parse_revision_spec(['cl1'])
self.assertRaises(SystemExit, self.client.diff, revisions)
self._run_svn(['changelist', '--remove', 'dir1/foo.txt'])
os.chdir('dir2')
revisions = self.client.parse_revision_spec()
self.assertRaises(SystemExit, self.client.diff, revisions, ['../dir1'])
def test_history_scheduled_with_commit_special_case_changelist(self):
"""Testing SVNClient.history_scheduled_with_commit ignore history in
changelist"""
self.client.get_repository_info()
# Ensure valid SVN client version.
if not is_valid_version(self.client.subversion_client_version,
self.client.SHOW_COPIES_AS_ADDS_MIN_VERSION):
raise SkipTest('Subversion client is too old to test '
'history_scheduled_with_commit().')
# Add file with history to changelist, then generate diff from checkout
# root. In this case there should be no SystemExit raised and an
# (empty) diff should be produced.
self._run_svn(['copy', 'foo.txt', 'foo_copy.txt'])
self._run_svn(['changelist', 'cl1', 'foo_copy.txt'])
revisions = self.client.parse_revision_spec()
result = self.client.diff(revisions)
self.assertTrue(isinstance(result, dict))
self.assertTrue('diff' in result)
self.assertEqual(md5(result['diff']).hexdigest(),
'd41d8cd98f00b204e9800998ecf8427e')
def test_history_scheduled_with_commit_special_case_exclude(self):
"""Testing SVNClient.history_scheduled_with_commit with exclude file"""
self.client.get_repository_info()
# Ensure valid SVN client version.
if not is_valid_version(self.client.subversion_client_version,
self.client.SHOW_COPIES_AS_ADDS_MIN_VERSION):
raise SkipTest('Subversion client is too old to test '
'history_scheduled_with_commit().')
# Lone file with history is also excluded. In this case there should
# be no SystemExit raised and an (empty) diff should be produced.
self._run_svn(['copy', 'foo.txt', 'foo_copy.txt'])
revisions = self.client.parse_revision_spec([])
result = self.client.diff(revisions, [], ['foo_copy.txt'])
self.assertTrue(isinstance(result, dict))
self.assertTrue('diff' in result)
self.assertEqual(md5(result['diff']).hexdigest(),
'd41d8cd98f00b204e9800998ecf8427e')
class P4WrapperTests(RBTestBase):
def is_supported(self):
return True
def test_counters(self):
"""Testing P4Wrapper.counters"""
class TestWrapper(P4Wrapper):
def run_p4(self, cmd, *args, **kwargs):
return [
'a = 1\n',
'b = 2\n',
'c = 3\n',
]
p4 = TestWrapper(None)
info = p4.counters()
self.assertEqual(len(info), 3)
self.assertEqual(info['a'], '1')
self.assertEqual(info['b'], '2')
self.assertEqual(info['c'], '3')
def test_info(self):
"""Testing P4Wrapper.info"""
class TestWrapper(P4Wrapper):
def run_p4(self, cmd, *args, **kwargs):
return [
'User name: myuser\n',
'Client name: myclient\n',
'Client host: myclient.example.com\n',
'Client root: /path/to/client\n',
'Server uptime: 111:43:38\n',
]
p4 = TestWrapper(None)
info = p4.info()
self.assertEqual(len(info), 5)
self.assertEqual(info['User name'], 'myuser')
self.assertEqual(info['Client name'], 'myclient')
self.assertEqual(info['Client host'], 'myclient.example.com')
self.assertEqual(info['Client root'], '/path/to/client')
self.assertEqual(info['Server uptime'], '111:43:38')
class PerforceClientTests(SCMClientTests):
class P4DiffTestWrapper(P4Wrapper):
def __init__(self, options):
super(
PerforceClientTests.P4DiffTestWrapper, self).__init__(options)
self._timestamp = time.mktime(time.gmtime(0))
def fstat(self, depot_path, fields=[]):
assert depot_path in self.fstat_files
fstat_info = self.fstat_files[depot_path]
for field in fields:
assert field in fstat_info
return fstat_info
def opened(self, changenum):
return [info for info in self.repo_files
if info['change'] == changenum]
def print_file(self, depot_path, out_file):
for info in self.repo_files:
if depot_path == '%s#%s' % (info['depotFile'], info['rev']):
fp = open(out_file, 'w')
fp.write(info['text'])
fp.close()
return
assert False
def where(self, depot_path):
assert depot_path in self.where_files
return [{
'path': self.where_files[depot_path],
}]
def change(self, changenum):
return [{
'Change': str(changenum),
'Date': '2013/01/02 22:33:44',
'User': 'joe@example.com',
'Status': 'pending',
'Description': 'This is a test.\n',
}]
def info(self):
return {
'Client root': '/',
}
def run_p4(self, *args, **kwargs):
assert False
def test_scan_for_server_counter_with_reviewboard_url(self):
"""Testing PerforceClient.scan_for_server_counter with reviewboard.url"""
RB_URL = 'http://reviewboard.example.com/'
class TestWrapper(P4Wrapper):
def counters(self):
return {
'reviewboard.url': RB_URL,
'foo': 'bar',
}
client = PerforceClient(TestWrapper)
url = client.scan_for_server_counter(None)
self.assertEqual(url, RB_URL)
def test_repository_info(self):
"""Testing PerforceClient.get_repository_info"""
SERVER_PATH = 'perforce.example.com:1666'
class TestWrapper(P4Wrapper):
def is_supported(self):
return True
def info(self):
return {
'Client root': os.getcwd(),
'Server address': SERVER_PATH,
'Server version': 'P4D/FREEBSD60X86_64/2012.2/525804 '
'(2012/09/18)',
}
client = PerforceClient(TestWrapper)
info = client.get_repository_info()
self.assertNotEqual(info, None)
self.assertEqual(info.path, SERVER_PATH)
self.assertEqual(client.p4d_version, (2012, 2))
def test_repository_info_outside_client_root(self):
"""Testing PerforceClient.get_repository_info outside client root"""
SERVER_PATH = 'perforce.example.com:1666'
class TestWrapper(P4Wrapper):
def is_supported(self):
return True
def info(self):
return {
'Client root': '/',
'Server address': SERVER_PATH,
'Server version': 'P4D/FREEBSD60X86_64/2012.2/525804 '
'(2012/09/18)',
}
client = PerforceClient(TestWrapper)
info = client.get_repository_info()
self.assertEqual(info, None)
def test_scan_for_server_counter_with_reviewboard_url_encoded(self):
"""Testing PerforceClient.scan_for_server_counter with encoded reviewboard.url.http:||"""
URL_KEY = 'reviewboard.url.http:||reviewboard.example.com/'
RB_URL = 'http://reviewboard.example.com/'
class TestWrapper(P4Wrapper):
def counters(self):
return {
URL_KEY: '1',
'foo': 'bar',
}
client = PerforceClient(TestWrapper)
url = client.scan_for_server_counter(None)
self.assertEqual(url, RB_URL)
def test_diff_with_pending_changelist(self):
"""Testing PerforceClient.diff with a pending changelist"""
client = self._build_client()
client.p4.repo_files = [
{
'depotFile': '//mydepot/test/README',
'rev': '2',
'action': 'edit',
'change': '12345',
'text': 'This is a test.\n',
},
{
'depotFile': '//mydepot/test/README',
'rev': '3',
'action': 'edit',
'change': '',
'text': 'This is a mess.\n',
},
{
'depotFile': '//mydepot/test/COPYING',
'rev': '1',
'action': 'add',
'change': '12345',
'text': 'Copyright 2013 Joe User.\n',
},
{
'depotFile': '//mydepot/test/Makefile',
'rev': '3',
'action': 'delete',
'change': '12345',
'text': 'all: all\n',
},
]
readme_file = make_tempfile()
copying_file = make_tempfile()
makefile_file = make_tempfile()
client.p4.print_file('//mydepot/test/README#3', readme_file)
client.p4.print_file('//mydepot/test/COPYING#1', copying_file)
client.p4.where_files = {
'//mydepot/test/README': readme_file,
'//mydepot/test/COPYING': copying_file,
'//mydepot/test/Makefile': makefile_file,
}
revisions = client.parse_revision_spec(['12345'])
diff = client.diff(revisions)
self._compare_diff(diff, '07aa18ff67f9aa615fcda7ecddcb354e')
def test_diff_for_submitted_changelist(self):
"""Testing PerforceClient.diff with a submitted changelist"""
class TestWrapper(self.P4DiffTestWrapper):
def change(self, changelist):
return [{
'Change': '12345',
'Date': '2013/12/19 11:32:45',
'User': 'example',
'Status': 'submitted',
'Description': 'My change description\n',
}]
def filelog(self, path):
return [
{
'change0': '12345',
'action0': 'edit',
'rev0': '3',
'depotFile': '//mydepot/test/README',
}
]
client = PerforceClient(TestWrapper)
client.p4.repo_files = [
{
'depotFile': '//mydepot/test/README',
'rev': '2',
'action': 'edit',
'change': '12345',
'text': 'This is a test.\n',
},
{
'depotFile': '//mydepot/test/README',
'rev': '3',
'action': 'edit',
'change': '',
'text': 'This is a mess.\n',
},
]
readme_file = make_tempfile()
client.p4.print_file('//mydepot/test/README#3', readme_file)
client.p4.where_files = {
'//mydepot/test/README': readme_file,
}
client.p4.repo_files = [
{
'depotFile': '//mydepot/test/README',
'rev': '2',
'action': 'edit',
'change': '12345',
'text': 'This is a test.\n',
},
{
'depotFile': '//mydepot/test/README',
'rev': '3',
'action': 'edit',
'change': '',
'text': 'This is a mess.\n',
},
]
revisions = client.parse_revision_spec(['12345'])
diff = client.diff(revisions)
self._compare_diff(diff, '8af5576f5192ca87731673030efb5f39',
expect_changenum=False)
def test_diff_with_moved_files_cap_on(self):
"""Testing PerforceClient.diff with moved files and capability on"""
self._test_diff_with_moved_files(
'5926515eaf4cf6d8257a52f7d9f0e530',
caps={
'scmtools': {
'perforce': {
'moved_files': True
}
}
})
def test_diff_with_moved_files_cap_off(self):
"""Testing PerforceClient.diff with moved files and capability off"""
self._test_diff_with_moved_files('20e5ab395e170dce1b062a796e6c2c13')
def _test_diff_with_moved_files(self, expected_diff_hash, caps={}):
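        """Build a fake depot containing move/add and move/delete pairs and
        check that the generated diff matches ``expected_diff_hash`` under
        the given server capabilities.
        """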
client = self._build_client()
client.capabilities = Capabilities(caps)
client.p4.repo_files = [
{
'depotFile': '//mydepot/test/README',
'rev': '2',
'action': 'move/delete',
'change': '12345',
'text': 'This is a test.\n',
},
{
'depotFile': '//mydepot/test/README-new',
'rev': '1',
'action': 'move/add',
'change': '12345',
'text': 'This is a mess.\n',
},
{
'depotFile': '//mydepot/test/COPYING',
'rev': '2',
'action': 'move/delete',
'change': '12345',
'text': 'Copyright 2013 Joe User.\n',
},
{
'depotFile': '//mydepot/test/COPYING-new',
'rev': '1',
'action': 'move/add',
'change': '12345',
'text': 'Copyright 2013 Joe User.\n',
},
]
readme_file = make_tempfile()
copying_file = make_tempfile()
readme_file_new = make_tempfile()
copying_file_new = make_tempfile()
client.p4.print_file('//mydepot/test/README#2', readme_file)
client.p4.print_file('//mydepot/test/COPYING#2', copying_file)
client.p4.print_file('//mydepot/test/README-new#1', readme_file_new)
client.p4.print_file('//mydepot/test/COPYING-new#1', copying_file_new)
client.p4.where_files = {
'//mydepot/test/README': readme_file,
'//mydepot/test/COPYING': copying_file,
'//mydepot/test/README-new': readme_file_new,
'//mydepot/test/COPYING-new': copying_file_new,
}
client.p4.fstat_files = {
'//mydepot/test/README': {
'clientFile': readme_file,
'movedFile': '//mydepot/test/README-new',
},
'//mydepot/test/README-new': {
'clientFile': readme_file_new,
'depotFile': '//mydepot/test/README-new',
},
'//mydepot/test/COPYING': {
'clientFile': copying_file,
'movedFile': '//mydepot/test/COPYING-new',
},
'//mydepot/test/COPYING-new': {
'clientFile': copying_file_new,
'depotFile': '//mydepot/test/COPYING-new',
},
}
revisions = client.parse_revision_spec(['12345'])
diff = client.diff(revisions)
self._compare_diff(diff, expected_diff_hash)
def _build_client(self):
self.options.p4_client = 'myclient'
self.options.p4_port = 'perforce.example.com:1666'
self.options.p4_passwd = ''
client = PerforceClient(self.P4DiffTestWrapper, options=self.options)
client.p4d_version = (2012, 2)
return client
def _compare_diff(self, diff_info, expected_diff_hash,
expect_changenum=True):
self.assertTrue(isinstance(diff_info, dict))
self.assertTrue('diff' in diff_info)
if expect_changenum:
self.assertTrue('changenum' in diff_info)
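        # Normalize any timestamps embedded in the diff to the epoch so the
        # md5 comparison below is deterministic across runs.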
diff_content = re.sub(br'\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}',
br'1970-01-01 00:00:00',
diff_info['diff'])
self.assertEqual(md5(diff_content).hexdigest(), expected_diff_hash)
def test_parse_revision_spec_no_args(self):
"""Testing PerforceClient.parse_revision_spec with no specified revisions"""
client = self._build_client()
revisions = client.parse_revision_spec()
self.assertTrue(isinstance(revisions, dict))
self.assertTrue('base' in revisions)
self.assertTrue('tip' in revisions)
self.assertEqual(
revisions['base'], PerforceClient.REVISION_CURRENT_SYNC)
self.assertEqual(
revisions['tip'],
PerforceClient.REVISION_PENDING_CLN_PREFIX + 'default')
def test_parse_revision_spec_pending_cln(self):
"""Testing PerforceClient.parse_revision_spec with a pending changelist"""
class TestWrapper(P4Wrapper):
def change(self, changelist):
return [{
'Change': '12345',
'Date': '2013/12/19 11:32:45',
'User': 'example',
'Status': 'pending',
'Description': 'My change description\n',
}]
client = PerforceClient(TestWrapper)
revisions = client.parse_revision_spec(['12345'])
self.assertTrue(isinstance(revisions, dict))
self.assertTrue('base' in revisions)
self.assertTrue('tip' in revisions)
self.assertTrue('parent_base' not in revisions)
self.assertEqual(
revisions['base'], PerforceClient.REVISION_CURRENT_SYNC)
self.assertEqual(
revisions['tip'],
PerforceClient.REVISION_PENDING_CLN_PREFIX + '12345')
def test_parse_revision_spec_submitted_cln(self):
"""Testing PerforceClient.parse_revision_spec with a submitted changelist"""
class TestWrapper(P4Wrapper):
def change(self, changelist):
return [{
'Change': '12345',
'Date': '2013/12/19 11:32:45',
'User': 'example',
'Status': 'submitted',
'Description': 'My change description\n',
}]
client = PerforceClient(TestWrapper)
revisions = client.parse_revision_spec(['12345'])
self.assertTrue(isinstance(revisions, dict))
self.assertTrue('base' in revisions)
self.assertTrue('tip' in revisions)
self.assertTrue('parent_base' not in revisions)
self.assertEqual(revisions['base'], '12344')
self.assertEqual(revisions['tip'], '12345')
def test_parse_revision_spec_shelved_cln(self):
"""Testing PerforceClient.parse_revision_spec with a shelved changelist"""
class TestWrapper(P4Wrapper):
def change(self, changelist):
return [{
'Change': '12345',
'Date': '2013/12/19 11:32:45',
'User': 'example',
'Status': 'shelved',
'Description': 'My change description\n',
}]
client = PerforceClient(TestWrapper)
revisions = client.parse_revision_spec(['12345'])
self.assertTrue(isinstance(revisions, dict))
self.assertTrue('base' in revisions)
self.assertTrue('tip' in revisions)
self.assertTrue('parent_base' not in revisions)
self.assertEqual(
revisions['base'], PerforceClient.REVISION_CURRENT_SYNC)
self.assertEqual(
revisions['tip'],
PerforceClient.REVISION_PENDING_CLN_PREFIX + '12345')
def test_parse_revision_spec_two_args(self):
"""Testing PerforceClient.parse_revision_spec with two changelists"""
class TestWrapper(P4Wrapper):
def change(self, changelist):
change = {
'Change': str(changelist),
'Date': '2013/12/19 11:32:45',
'User': 'example',
'Description': 'My change description\n',
}
if changelist == '99' or changelist == '100':
change['Status'] = 'submitted'
elif changelist == '101':
change['Status'] = 'pending'
elif changelist == '102':
change['Status'] = 'shelved'
else:
assert False
return [change]
client = PerforceClient(TestWrapper)
revisions = client.parse_revision_spec(['99', '100'])
self.assertTrue(isinstance(revisions, dict))
self.assertTrue('base' in revisions)
self.assertTrue('tip' in revisions)
self.assertTrue('parent_base' not in revisions)
self.assertEqual(revisions['base'], '99')
self.assertEqual(revisions['tip'], '100')
self.assertRaises(InvalidRevisionSpecError,
client.parse_revision_spec,
['99', '101'])
self.assertRaises(InvalidRevisionSpecError,
client.parse_revision_spec,
['99', '102'])
self.assertRaises(InvalidRevisionSpecError,
client.parse_revision_spec,
['101', '100'])
self.assertRaises(InvalidRevisionSpecError,
client.parse_revision_spec,
['102', '100'])
self.assertRaises(InvalidRevisionSpecError,
client.parse_revision_spec,
['102', '10284'])
def test_parse_revision_spec_invalid_spec(self):
"""Testing PerforceClient.parse_revision_spec with invalid specifications"""
class TestWrapper(P4Wrapper):
def change(self, changelist):
return []
client = PerforceClient(TestWrapper)
self.assertRaises(InvalidRevisionSpecError,
client.parse_revision_spec,
['aoeu'])
self.assertRaises(TooManyRevisionsError,
client.parse_revision_spec,
['1', '2', '3'])
def test_diff_exclude(self):
"""Testing PerforceClient.normalize_exclude_patterns"""
repo_root = self.chdir_tmp()
os.mkdir('subdir')
cwd = os.getcwd()
class ExcludeWrapper(P4Wrapper):
def info(self):
return {
'Client root': repo_root,
}
client = PerforceClient(ExcludeWrapper)
patterns = [
"//depot/path",
os.path.join(os.path.sep, "foo"),
"foo",
]
normalized_patterns = [
# Depot paths should remain unchanged.
patterns[0],
# "Absolute" paths (i.e., ones that begin with a path separator)
# should be relative to the repository root.
os.path.join(repo_root, patterns[1][1:]),
# Relative paths should be relative to the current working
# directory.
os.path.join(cwd, patterns[2]),
]
result = client.normalize_exclude_patterns(patterns)
self.assertEqual(result, normalized_patterns)
class BazaarClientTests(SCMClientTests):
def setUp(self):
super(BazaarClientTests, self).setUp()
if not is_exe_in_path("bzr"):
raise SkipTest("bzr not found in path")
self.set_user_home(
os.path.join(self.clients_dir, 'testdata', 'homedir'))
self.orig_dir = os.getcwd()
self.original_branch = self.chdir_tmp()
self._run_bzr(["init", "."])
self._bzr_add_file_commit("foo.txt", FOO, "initial commit")
self.child_branch = mktemp()
self._run_bzr(["branch", self.original_branch, self.child_branch])
self.client = BazaarClient(options=self.options)
os.chdir(self.orig_dir)
self.options.parent_branch = None
def _run_bzr(self, command, *args, **kwargs):
return execute(['bzr'] + command, *args, **kwargs)
def _bzr_add_file_commit(self, file, data, msg):
"""Add a file to a Bazaar repository with the content of data and commit with msg."""
with open(file, 'w') as foo:
foo.write(data)
self._run_bzr(["add", file])
self._run_bzr(["commit", "-m", msg, '--author', 'Test User'])
def _compare_diffs(self, filename, full_diff, expected_diff_digest,
change_type='modified'):
"""Testing that the full_diff for ``filename`` matches the ``expected_diff``."""
diff_lines = full_diff.splitlines()
self.assertEqual(('=== %s file \'%s\''
% (change_type, filename)).encode('utf-8'),
diff_lines[0])
self.assertTrue(diff_lines[1].startswith(
('--- %s\t' % filename).encode('utf-8')))
self.assertTrue(diff_lines[2].startswith(
('+++ %s\t' % filename).encode('utf-8')))
diff_body = b'\n'.join(diff_lines[3:])
self.assertEqual(md5(diff_body).hexdigest(), expected_diff_digest)
def _count_files_in_diff(self, diff):
return len([
line
for line in diff.split(b'\n')
if line.startswith(b'===')
])
def test_get_repository_info_original_branch(self):
"""Testing BazaarClient get_repository_info with original branch"""
os.chdir(self.original_branch)
ri = self.client.get_repository_info()
self.assertTrue(isinstance(ri, RepositoryInfo))
self.assertEqual(os.path.realpath(ri.path),
os.path.realpath(self.original_branch))
self.assertTrue(ri.supports_parent_diffs)
self.assertEqual(ri.base_path, '/')
self.assertFalse(ri.supports_changesets)
def test_get_repository_info_child_branch(self):
"""Testing BazaarClient get_repository_info with child branch"""
os.chdir(self.child_branch)
ri = self.client.get_repository_info()
self.assertTrue(isinstance(ri, RepositoryInfo))
self.assertEqual(os.path.realpath(ri.path),
os.path.realpath(self.child_branch))
self.assertTrue(ri.supports_parent_diffs)
self.assertEqual(ri.base_path, "/")
self.assertFalse(ri.supports_changesets)
def test_get_repository_info_no_branch(self):
"""Testing BazaarClient get_repository_info, no branch"""
self.chdir_tmp()
ri = self.client.get_repository_info()
self.assertEqual(ri, None)
def test_too_many_revisions(self):
"""Testing BazaarClient parse_revision_spec with too many revisions"""
self.assertRaises(TooManyRevisionsError,
self.client.parse_revision_spec,
[1, 2, 3])
def test_diff_simple(self):
"""Testing BazaarClient simple diff case"""
os.chdir(self.child_branch)
self._bzr_add_file_commit("foo.txt", FOO1, "delete and modify stuff")
revisions = self.client.parse_revision_spec([])
result = self.client.diff(revisions)
self.assertTrue(isinstance(result, dict))
self.assertTrue('diff' in result)
self._compare_diffs('foo.txt', result['diff'],
'a6326b53933f8b255a4b840485d8e210')
def test_diff_exclude(self):
"""Testing BazaarClient diff with file exclusion."""
os.chdir(self.child_branch)
self._bzr_add_file_commit("foo.txt", FOO1, "commit 1")
self._bzr_add_file_commit("exclude.txt", FOO2, "commit 2")
revisions = self.client.parse_revision_spec([])
result = self.client.diff(revisions, exclude_patterns=['exclude.txt'])
self.assertTrue(isinstance(result, dict))
self.assertTrue('diff' in result)
self._compare_diffs('foo.txt', result['diff'],
'a6326b53933f8b255a4b840485d8e210')
self.assertEqual(self._count_files_in_diff(result['diff']), 1)
def test_diff_exclude_in_subdir(self):
"""Testing BazaarClient diff with file exclusion in a subdirectory."""
os.chdir(self.child_branch)
self._bzr_add_file_commit('foo.txt', FOO1, 'commit 1')
os.mkdir('subdir')
os.chdir('subdir')
self._bzr_add_file_commit('exclude.txt', FOO2, 'commit 2')
revisions = self.client.parse_revision_spec([])
result = self.client.diff(revisions,
exclude_patterns=['exclude.txt', '.'])
self.assertTrue(isinstance(result, dict))
self.assertTrue('diff' in result)
self._compare_diffs('foo.txt', result['diff'],
'a6326b53933f8b255a4b840485d8e210')
self.assertEqual(self._count_files_in_diff(result['diff']), 1)
def test_diff_exclude_root_pattern_in_subdir(self):
"""Testing BazaarClient diff with file exclusion in the repo root."""
os.chdir(self.child_branch)
self._bzr_add_file_commit('exclude.txt', FOO2, 'commit 1')
os.mkdir('subdir')
os.chdir('subdir')
self._bzr_add_file_commit('foo.txt', FOO1, 'commit 2')
revisions = self.client.parse_revision_spec([])
result = self.client.diff(
revisions,
exclude_patterns=[os.path.sep + 'exclude.txt',
os.path.sep + 'subdir'])
self.assertTrue(isinstance(result, dict))
self.assertTrue('diff' in result)
self._compare_diffs(os.path.join('subdir', 'foo.txt'), result['diff'],
'4deffcb296180fa166eddff2512bd0e4',
change_type='added')
def test_diff_specific_files(self):
"""Testing BazaarClient diff with specific files"""
os.chdir(self.child_branch)
self._bzr_add_file_commit("foo.txt", FOO1, "delete and modify stuff")
self._bzr_add_file_commit("bar.txt", "baz", "added bar")
revisions = self.client.parse_revision_spec([])
result = self.client.diff(revisions, ['foo.txt'])
self.assertTrue(isinstance(result, dict))
self.assertTrue('diff' in result)
self._compare_diffs('foo.txt', result['diff'],
'a6326b53933f8b255a4b840485d8e210')
def test_diff_simple_multiple(self):
"""Testing BazaarClient simple diff with multiple commits case"""
os.chdir(self.child_branch)
self._bzr_add_file_commit("foo.txt", FOO1, "commit 1")
self._bzr_add_file_commit("foo.txt", FOO2, "commit 2")
self._bzr_add_file_commit("foo.txt", FOO3, "commit 3")
revisions = self.client.parse_revision_spec([])
result = self.client.diff(revisions)
self.assertTrue(isinstance(result, dict))
self.assertTrue('diff' in result)
self._compare_diffs('foo.txt', result['diff'],
'4109cc082dce22288c2f1baca9b107b6')
def test_diff_parent(self):
"""Testing BazaarClient diff with changes only in the parent branch"""
os.chdir(self.child_branch)
self._bzr_add_file_commit("foo.txt", FOO1, "delete and modify stuff")
grand_child_branch = mktemp()
self._run_bzr(["branch", self.child_branch, grand_child_branch])
os.chdir(grand_child_branch)
revisions = self.client.parse_revision_spec([])
result = self.client.diff(revisions)
self.assertTrue(isinstance(result, dict))
self.assertTrue('diff' in result)
self.assertEqual(result['diff'], None)
def test_diff_grand_parent(self):
"""Testing BazaarClient diff with changes between a 2nd level descendant"""
os.chdir(self.child_branch)
self._bzr_add_file_commit("foo.txt", FOO1, "delete and modify stuff")
grand_child_branch = mktemp()
self._run_bzr(["branch", self.child_branch, grand_child_branch])
os.chdir(grand_child_branch)
# Requesting the diff between the grand child branch and its grand
# parent:
self.options.parent_branch = self.original_branch
revisions = self.client.parse_revision_spec([])
result = self.client.diff(revisions)
self.assertTrue(isinstance(result, dict))
self.assertTrue('diff' in result)
self._compare_diffs("foo.txt", result['diff'],
'a6326b53933f8b255a4b840485d8e210')
def test_guessed_summary_and_description(self):
"""Testing BazaarClient guessing summary and description"""
os.chdir(self.child_branch)
self._bzr_add_file_commit("foo.txt", FOO1, "commit 1")
self._bzr_add_file_commit("foo.txt", FOO2, "commit 2")
self._bzr_add_file_commit("foo.txt", FOO3, "commit 3")
self.options.guess_summary = True
self.options.guess_description = True
revisions = self.client.parse_revision_spec([])
commit_message = self.client.get_commit_message(revisions)
self.assertEqual("commit 3", commit_message['summary'])
description = commit_message['description']
self.assertTrue("commit 1" in description)
self.assertTrue("commit 2" in description)
self.assertFalse("commit 3" in description)
def test_guessed_summary_and_description_in_grand_parent_branch(self):
"""Testing BazaarClient guessing summary and description for grand parent branch."""
os.chdir(self.child_branch)
self._bzr_add_file_commit("foo.txt", FOO1, "commit 1")
self._bzr_add_file_commit("foo.txt", FOO2, "commit 2")
self._bzr_add_file_commit("foo.txt", FOO3, "commit 3")
self.options.guess_summary = True
self.options.guess_description = True
grand_child_branch = mktemp()
self._run_bzr(["branch", self.child_branch, grand_child_branch])
os.chdir(grand_child_branch)
# Requesting the diff between the grand child branch and its grand
# parent:
self.options.parent_branch = self.original_branch
revisions = self.client.parse_revision_spec([])
commit_message = self.client.get_commit_message(revisions)
self.assertEqual("commit 3", commit_message['summary'])
description = commit_message['description']
self.assertTrue("commit 1" in description)
self.assertTrue("commit 2" in description)
self.assertFalse("commit 3" in description)
def test_guessed_summary_and_description_with_revision_range(self):
"""Testing BazaarClient guessing summary and description with a revision range."""
os.chdir(self.child_branch)
self._bzr_add_file_commit("foo.txt", FOO1, "commit 1")
self._bzr_add_file_commit("foo.txt", FOO2, "commit 2")
self._bzr_add_file_commit("foo.txt", FOO3, "commit 3")
self.options.guess_summary = True
self.options.guess_description = True
revisions = self.client.parse_revision_spec(['2..3'])
commit_message = self.client.get_commit_message(revisions)
print(commit_message)
self.assertEqual("commit 2", commit_message['summary'])
self.assertEqual("commit 2", commit_message['description'])
def test_parse_revision_spec_no_args(self):
"""Testing BazaarClient.parse_revision_spec with no specified revisions"""
os.chdir(self.child_branch)
base_commit_id = self.client._get_revno()
self._bzr_add_file_commit("foo.txt", FOO1, "commit 1")
tip_commit_id = self.client._get_revno()
revisions = self.client.parse_revision_spec()
self.assertTrue(isinstance(revisions, dict))
self.assertTrue('base' in revisions)
self.assertTrue('tip' in revisions)
self.assertTrue('parent_base' not in revisions)
self.assertEqual(revisions['base'], base_commit_id)
self.assertEqual(revisions['tip'], tip_commit_id)
def test_parse_revision_spec_one_arg(self):
"""Testing BazaarClient.parse_revision_spec with one specified revision"""
os.chdir(self.child_branch)
base_commit_id = self.client._get_revno()
self._bzr_add_file_commit("foo.txt", FOO1, "commit 1")
tip_commit_id = self.client._get_revno()
revisions = self.client.parse_revision_spec([tip_commit_id])
self.assertTrue(isinstance(revisions, dict))
self.assertTrue('base' in revisions)
self.assertTrue('tip' in revisions)
self.assertTrue('parent_base' not in revisions)
self.assertEqual(revisions['base'], base_commit_id)
self.assertEqual(revisions['tip'], tip_commit_id)
def test_parse_revision_spec_one_arg_parent(self):
"""Testing BazaarClient.parse_revision_spec with one specified revision and a parent diff"""
os.chdir(self.original_branch)
parent_base_commit_id = self.client._get_revno()
grand_child_branch = mktemp()
self._run_bzr(["branch", self.child_branch, grand_child_branch])
os.chdir(grand_child_branch)
base_commit_id = self.client._get_revno()
self._bzr_add_file_commit("foo.txt", FOO2, "commit 2")
tip_commit_id = self.client._get_revno()
self.options.parent_branch = self.child_branch
revisions = self.client.parse_revision_spec([tip_commit_id])
self.assertTrue(isinstance(revisions, dict))
self.assertTrue('parent_base' in revisions)
self.assertTrue('base' in revisions)
self.assertTrue('tip' in revisions)
self.assertEqual(revisions['parent_base'], parent_base_commit_id)
self.assertEqual(revisions['base'], base_commit_id)
self.assertEqual(revisions['tip'], tip_commit_id)
def test_parse_revision_spec_one_arg_split(self):
"""Testing BazaarClient.parse_revision_spec with R1..R2 syntax"""
os.chdir(self.child_branch)
self._bzr_add_file_commit("foo.txt", FOO1, "commit 1")
base_commit_id = self.client._get_revno()
self._bzr_add_file_commit("foo.txt", FOO2, "commit 2")
tip_commit_id = self.client._get_revno()
revisions = self.client.parse_revision_spec(
['%s..%s' % (base_commit_id, tip_commit_id)])
self.assertTrue(isinstance(revisions, dict))
self.assertTrue('parent_base' not in revisions)
self.assertTrue('base' in revisions)
self.assertTrue('tip' in revisions)
self.assertEqual(revisions['base'], base_commit_id)
self.assertEqual(revisions['tip'], tip_commit_id)
def test_parse_revision_spec_two_args(self):
"""Testing BazaarClient.parse_revision_spec with two revisions"""
os.chdir(self.child_branch)
self._bzr_add_file_commit("foo.txt", FOO1, "commit 1")
base_commit_id = self.client._get_revno()
self._bzr_add_file_commit("foo.txt", FOO2, "commit 2")
tip_commit_id = self.client._get_revno()
revisions = self.client.parse_revision_spec(
[base_commit_id, tip_commit_id])
self.assertTrue(isinstance(revisions, dict))
self.assertTrue('parent_base' not in revisions)
self.assertTrue('base' in revisions)
self.assertTrue('tip' in revisions)
self.assertEqual(revisions['base'], base_commit_id)
self.assertEqual(revisions['tip'], tip_commit_id)
FOO = b"""\
ARMA virumque cano, Troiae qui primus ab oris
Italiam, fato profugus, Laviniaque venit
litora, multum ille et terris iactatus et alto
vi superum saevae memorem Iunonis ob iram;
multa quoque et bello passus, dum conderet urbem,
inferretque deos Latio, genus unde Latinum,
Albanique patres, atque altae moenia Romae.
Musa, mihi causas memora, quo numine laeso,
quidve dolens, regina deum tot volvere casus
insignem pietate virum, tot adire labores
impulerit. Tantaene animis caelestibus irae?
"""
FOO1 = b"""\
ARMA virumque cano, Troiae qui primus ab oris
Italiam, fato profugus, Laviniaque venit
litora, multum ille et terris iactatus et alto
vi superum saevae memorem Iunonis ob iram;
multa quoque et bello passus, dum conderet urbem,
inferretque deos Latio, genus unde Latinum,
Albanique patres, atque altae moenia Romae.
Musa, mihi causas memora, quo numine laeso,
"""
FOO2 = b"""\
ARMA virumque cano, Troiae qui primus ab oris
ARMA virumque cano, Troiae qui primus ab oris
ARMA virumque cano, Troiae qui primus ab oris
Italiam, fato profugus, Laviniaque venit
litora, multum ille et terris iactatus et alto
vi superum saevae memorem Iunonis ob iram;
multa quoque et bello passus, dum conderet urbem,
inferretque deos Latio, genus unde Latinum,
Albanique patres, atque altae moenia Romae.
Musa, mihi causas memora, quo numine laeso,
"""
FOO3 = b"""\
ARMA virumque cano, Troiae qui primus ab oris
ARMA virumque cano, Troiae qui primus ab oris
Italiam, fato profugus, Laviniaque venit
litora, multum ille et terris iactatus et alto
vi superum saevae memorem Iunonis ob iram;
dum conderet urbem,
inferretque deos Latio, genus unde Latinum,
Albanique patres, atque altae moenia Romae.
Albanique patres, atque altae moenia Romae.
Musa, mihi causas memora, quo numine laeso,
"""
FOO4 = b"""\
Italiam, fato profugus, Laviniaque venit
litora, multum ille et terris iactatus et alto
vi superum saevae memorem Iunonis ob iram;
dum conderet urbem,
inferretque deos Latio, genus unde Latinum,
Albanique patres, atque altae moenia Romae.
Musa, mihi causas memora, quo numine laeso,
"""
FOO5 = b"""\
litora, multum ille et terris iactatus et alto
Italiam, fato profugus, Laviniaque venit
vi superum saevae memorem Iunonis ob iram;
dum conderet urbem,
Albanique patres, atque altae moenia Romae.
Albanique patres, atque altae moenia Romae.
Musa, mihi causas memora, quo numine laeso,
inferretque deos Latio, genus unde Latinum,
ARMA virumque cano, Troiae qui primus ab oris
ARMA virumque cano, Troiae qui primus ab oris
"""
FOO6 = b"""\
ARMA virumque cano, Troiae qui primus ab oris
ARMA virumque cano, Troiae qui primus ab oris
Italiam, fato profugus, Laviniaque venit
litora, multum ille et terris iactatus et alto
vi superum saevae memorem Iunonis ob iram;
dum conderet urbem, inferretque deos Latio, genus
unde Latinum, Albanique patres, atque altae
moenia Romae. Albanique patres, atque altae
moenia Romae. Musa, mihi causas memora, quo numine laeso,
"""
|
yoyojacky/upm
|
refs/heads/master
|
examples/python/grovelight.py
|
19
|
# Author: Sarah Knepper <sarah.knepper@intel.com>
# Copyright (c) 2014 Intel Corporation.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
import time
import pyupm_grove as grove
# Create the light sensor object using AIO pin 0
light = grove.GroveLight(0)
# Read the input and print both the raw value and a rough lux value,
# waiting one second between readings
while True:
    print light.name() + " raw value is %d" % light.raw_value() + \
        ", which is roughly %d" % light.value() + " lux"
time.sleep(1)
# Delete the light sensor object
del light
|
ingokegel/intellij-community
|
refs/heads/master
|
python/testData/resolve/multiFile/pkgResourcesNamespace/root1/pkg/a.py
|
64
|
import pkg.second
# <ref>
|
harshaneelhg/scikit-learn
|
refs/heads/master
|
sklearn/decomposition/base.py
|
313
|
"""Principal Component Analysis Base Classes"""
# Author: Alexandre Gramfort <alexandre.gramfort@inria.fr>
# Olivier Grisel <olivier.grisel@ensta.org>
# Mathieu Blondel <mathieu@mblondel.org>
# Denis A. Engemann <d.engemann@fz-juelich.de>
# Kyle Kastner <kastnerkyle@gmail.com>
#
# License: BSD 3 clause
import numpy as np
from scipy import linalg
from ..base import BaseEstimator, TransformerMixin
from ..utils import check_array
from ..utils.extmath import fast_dot
from ..utils.validation import check_is_fitted
from ..externals import six
from abc import ABCMeta, abstractmethod
class _BasePCA(six.with_metaclass(ABCMeta, BaseEstimator, TransformerMixin)):
"""Base class for PCA methods.
Warning: This class should not be used directly.
Use derived classes instead.
"""
def get_covariance(self):
"""Compute data covariance with the generative model.
``cov = components_.T * S**2 * components_ + sigma2 * eye(n_features)``
where S**2 contains the explained variances, and sigma2 contains the
noise variances.
Returns
-------
cov : array, shape=(n_features, n_features)
Estimated covariance of data.
"""
components_ = self.components_
exp_var = self.explained_variance_
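        # Whitened components are stored with unit variance; rescale them by
        # the singular values (sqrt of the explained variance) so the
        # covariance is rebuilt on the original scale.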
if self.whiten:
components_ = components_ * np.sqrt(exp_var[:, np.newaxis])
exp_var_diff = np.maximum(exp_var - self.noise_variance_, 0.)
cov = np.dot(components_.T * exp_var_diff, components_)
cov.flat[::len(cov) + 1] += self.noise_variance_ # modify diag inplace
return cov
def get_precision(self):
"""Compute data precision matrix with the generative model.
Equals the inverse of the covariance but computed with
the matrix inversion lemma for efficiency.
Returns
-------
precision : array, shape=(n_features, n_features)
Estimated precision of data.
"""
n_features = self.components_.shape[1]
# handle corner cases first
if self.n_components_ == 0:
return np.eye(n_features) / self.noise_variance_
if self.n_components_ == n_features:
return linalg.inv(self.get_covariance())
# Get precision using matrix inversion lemma
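        # (Woodbury identity: the low-rank-plus-noise covariance is inverted
        # via an (n_components x n_components) solve instead of inverting the
        # full (n_features x n_features) matrix.)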
components_ = self.components_
exp_var = self.explained_variance_
if self.whiten:
components_ = components_ * np.sqrt(exp_var[:, np.newaxis])
exp_var_diff = np.maximum(exp_var - self.noise_variance_, 0.)
precision = np.dot(components_, components_.T) / self.noise_variance_
precision.flat[::len(precision) + 1] += 1. / exp_var_diff
precision = np.dot(components_.T,
np.dot(linalg.inv(precision), components_))
precision /= -(self.noise_variance_ ** 2)
precision.flat[::len(precision) + 1] += 1. / self.noise_variance_
return precision
@abstractmethod
    def fit(self, X, y=None):
"""Placeholder for fit. Subclasses should implement this method!
Fit the model with X.
Parameters
----------
X : array-like, shape (n_samples, n_features)
Training data, where n_samples is the number of samples and
n_features is the number of features.
Returns
-------
self : object
Returns the instance itself.
"""
def transform(self, X, y=None):
"""Apply dimensionality reduction to X.
X is projected on the first principal components previously extracted
from a training set.
Parameters
----------
X : array-like, shape (n_samples, n_features)
New data, where n_samples is the number of samples
and n_features is the number of features.
Returns
-------
X_new : array-like, shape (n_samples, n_components)
Examples
--------
>>> import numpy as np
>>> from sklearn.decomposition import IncrementalPCA
>>> X = np.array([[-1, -1], [-2, -1], [-3, -2], [1, 1], [2, 1], [3, 2]])
>>> ipca = IncrementalPCA(n_components=2, batch_size=3)
>>> ipca.fit(X)
IncrementalPCA(batch_size=3, copy=True, n_components=2, whiten=False)
>>> ipca.transform(X) # doctest: +SKIP
"""
check_is_fitted(self, ['mean_', 'components_'], all_or_any=all)
X = check_array(X)
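        # Center with the training mean, then project onto the principal
        # components.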
if self.mean_ is not None:
X = X - self.mean_
X_transformed = fast_dot(X, self.components_.T)
if self.whiten:
X_transformed /= np.sqrt(self.explained_variance_)
return X_transformed
def inverse_transform(self, X, y=None):
"""Transform data back to its original space.
In other words, return an input X_original whose transform would be X.
Parameters
----------
X : array-like, shape (n_samples, n_components)
New data, where n_samples is the number of samples
and n_components is the number of components.
Returns
-------
        X_original : array-like, shape (n_samples, n_features)
Notes
-----
If whitening is enabled, inverse_transform will compute the
exact inverse operation, which includes reversing whitening.
"""
if self.whiten:
return fast_dot(X, np.sqrt(self.explained_variance_[:, np.newaxis]) *
self.components_) + self.mean_
else:
return fast_dot(X, self.components_) + self.mean_
|
neuromat/abraco
|
refs/heads/master
|
faca_parte/views.py
|
1
|
from django.core.urlresolvers import reverse
from django.contrib import messages
from django.http import HttpResponseRedirect
from django.shortcuts import render
from django.utils.translation import ugettext_lazy as _
from models import Person
from forms import PersonForm
def registration(request, template_name="faca_parte.html"):
person_form = PersonForm(request.POST or None)
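    # Bind the form to the submitted data when present; otherwise render it
    # unbound.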
    if request.method == "POST" and request.POST.get('action') == "send":
email_typed = person_form['email'].value()
if Person.objects.filter(email=email_typed).exists():
messages.error(request, _("E-mail already registered"))
if person_form.is_valid():
person_form.save()
messages.success(request, _('Registration created successfully!'))
redirect_url = reverse("registration")
return HttpResponseRedirect(redirect_url)
else:
messages.warning(request, _('Information not saved.'))
context = {"person_form": person_form}
return render(request, template_name, context)
|
ales-erjavec/orange
|
refs/heads/master
|
Orange/testing/unit/tests/test_distance.py
|
6
|
try:
import unittest2 as unittest
except ImportError:
import unittest
from Orange.testing import testing
from Orange.testing.testing import datasets_driven, test_on_data
from Orange.distance import *
@datasets_driven
class TestEuclideanDistance(testing.DistanceTestCase):
DISTANCE_CONSTRUCTOR = Euclidean()
@datasets_driven
class TestMannhatanDistance(testing.DistanceTestCase):
DISTANCE_CONSTRUCTOR = Manhattan()
@datasets_driven
class TestHammingDistance(testing.DistanceTestCase):
DISTANCE_CONSTRUCTOR = Hamming()
@datasets_driven
class TestReliefDistance(testing.DistanceTestCase):
DISTANCE_CONSTRUCTOR = Relief()
@datasets_driven
class TestPearsonRDistance(testing.DistanceTestCase):
DISTANCE_CONSTRUCTOR = PearsonR()
@datasets_driven
class TestSpearmanRDistance(testing.DistanceTestCase):
DISTANCE_CONSTRUCTOR = SpearmanR()
@datasets_driven
class TestPearsonRAbsoluteDistance(testing.DistanceTestCase):
DISTANCE_CONSTRUCTOR = PearsonRAbsolute()
@datasets_driven
class TestSpearmanRAbsoluteDistance(testing.DistanceTestCase):
DISTANCE_CONSTRUCTOR = SpearmanRAbsolute()
@datasets_driven
class TestMahalanobisDistance(testing.DistanceTestCase):
DISTANCE_CONSTRUCTOR = Mahalanobis()
if __name__ == "__main__":
unittest.main()
|
bdubertret/uwsgi
|
refs/heads/master
|
plugins/xslt/uwsgiplugin.py
|
13
|
import os
NAME='xslt'
CFLAGS = os.popen('xslt-config --cflags').read().rstrip().split()
LDFLAGS = []
LIBS = os.popen('xslt-config --libs').read().rstrip().split()
GCC_LIST = ['xslt']
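# Build sketch (assumes the standard uWSGI plugin build flow and that the
# libxslt development package is installed so `xslt-config` is on the PATH):
#
#     python uwsgiconfig.py --plugin plugins/xslt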
|
nexdatas/writer
|
refs/heads/develop
|
test/ClientFieldTagWriterH5Cpp_test.py
|
1
|
#!/usr/bin/env python
# This file is part of nexdatas - Tango Server for NeXus data writer
#
# Copyright (C) 2012-2017 DESY, Jan Kotanski <jkotan@mail.desy.de>
#
# nexdatas is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# nexdatas is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with nexdatas. If not, see <http://www.gnu.org/licenses/>.
# \package test nexdatas
# \file ClientFieldTagWriterH5Cpp_test.py
# unittests for field Tags running Tango Server
#
import unittest
import os
import sys
import random
import struct
import binascii
import time
import numpy
try:
from Checkers import Checker
except Exception:
from .Checkers import Checker
from nxswriter.TangoDataWriter import TangoDataWriter
from nxstools import filewriter as FileWriter
from nxstools import h5cppwriter as H5CppWriter
# if 64-bit machine
IS64BIT = (struct.calcsize("P") == 8)
# test fixture
if sys.version_info > (3,):
long = int
class ClientFieldTagWriterH5CppTest(unittest.TestCase):
# constructor
# \param methodName name of the test method
def __init__(self, methodName):
unittest.TestCase.__init__(self, methodName)
try:
# random seed
self.seed = long(binascii.hexlify(os.urandom(16)), 16)
except NotImplementedError:
# random seed
self.seed = long(time.time() * 256) # use fractional seconds
# self.seed = 53867028435352363366241944565880343254
self.__rnd = random.Random(self.seed)
self._counter = [1, -2, 6, -8, 9, -11]
self._fcounter = [1.1, -2.4, 6.54, -8.456, 9.456, -0.46545]
self._sc = Checker(self)
self._mca1 = [[self.__rnd.randint(-100, 100)
for e in range(256)] for i in range(3)]
self._mca2 = [[self.__rnd.randint(0, 100)
for e in range(256)] for i in range(3)]
self._fmca1 = [self._sc.nicePlot(1024, 10) for i in range(4)]
# self._fmca2 = [(float(e)/(100.+e)) for e in range(2048)]
self._pco1 = [[[self.__rnd.randint(0, 100) for e1 in range(8)]
for e2 in range(10)] for i in range(3)]
self._fpco1 = [self._sc.nicePlot2D(20, 30, 5) for i in range(4)]
self._bint = "int64" if IS64BIT else "int32"
self._buint = "uint64" if IS64BIT else "uint32"
self._bfloat = "float64" if IS64BIT else "float32"
# test starter
# \brief Common set up
def setUp(self):
print("\nsetting up...")
print("SEED = %s" % self.seed)
print("CHECKER SEED = %s" % self._sc.seed)
# test closer
# \brief Common tear down
def tearDown(self):
print("tearing down ...")
def setProp(self, rc, name, value):
setattr(rc, name, value)
# opens writer
# \param fname file name
# \param xml XML settings
# \param json JSON Record with client settings
# \returns Tango Data Writer instance
def openWriter(self, fname, xml, json=None):
tdw = TangoDataWriter()
self.setProp(tdw, "writer", "h5cpp")
tdw.fileName = fname
tdw.openFile()
tdw.xmlsettings = xml
# tdw.numberOfThreads = 1
if json:
tdw.jsonrecord = json
tdw.openEntry()
return tdw
# closes writer
# \param tdw Tango Data Writer instance
# \param json JSON Record with client settings
def closeWriter(self, tdw, json=None):
if json:
tdw.jsonrecord = json
tdw.closeEntry()
tdw.closeFile()
# performs one record step
def record(self, tdw, string):
tdw.record(string)
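# Every test below drives the writer through the same three-phase cycle via the
# helpers above (a sketch; `fname`, `xml` and the JSON payloads come from the
# individual test method):
#
#     tdw = self.openWriter(fname, xml, json='{"data": {"cnt": 1}}')   # INIT strategies
#     self.record(tdw, '{"data": {"cnt": 2}}')                         # STEP strategies
#     self.closeWriter(tdw, json='{"data": {"cnt": 3}}')               # FINAL strategies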
# scanRecord test
# \brief It tests recording of simple h5 file
def test_clientIntScalar(self):
fun = sys._getframe().f_code.co_name
print("Run: %s.%s() " % (self.__class__.__name__, fun))
fname = '%s/%s%s.h5' % (os.getcwd(), self.__class__.__name__, fun)
xml = """<definition>
<group type="NXentry" name="entry1">
<group type="NXinstrument" name="instrument">
<group type="NXdetector" name="detector">
<field units="m" type="NX_INT" name="counter">
<strategy mode="STEP"/>
<datasource type="CLIENT">
<record name="cnt"/>
</datasource>
</field>
<field units="m" type="NX_INT8" name="counter8">
<strategy mode="STEP"/>
<datasource type="CLIENT">
<record name="cnt_8"/>
</datasource>
</field>
<field units="m" type="NX_INT16" name="triggered_counter16">
<strategy mode="STEP" trigger="trigger1"/>
<datasource type="CLIENT">
<record name="cnt_16"/>
</datasource>
</field>
<field units="m" type="NX_INT32" name="counter32">
<strategy mode="STEP"/>
<datasource type="CLIENT">
<record name="cnt_32"/>
</datasource>
</field>
<field units="m" type="NX_INT64" name="counter64">
<strategy mode="STEP"/>
<datasource type="CLIENT">
<record name="cnt_64"/>
</datasource>
</field>
<field units="m" type="NX_UINT" name="ucounter">
<strategy mode="STEP"/>
<datasource type="CLIENT">
<record name="cnt_u"/>
</datasource>
</field>
<field units="m" type="NX_POSINT" name="pcounter">
<strategy mode="STEP"/>
<datasource type="CLIENT">
<record name="cnt_p"/>
</datasource>
</field>
<field units="m" type="NX_UINT8" name="ucounter8">
<strategy mode="STEP"/>
<datasource type="CLIENT">
<record name="cnt_u8"/>
</datasource>
</field>
<field units="m" type="NX_UINT16" name="ucounter16">
<strategy mode="STEP"/>
<datasource type="CLIENT">
<record name="cnt_u16"/>
</datasource>
</field>
<field units="m" type="NX_UINT32" name="mclient_ucounter32">
<strategy mode="STEP"/>
<datasource type="MCLIENT" name="external datasource">
<record name="cnt_u32"/>
</datasource>
</field>
<field units="m" type="NX_UINT64" name="ucounter64">
<strategy mode="STEP"/>
<datasource type="CLIENT">
<record name="cnt_u64"/>
</datasource>
</field>
<field units="m" type="NX_UINT64" name="ucounter64_canfail">
<strategy mode="STEP" canfail="true"/>
<datasource type="CLIENT">
<record name="cnt_u64_canfail"/>
</datasource>
</field>
<field units="m" type="NX_INT64" name="init64">
<strategy mode="INIT"/>
<datasource type="CLIENT">
<record name="cnt_64"/>
</datasource>
</field>
<field units="m" type="NX_UINT32" name="final32">
<strategy mode="FINAL"/>
<datasource type="CLIENT">
<record name="cnt_u32"/>
</datasource>
</field>
<field units="m" type="NX_INT32" name="final32_canfail">
<strategy mode="FINAL" canfail="true"/>
<datasource type="CLIENT">
<record name="cnt_32_canfail"/>
</datasource>
</field>
<field units="m" type="NX_INT64" name="init64_canfail">
<strategy mode="INIT" canfail="true"/>
<datasource type="CLIENT">
<record name="cnt_64_canfail"/>
</datasource>
</field>
<field units="m" type="NX_INT32" name="postrun_counter32">
<strategy mode="POSTRUN">
https://haso.desy.de/counters/counter32.dat
</strategy>
</field>
</group>
</group>
</group>
</definition>
"""
uc = self._counter[0]
datasources = ', "datasources":{"MCLIENT":' + \
'"nxswriter.ClientSource.ClientSource"}'
tdw = self.openWriter(
fname, xml,
json='{"data": { "cnt_64":' + str(uc) + ' }' +
str(datasources) + ' }')
flip = True
trigstr = ', "triggers":["trigger1"]'
for c in self._counter:
uc = abs(c)
self.record(tdw, '{"data": {"cnt":' + str(c) +
', "cnt_8":' + str(c) +
', "cnt_16":' + str(c) +
', "cnt_32":' + str(c) +
', "cnt_64":' + str(c) +
', "cnt_u":' + str(uc) +
', "cnt_p":' + str(uc) +
', "cnt_u8":' + str(uc) +
', "cnt_u16":' + str(uc) +
', "cnt_u32":' + str(uc) +
((', "cnt_u64_canfail":' + str(uc)) if flip else ' ') +
', "cnt_u64":' + str(uc) +
' } ' +
str(trigstr if flip else ' ') +
' }')
flip = not flip
uc = abs(self._counter[0])
self.closeWriter(
tdw, json='{"data": { "cnt_u32":' + str(uc) + ' } }')
# check the created file
FileWriter.writer = H5CppWriter
f = FileWriter.open_file(fname, readonly=True)
det = self._sc.checkFieldTree(f, fname, 17)
self._sc.checkScalarField(
det, "counter", "int64", "NX_INT", self._counter)
self._sc.checkScalarField(
det, "counter8", "int8", "NX_INT8", self._counter)
self._sc.checkScalarField(
det, "triggered_counter16", "int16", "NX_INT16",
self._counter[0::2])
self._sc.checkScalarField(
det, "counter32", "int32", "NX_INT32", self._counter)
self._sc.checkScalarField(
det, "counter64", "int64", "NX_INT64", self._counter)
self._sc.checkScalarField(
det, "ucounter", "uint64", "NX_UINT",
[abs(c) for c in self._counter])
self._sc.checkScalarField(
det, "ucounter8", "uint8", "NX_UINT8",
[abs(c) for c in self._counter])
self._sc.checkScalarField(det, "ucounter16", "uint16", "NX_UINT16",
[abs(c) for c in self._counter])
self._sc.checkScalarField(
det, "mclient_ucounter32", "uint32", "NX_UINT32",
[abs(c) for c in self._counter])
self._sc.checkScalarField(det, "ucounter64", "uint64", "NX_UINT64",
[abs(c) for c in self._counter])
self._sc.checkScalarField(
det, "ucounter64_canfail", "uint64", "NX_UINT64",
[self._counter[i] if not i % 2 else
numpy.iinfo(getattr(numpy, 'int64')).max
for i in range(len(self._counter))],
attrs={
"type": "NX_UINT64", "units": "m", "nexdatas_source": None,
"nexdatas_strategy": "STEP",
"nexdatas_canfail": "FAILED",
"nexdatas_canfail_error": None})
self._sc.checkSingleScalarField(
det, "init64", "int64", "NX_INT64", self._counter[0])
self._sc.checkSingleScalarField(
det, "final32", "uint32", "NX_UINT32", abs(self._counter[0]))
self._sc.checkSingleScalarField(
det, "final32_canfail", "int32", "NX_INT32", numpy.iinfo(
getattr(numpy, 'int32')).max,
attrs={"type": "NX_INT32", "units": "m", "nexdatas_source": None,
"nexdatas_strategy": "FINAL", "nexdatas_canfail": "FAILED",
"nexdatas_canfail_error": None})
self._sc.checkSingleScalarField(
det, "init64_canfail", "int64", "NX_INT64",
numpy.iinfo(getattr(numpy, 'int64')).max,
attrs={"type": "NX_INT64", "units": "m", "nexdatas_source": None,
"nexdatas_strategy": "INIT", "nexdatas_canfail": "FAILED",
"nexdatas_canfail_error": None})
self._sc.checkPostScalarField(
det, "postrun_counter32", "int32", "NX_INT32",
"https://haso.desy.de/counters/counter32.dat")
f.close()
os.remove(fname)
# scanRecord test
# \brief It tests recording of simple h5 file
def test_clientAttrScalar(self):
fun = sys._getframe().f_code.co_name
print("Run: %s.%s() " % (self.__class__.__name__, fun))
fname = '%s/%s%s.h5' % (os.getcwd(), self.__class__.__name__, fun)
xml = """<definition>
<group type="NXentry" name="entry1">
<group type="NXinstrument" name="instrument">
<group type="NXdetector" name="detector">
<attribute type="NX_FLOAT" name="scalar_float">
<strategy mode="STEP"/>
<datasource type="CLIENT">
<record name="fcnt"/>
</datasource>
</attribute>
<attribute type="NX_FLOAT32" name="scalar_float32_canfail">
<strategy mode="STEP" canfail="true"/>
<datasource type="CLIENT">
<record name="fcnt_canfail"/>
</datasource>
</attribute>
<attribute type="NX_CHAR" name="scalar_string">
<strategy mode="STEP"/>
<datasource type="CLIENT">
<record name="fcnt"/>
</datasource>
</attribute>
<attribute type="NX_INT" name="init_scalar_int">
<strategy mode="INIT"/>
<datasource type="CLIENT">
<record name="cnt"/>
</datasource>
</attribute>
<attribute type="NX_INT64" name="final_scalar_int64_canfail">
<strategy mode="FINAL" canfail="true"/>
<datasource type="CLIENT">
<record name="cnt_canfail"/>
</datasource>
</attribute>
<attribute type="NX_BOOLEAN" name="flag">
<strategy mode="INIT"/>
<datasource type="CLIENT">
<record name="logical"/>
</datasource>
</attribute>
</group>
<field type="NX_FLOAT" name="counter">
<attribute type="NX_FLOAT32" name="scalar_float32">
<strategy mode="STEP"/>
<datasource type="CLIENT">
<record name="fcnt"/>
</datasource>
</attribute>
<attribute type="NX_FLOAT64" name="init_scalar_float64_canfail">
<strategy mode="INIT" canfail="true"/>
<datasource type="CLIENT">
<record name="fcnt_canfail"/>
</datasource>
</attribute>
<attribute type="NX_UINT32" name="scalar_uint32_canfail">
<strategy mode="STEP" canfail="true"/>
<datasource type="CLIENT">
<record name="fcnt_canfail"/>
</datasource>
</attribute>
<attribute type="NX_CHAR" name="scalar_string">
<strategy mode="STEP"/>
<datasource type="CLIENT">
<record name="fcnt"/>
</datasource>
</attribute>
<attribute type="NX_INT8" name="final_scalar_int8">
<strategy mode="FINAL"/>
<datasource type="CLIENT">
<record name="cnt"/>
</datasource>
</attribute>
1.2
</field>
</group>
</group>
</definition>
"""
logical = ["1", "0", "true", "false", "True", "False", "TrUe", "FaLsE"]
tdw = self.openWriter(fname, xml, json='{"data": {' +
' "cnt":' + str(self._counter[0]) +
', "logical":' + str(logical[0]) +
' } }')
steps = min(len(self._fcounter), len(self._counter))
for i in range(steps):
self.record(tdw, '{"data": {' +
' "cnt":' + str(self._counter[i]) +
', "fcnt":' + str(self._fcounter[i]) +
', "cnt_32":' + str(self._fcounter[i]) +
', "cnt_64":' + str(self._fcounter[i]) +
' } }')
self.closeWriter(
tdw, json='{"data": { "cnt":' + str(self._counter[0]) + ' } }')
# check the created file
FileWriter.writer = H5CppWriter
f = FileWriter.open_file(fname, readonly=True)
det, field = self._sc.checkAttributeTree(f, fname, 8, 7)
self._sc.checkScalarAttribute(
det, "scalar_float", "float64", self._fcounter[steps - 1],
error=1.e-14)
self._sc.checkScalarAttribute(
det, "scalar_string", "string",
str(self._fcounter[steps - 1]))
self._sc.checkScalarAttribute(
det, "init_scalar_int", "int64", self._counter[0])
self._sc.checkScalarAttribute(det, "flag", "bool", logical[0])
self._sc.checkScalarAttribute(
field, "scalar_float32", "float32", self._fcounter[steps - 1],
error=1.e-6)
self._sc.checkScalarAttribute(
field, "init_scalar_float64_canfail", "float64",
numpy.finfo(getattr(numpy, 'float64')).max)
self._sc.checkScalarAttribute(field, "scalar_string", "string",
str(self._fcounter[steps - 1]))
self._sc.checkScalarAttribute(
field, "final_scalar_int8", "int8", self._counter[0])
self._sc.checkScalarAttribute(
det, "final_scalar_int64_canfail", "int64",
numpy.iinfo(getattr(numpy, 'int64')).max)
self._sc.checkScalarAttribute(
field, "scalar_uint32_canfail", "uint32",
numpy.iinfo(getattr(numpy, 'uint32')).max)
self._sc.checkScalarAttribute(
det, "scalar_float32_canfail", "float32",
numpy.finfo(getattr(numpy, 'float32')).max)
self._sc.checkScalarAttribute(
det, "nexdatas_canfail", "string", "FAILED")
self._sc.checkScalarAttribute(
field, "nexdatas_canfail", "string", "FAILED")
f.close()
os.remove(fname)
# scanRecord test
# \brief It tests recording of simple h5 file
def test_clientFloatScalar(self):
fun = sys._getframe().f_code.co_name
print("Run: %s.%s() " % (self.__class__.__name__, fun))
fname = '%s/%s%s.h5' % (os.getcwd(), self.__class__.__name__, fun)
xml = """<definition>
<group type="NXentry" name="entry1">
<group type="NXinstrument" name="instrument">
<group type="NXdetector" name="detector">
<field units="m" type="NX_FLOAT" name="counter">
<strategy mode="STEP"/>
<datasource type="CLIENT">
<record name="cnt"/>
</datasource>
</field>
<field units="m" type="NX_FLOAT32" name="counter_32">
<strategy mode="STEP"/>
<datasource type="CLIENT">
<record name="cnt_32"/>
</datasource>
</field>
<field units="m" type="NX_FLOAT64" name="counter_64">
<strategy mode="STEP"/>
<datasource type="CLIENT">
<record name="cnt_64"/>
</datasource>
</field>
<field units="m" type="NX_NUMBER" name="counter_nb">
<strategy mode="STEP"/>
<datasource type="CLIENT">
<record name="cnt_64"/>
</datasource>
</field>
<field units="m" type="NX_NUMBER" name="counter_nb_canfail">
<strategy mode="STEP" canfail="true"/>
<datasource type="CLIENT">
<record name="cnt_64_canfail"/>
</datasource>
</field>
<field units="m" type="NX_FLOAT32" name="init_32">
<strategy mode="INIT"/>
<datasource type="CLIENT">
<record name="cnt_32"/>
</datasource>
</field>
<field units="m" type="NX_FLOAT64" name="final_64">
<strategy mode="FINAL"/>
<datasource type="CLIENT">
<record name="cnt_64"/>
</datasource>
</field>
<field units="m" type="NX_FLOAT32" name="final_32_canfail">
<strategy mode="FINAL" canfail="true" />
<datasource type="CLIENT">
<record name="cnt_32_canfail"/>
</datasource>
</field>
<field units="m" type="NX_FLOAT64" name="init_64_canfail">
<strategy mode="INIT" canfail="true" />
<datasource type="CLIENT">
<record name="cnt_64_canfail"/>
</datasource>
</field>
</group>
</group>
</group>
</definition>
"""
tdw = self.openWriter(
fname, xml,
json='{"data": { "cnt_32":' + str(self._fcounter[0]) + ' } }')
flip = True
for c in self._fcounter:
self.record(
tdw, '{"data": {"cnt":' + str(c) +
', "cnt_32":' + str(c) +
', "cnt_64":' + str(c) +
((', "cnt_64_canfail":' + str(c)) if flip else ' ') +
' } }')
flip = not flip
self.closeWriter(
tdw, json='{"data": { "cnt_64":' + str(self._fcounter[0]) + ' } }')
# check the created file
FileWriter.writer = H5CppWriter
f = FileWriter.open_file(fname, readonly=True)
det = self._sc.checkFieldTree(f, fname, 9)
self._sc.checkScalarField(
det, "counter", "float64", "NX_FLOAT", self._fcounter, 1.0e-14)
self._sc.checkScalarField(
det, "counter_64", "float64", "NX_FLOAT64",
self._fcounter, 1.0e-14)
self._sc.checkScalarField(
det, "counter_32", "float32", "NX_FLOAT32",
self._fcounter, 1.0e-06)
self._sc.checkScalarField(
det, "counter_nb", "float64", "NX_NUMBER",
self._fcounter, 1.0e-14)
self._sc.checkSingleScalarField(
det, "init_32", "float32", "NX_FLOAT32",
self._fcounter[0], 1.0e-06)
self._sc.checkSingleScalarField(
det, "final_64", "float64", "NX_FLOAT64",
self._fcounter[0], 1.0e-14)
self._sc.checkScalarField(
det, "counter_nb_canfail", "float64", "NX_NUMBER",
[self._fcounter[i] if not i % 2 else
numpy.finfo(getattr(numpy, 'float64')).max
for i in range(len(self._fcounter))],
attrs={
"type": "NX_NUMBER", "units": "m", "nexdatas_source": None,
"nexdatas_strategy": "STEP", "nexdatas_canfail": "FAILED",
"nexdatas_canfail_error": None})
self._sc.checkSingleScalarField(
det, "init_64_canfail", "float64", "NX_FLOAT64",
numpy.finfo(getattr(numpy, 'float64')).max,
attrs={
"type": "NX_FLOAT64", "units": "m", "nexdatas_source": None,
"nexdatas_strategy": "INIT", "nexdatas_canfail": "FAILED",
"nexdatas_canfail_error": None})
self._sc.checkSingleScalarField(
det, "final_32_canfail", "float32", "NX_FLOAT32",
numpy.finfo(getattr(numpy, 'float32')).max,
attrs={
"type": "NX_FLOAT32", "units": "m", "nexdatas_source": None,
"nexdatas_strategy": "FINAL", "nexdatas_canfail": "FAILED",
"nexdatas_canfail_error": None})
f.close()
os.remove(fname)
# scanRecord test
# \brief It tests recording of simple h5 file
def test_clientScalar(self):
fun = sys._getframe().f_code.co_name
print("Run: %s.%s() " % (self.__class__.__name__, fun))
fname = '%s/%s%s.h5' % (os.getcwd(), self.__class__.__name__, fun)
xml = """<definition>
<group type="NXentry" name="entry1">
<group type="NXinstrument" name="instrument">
<group type="NXdetector" name="detector">
<field units="m" type="NX_DATE_TIME" name="time">
<strategy mode="STEP"/>
<datasource type="CLIENT">
<record name="timestamp"/>
</datasource>
</field>
<field units="m" type="ISO8601" name="isotime">
<strategy mode="STEP"/>
<datasource type="CLIENT">
<record name="timestamp"/>
</datasource>
</field>
<field units="m" type="NX_CHAR" name="string_time">
<strategy mode="STEP"/>
<datasource type="CLIENT">
<record name="timestamp"/>
</datasource>
</field>
<field units="m" type="NX_CHAR" name="string_time_canfail">
<strategy mode="STEP" canfail="true"/>
<datasource type="CLIENT">
<record name="timestamp_canfail"/>
</datasource>
</field>
<field units="m" type="NX_BOOLEAN" name="flags">
<strategy mode="STEP"/>
<datasource type="CLIENT">
<record name="logical"/>
</datasource>
</field>
<field units="m" type="NX_BOOLEAN" name="bool_flags">
<strategy mode="STEP"/>
<datasource type="CLIENT">
<record name="bool"/>
</datasource>
</field>
<field units="m" type="NX_CHAR" name="init_string">
<strategy mode="INIT"/>
<datasource type="CLIENT">
<record name="timestamp"/>
</datasource>
</field>
<field units="m" type="NX_BOOLEAN" name="final_flag">
<strategy mode="FINAL"/>
<datasource type="CLIENT">
<record name="logical"/>
</datasource>
</field>
<field units="m" type="NX_CHAR" name="final_string_canfail">
<strategy mode="FINAL" canfail ="true" />
<datasource type="CLIENT">
<record name="timestamp_canfail"/>
</datasource>
</field>
<field units="m" type="NX_BOOLEAN" name="init_flag_canfail">
<strategy mode="INIT" canfail ="true" />
<datasource type="CLIENT">
<record name="logical_canfail"/>
</datasource>
</field>
</group>
</group>
</group>
</definition>
"""
dates = [
"1996-07-31T21:15:22.123+0600", "2012-11-14T14:05:23.2344-0200",
"2014-02-04T04:16:12.43-0100", "2012-11-14T14:05:23.2344-0200",
"1996-07-31T21:15:22.123+0600", "2012-11-14T14:05:23.2344-0200",
"2014-02-04T04:16:12.43-0100", "2012-11-14T14:05:23.2344-0200",
]
logical = ["1", "0", "true", "false", "True", "False", "TrUe", "FaLsE"]
tdw = self.openWriter(
fname, xml,
json='{"data": { "timestamp":"' + str(dates[0]) + '" } }')
flip = True
for i in range(min(len(dates), len(logical))):
self.record(tdw, '{"data": {"timestamp":"' + str(dates[i]) +
'", "logical":"' + str(logical[i]) +
'", "bool":true' +
((', "timestamp_canfail":"' + str(
dates[i]) + '"') if flip else ' ') +
' } }')
flip = not flip
self.closeWriter(
tdw, json='{"data": { "logical":"' + str(logical[0]) + '" } }')
# check the created file
FileWriter.writer = H5CppWriter
f = FileWriter.open_file(fname, readonly=True)
det = self._sc.checkFieldTree(f, fname, 10)
self._sc.checkScalarField(det, "time", "string", "NX_DATE_TIME", dates)
self._sc.checkScalarField(det, "isotime", "string", "ISO8601", dates)
self._sc.checkScalarField(
det, "string_time", "string", "NX_CHAR", dates)
self._sc.checkScalarField(det, "flags", "bool", "NX_BOOLEAN", logical)
self._sc.checkScalarField(
det, "bool_flags", "bool", "NX_BOOLEAN", [True for c in range(8)])
self._sc.checkScalarField(
det, "string_time_canfail", "string", "NX_CHAR",
[dates[i] if not i % 2 else ''for i in range(len(dates))],
attrs={"type": "NX_CHAR", "units": "m", "nexdatas_source": None,
"nexdatas_strategy": "STEP", "nexdatas_canfail": "FAILED",
"nexdatas_canfail_error": None})
self._sc.checkSingleStringScalarField(
det, "final_string_canfail", "string", "NX_CHAR", '',
attrs={"type": "NX_CHAR", "units": "m", "nexdatas_source": None,
"nexdatas_strategy": "FINAL", "nexdatas_canfail": "FAILED",
"nexdatas_canfail_error": None})
self._sc.checkSingleScalarField(
det, "init_flag_canfail", "bool", "NX_BOOLEAN", False,
attrs={
"type": "NX_BOOLEAN", "units": "m", "nexdatas_source": None,
"nexdatas_strategy": "INIT", "nexdatas_canfail": "FAILED",
"nexdatas_canfail_error": None})
self._sc.checkSingleStringScalarField(
det, "init_string", "string", "NX_CHAR", dates[0])
self._sc.checkSingleScalarField(
det, "final_flag", "bool", "NX_BOOLEAN", logical[0])
f.close()
os.remove(fname)
# scanRecord test
# \brief It tests recording of simple h5 file
def test_clientIntSpectrum(self):
fun = sys._getframe().f_code.co_name
print("Run: %s.%s() " % (self.__class__.__name__, fun))
fname = '%s/%s%s.h5' % (os.getcwd(), self.__class__.__name__, fun)
xml = """<definition>
<group type="NXentry" name="entry1">
<group type="NXinstrument" name="instrument">
<group type="NXdetector" name="detector">
<field units="" type="NX_INT" name="mca_int">
<dimensions rank="1">
<dim value="256" index="1"/>
</dimensions>
<strategy mode="STEP"/>
<datasource type="CLIENT">
<record name="mca_int"/>
</datasource>
</field>
<field units="" type="NX_INT8" name="mca_int8">
<dimensions rank="1">
<dim value="256" index="1"/>
</dimensions>
<strategy mode="STEP" grows="2"/>
<datasource type="CLIENT">
<record name="mca_int"/>
</datasource>
</field>
<field units="" type="NX_INT16" name="mca_int16">
<dimensions rank="1">
<dim value="256" index="1"/>
</dimensions>
<strategy mode="STEP" compression="true"/>
<datasource type="CLIENT">
<record name="mca_int"/>
</datasource>
</field>
<field units="" type="NX_INT16" name="mca_int16_canfail">
<dimensions rank="1">
<dim value="256" index="1"/>
</dimensions>
<strategy mode="STEP" compression="true" canfail="true"/>
<datasource type="CLIENT">
<record name="mca_int_canfail"/>
</datasource>
</field>
<field units="" type="NX_INT32" name="mca_int32">
<dimensions rank="1">
<dim value="256" index="1"/>
</dimensions>
<strategy mode="STEP" compression="true" grows="2"
shuffle="false" />
<datasource type="CLIENT">
<record name="mca_int"/>
</datasource>
</field>
<field units="" type="NX_INT64" name="mca_int64">
<dimensions rank="1">
<dim value="256" index="1"/>
</dimensions>
<strategy mode="STEP" compression="true" rate="3"/>
<datasource type="CLIENT">
<record name="mca_int"/>
</datasource>
</field>
<field units="" type="NX_UINT" name="mca_uint">
<dimensions rank="1">
<dim value="256" index="1"/>
</dimensions>
<strategy mode="STEP"/>
<datasource type="CLIENT">
<record name="mca_uint"/>
</datasource>
</field>
<field units="" type="NX_UINT8" name="mca_uint8">
<dimensions rank="1">
<dim value="256" index="1"/>
</dimensions>
<strategy mode="STEP" grows="2"/>
<datasource type="CLIENT">
<record name="mca_uint"/>
</datasource>
</field>
<field units="" type="NX_UINT16" name="mca_uint16">
<dimensions rank="1">
<dim value="256" index="1"/>
</dimensions>
<strategy mode="STEP" compression="true"/>
<datasource type="CLIENT">
<record name="mca_uint"/>
</datasource>
</field>
<field units="" type="NX_UINT32" name="mca_uint32">
<dimensions rank="1">
<dim value="256" index="1"/>
</dimensions>
<strategy mode="STEP" compression="true" grows="2"
shuffle="false" />
<datasource type="CLIENT">
<record name="mca_uint"/>
</datasource>
</field>
<field units="" type="NX_UINT32" name="mca_uint32_canfail" >
<dimensions rank="1">
<dim value="256" index="1"/>
</dimensions>
<strategy mode="STEP" compression="true" grows="2"
shuffle="false" canfail="true"/>
<datasource type="CLIENT">
<record name="mca_uint_canfail"/>
</datasource>
</field>
<field units="" type="NX_UINT64" name="mca_uint64">
<dimensions rank="1">
<dim value="256" index="1"/>
</dimensions>
<strategy mode="STEP" compression="true" rate="3"/>
<datasource type="CLIENT">
<record name="mca_uint"/>
</datasource>
</field>
<field units="" type="NX_INT64" name="mca_int64_dim">
<dimensions rank="1"/>
<strategy mode="STEP" compression="true" rate="3"/>
<datasource type="CLIENT">
<record name="mca_int"/>
</datasource>
</field>
<field units="" type="NX_INT64" name="init_mca_int64">
<dimensions rank="1">
<dim value="256" index="1"/>
</dimensions>
<strategy mode="INIT" compression="true" rate="3"/>
<datasource type="CLIENT">
<record name="mca_int"/>
</datasource>
</field>
<field units="" type="NX_UINT32" name="final_mca_uint32">
<dimensions rank="1">
<dim value="256" index="1"/>
</dimensions>
<strategy mode="FINAL"/>
<datasource type="CLIENT">
<record name="mca_uint"/>
</datasource>
</field>
<field units="" type="NX_INT64" name="init_mca_int64_canfail">
<dimensions rank="1">
<dim value="256" index="1"/>
</dimensions>
<strategy mode="INIT" compression="true" rate="3" canfail="true" />
<datasource type="CLIENT">
<record name="mca_int_canfail"/>
</datasource>
</field>
<field units="" type="NX_UINT32" name="final_mca_uint32_canfail">
<dimensions rank="1">
<dim value="256" index="1"/>
</dimensions>
<strategy mode="FINAL" canfail="true"/>
<datasource type="CLIENT">
<record name="mca_uint_canfail"/>
</datasource>
</field>
<field units="" type="NX_INT32" name="init_mca_int32">
<dimensions rank="1"/>
<strategy mode="INIT" compression="true" rate="3"/>
<datasource type="CLIENT">
<record name="mca_iint"/>
</datasource>
</field>
</group>
</group>
</group>
</definition>
"""
tdw = self.openWriter(
fname, xml,
json='{"data": { "mca_int":' + str(self._mca1[0]) +
', "mca_iint":' + str(self._mca1[0]) + ' } }')
mca2 = [[(el + 100) // 2 for el in mca] for mca in self._mca1]
flip = True
for mca in self._mca1:
self.record(
tdw, '{"data": { "mca_int":' + str(mca) +
', "mca_uint":' + str([(el + 100) // 2 for el in mca]) +
(', "mca_int_canfail":' + str(mca) if flip else "") +
(', "mca_uint_canfail":' + str(
[(el + 100) // 2 for el in mca]) if flip else "") +
' } }')
flip = not flip
self.closeWriter(
tdw, json='{"data": { "mca_uint":' + str(mca2[0]) + ' } }')
# check the created file
FileWriter.writer = H5CppWriter
f = FileWriter.open_file(fname, readonly=True)
det = self._sc.checkFieldTree(f, fname, 18)
self._sc.checkSpectrumField(
det, "mca_int", "int64", "NX_INT", self._mca1)
self._sc.checkSpectrumField(
det, "mca_int8", "int8", "NX_INT8", self._mca1, grows=2)
self._sc.checkSpectrumField(
det, "mca_int16", "int16", "NX_INT16", self._mca1)
self._sc.checkSpectrumField(
det, "mca_int32", "int32", "NX_INT32", self._mca1, grows=2)
self._sc.checkSpectrumField(
det, "mca_int64", "int64", "NX_INT64", self._mca1)
self._sc.checkSpectrumField(
det, "mca_uint", "uint64", "NX_UINT", mca2)
self._sc.checkSpectrumField(
det, "mca_uint8", "uint8", "NX_UINT8", mca2, grows=2)
self._sc.checkSpectrumField(
det, "mca_uint16", "uint16", "NX_UINT16", mca2)
self._sc.checkSpectrumField(
det, "mca_uint32", "uint32", "NX_UINT32", mca2, grows=2)
self._sc.checkSpectrumField(
det, "mca_uint64", "uint64", "NX_UINT64", mca2)
self._sc.checkSpectrumField(
det, "mca_int64_dim", "int64", "NX_INT64", self._mca1)
self._sc.checkSingleSpectrumField(
det, "init_mca_int64", "int64", "NX_INT64", self._mca1[0])
self._sc.checkSingleSpectrumField(
det, "init_mca_int32", "int32", "NX_INT32", self._mca1[0])
self._sc.checkSingleSpectrumField(
det, "final_mca_uint32", "uint32", "NX_UINT32", mca2[0])
self._sc.checkSpectrumField(
det, "mca_int16_canfail", "int16", "NX_INT16",
[[(self._mca1[j][i] if not j % 2 else
numpy.iinfo(getattr(numpy, 'int16')).max)
for i in range(len(self._mca1[j]))]
for j in range(len(self._mca1))],
grows=1,
attrs={
"type": "NX_INT16", "units": "",
"nexdatas_strategy": "STEP", "nexdatas_source": None,
"nexdatas_canfail": "FAILED",
"nexdatas_canfail_error": None})
self._sc.checkSpectrumField(
det, "mca_uint32_canfail", "uint32", "NX_UINT32",
[[((self._mca1[j][i] + 100) // 2 if not j % 2 else
numpy.iinfo(getattr(numpy, 'uint32')).max)
for i in range(len(self._mca1[j]))]
for j in range(len(self._mca1))],
grows=2, attrs={
"type": "NX_UINT32", "units": "",
"nexdatas_strategy": "STEP",
"nexdatas_source": None,
"nexdatas_canfail": "FAILED",
"nexdatas_canfail_error": None})
self._sc.checkSingleSpectrumField(
det, "final_mca_uint32_canfail", "uint32", "NX_UINT32",
[numpy.iinfo(getattr(numpy, 'uint32')).max] * 256,
attrs={"type": "NX_UINT32", "units": "", "nexdatas_source": None,
"nexdatas_strategy": "FINAL", "nexdatas_canfail": "FAILED",
"nexdatas_canfail_error": None})
self._sc.checkSingleSpectrumField(
det, "init_mca_int64_canfail", "int64", "NX_INT64",
[numpy.iinfo(getattr(numpy, 'int64')).max] * 256,
attrs={"type": "NX_INT64", "units": "", "nexdatas_source": None,
"nexdatas_strategy": "INIT", "nexdatas_canfail": "FAILED",
"nexdatas_canfail_error": None})
f.close()
os.remove(fname)
# scanRecord test
# \brief It tests recording of simple h5 file
def test_clientFloatSpectrum(self):
fun = sys._getframe().f_code.co_name
print("Run: %s.%s() " % (self.__class__.__name__, fun))
fname = '%s/%s%s.h5' % (os.getcwd(), self.__class__.__name__, fun)
xml = """<definition>
<group type="NXentry" name="entry1">
<group type="NXinstrument" name="instrument">
<group type="NXdetector" name="detector">
<field units="" type="NX_FLOAT" name="mca_float">
<dimensions rank="1">
<dim value="1024" index="1"/>
</dimensions>
<strategy mode="STEP" compression="true" rate="3"/>
<datasource type="CLIENT">
<record name="mca_float"/>
</datasource>
</field>
<field units="" type="NX_FLOAT32" name="mca_float32">
<dimensions rank="1">
<dim value="1024" index="1"/>
</dimensions>
<strategy mode="STEP" compression="true" grows="2" shuffle="true"/>
<datasource type="CLIENT">
<record name="mca_float"/>
</datasource>
</field>
<field units="" type="NX_FLOAT64" name="mca_float64">
<dimensions rank="1">
<dim value="1024" index="1"/>
</dimensions>
<strategy mode="STEP" grows="2"/>
<datasource type="CLIENT">
<record name="mca_float"/>
</datasource>
</field>
<field units="" type="NX_FLOAT32" name="mca_float32_canfail">
<dimensions rank="1">
<dim value="1024" index="1"/>
</dimensions>
<strategy mode="STEP" compression="true" grows="2" shuffle="true"
canfail="true"/>
<datasource type="CLIENT">
<record name="mca_float_canfail"/>
</datasource>
</field>
<field units="" type="NX_FLOAT64" name="mca_float64_canfail">
<dimensions rank="1">
<dim value="1024" index="1"/>
</dimensions>
<strategy mode="STEP" grows="1" canfail="true"/>
<datasource type="CLIENT">
<record name="mca_float_canfail"/>
</datasource>
</field>
<field units="" type="NX_NUMBER" name="mca_number">
<dimensions rank="1">
<dim value="1024" index="1"/>
</dimensions>
<strategy mode="STEP" />
<datasource type="CLIENT">
<record name="mca_float"/>
</datasource>
</field>
<field units="" type="NX_FLOAT" name="mca_float_dim">
<dimensions rank="1"/>
<strategy mode="STEP" />
<datasource type="CLIENT">
<record name="mca_float"/>
</datasource>
</field>
<field units="" type="NX_FLOAT32" name="init_mca_float32">
<dimensions rank="1">
<dim value="1024" index="1"/>
</dimensions>
<strategy mode="INIT" compression="true" shuffle="true"/>
<datasource type="CLIENT">
<record name="mca_float"/>
</datasource>
</field>
<field units="" type="NX_FLOAT64" name="final_mca_float64">
<dimensions rank="1">
<dim value="1024" index="1"/>
</dimensions>
<strategy mode="FINAL" />
<datasource type="CLIENT">
<record name="mca_float"/>
</datasource>
</field>
<field units="" type="NX_FLOAT32" name="init_mca_float32_canfail">
<dimensions rank="1">
<dim value="1024" index="1"/>
</dimensions>
<strategy mode="INIT" compression="true" shuffle="true"
canfail="true"/>
<datasource type="CLIENT">
<record name="mca_float_canfail"/>
</datasource>
</field>
<field units="" type="NX_FLOAT64" name="final_mca_float64_canfail">
<dimensions rank="1">
<dim value="1024" index="1"/>
</dimensions>
<strategy mode="FINAL" canfail="true" />
<datasource type="CLIENT">
<record name="mca_float_canfail"/>
</datasource>
</field>
<field units="" type="NX_FLOAT" name="final_mca_float">
<dimensions rank="1"/>
<strategy mode="FINAL" />
<datasource type="CLIENT">
<record name="mca_float"/>
</datasource>
</field>
</group>
</group>
</group>
</definition>
"""
tdw = self.openWriter(
fname, xml,
json='{"data": { "mca_float":' + str(self._fmca1[0]) + ' } }')
flip = True
for mca in self._fmca1:
self.record(
tdw,
'{"data": { "mca_float":' + str(mca) +
(', "mca_float_canfail":' + str(mca) if flip else "") +
' } }')
flip = not flip
self.closeWriter(
tdw,
json='{"data": { "mca_float":' + str(self._fmca1[0]) + ' } }')
# check the created file
FileWriter.writer = H5CppWriter
f = FileWriter.open_file(fname, readonly=True)
det = self._sc.checkFieldTree(f, fname, 12)
self._sc.checkSpectrumField(
det, "mca_float", "float64", "NX_FLOAT", self._fmca1,
error=1.0e-14)
self._sc.checkSpectrumField(
det, "mca_float_dim", "float64", "NX_FLOAT", self._fmca1,
error=1.0e-14)
self._sc.checkSpectrumField(
det, "mca_float32", "float32", "NX_FLOAT32", self._fmca1,
error=1.0e-6, grows=2)
self._sc.checkSpectrumField(
det, "mca_float64", "float64", "NX_FLOAT64", self._fmca1,
error=1.0e-14, grows=2)
self._sc.checkSpectrumField(
det, "mca_number", "float64", "NX_NUMBER", self._fmca1,
error=1.0e-14)
self._sc.checkSingleSpectrumField(
det, "init_mca_float32", "float32", "NX_FLOAT32", self._fmca1[0],
error=1.0e-6)
self._sc.checkSingleSpectrumField(
det, "final_mca_float64", "float64", "NX_FLOAT64", self._fmca1[0],
error=1.0e-14)
self._sc.checkSingleSpectrumField(
det, "final_mca_float", "float64", "NX_FLOAT", self._fmca1[0],
error=1.0e-14)
self._sc.checkSingleSpectrumField(
det, "init_mca_float32_canfail", "float32", "NX_FLOAT32",
[numpy.finfo(getattr(numpy, 'float32')).max] * len(self._fmca1[0]),
attrs={
"type": "NX_FLOAT32", "units": "", "nexdatas_source": None,
"nexdatas_strategy": "INIT", "nexdatas_canfail": "FAILED",
"nexdatas_canfail_error": None})
self._sc.checkSingleSpectrumField(
det, "final_mca_float64_canfail", "float64", "NX_FLOAT64",
[numpy.finfo(getattr(numpy, 'float64')).max] * len(self._fmca1[0]),
attrs={
"type": "NX_FLOAT64", "units": "", "nexdatas_source": None,
"nexdatas_strategy": "FINAL", "nexdatas_canfail": "FAILED",
"nexdatas_canfail_error": None})
self._sc.checkSpectrumField(
det, "mca_float32_canfail", "float32", "NX_FLOAT32",
[[(self._fmca1[j][i] if not j % 2 else
numpy.finfo(getattr(numpy, 'float32')).max)
for i in range(
len(self._fmca1[j]))] for j in range(len(self._fmca1))],
grows=2,
attrs={
"type": "NX_FLOAT32", "units": "", "nexdatas_source": None,
"nexdatas_strategy": "STEP", "nexdatas_canfail": "FAILED",
"nexdatas_canfail_error": None},
error=1.0e-6)
self._sc.checkSpectrumField(
det, "mca_float64_canfail", "float64", "NX_FLOAT64",
[[(self._fmca1[j][i] if not j % 2 else
numpy.finfo(getattr(numpy, 'float64')).max)
for i in range(
len(self._fmca1[j]))] for j in range(len(self._fmca1))],
grows=1,
attrs={
"type": "NX_FLOAT64", "units": "", "nexdatas_source": None,
"nexdatas_strategy": "STEP", "nexdatas_canfail": "FAILED",
"nexdatas_canfail_error": None},
error=1.0e-6)
f.close()
os.remove(fname)
# scanRecord test
# \brief It tests recording of simple h5 file
def test_clientSpectrum(self):
fun = sys._getframe().f_code.co_name
print("Run: %s.%s() " % (self.__class__.__name__, fun))
fname = '%s/%s%s.h5' % (os.getcwd(), self.__class__.__name__, fun)
xml = """<definition>
<group type="NXentry" name="entry1">
<group type="NXinstrument" name="instrument">
<group type="NXdetector" name="detector">
<field units="" type="NX_DATE_TIME" name="time">
<strategy mode="STEP" compression="true" rate="3"/>
<dimensions rank="1">
<dim value="4" index="1"/>
</dimensions>
<datasource type="CLIENT">
<record name="timestamps"/>
</datasource>
</field>
<field units="" type="ISO8601" name="isotime">
<strategy mode="STEP" compression="true" grows="2" shuffle="true"/>
<dimensions rank="1">
<dim value="4" index="1"/>
</dimensions>
<datasource type="CLIENT">
<record name="timestamps"/>
</datasource>
</field>
<field units="" type="NX_CHAR" name="string_time">
<strategy mode="STEP" grows="2"/>
<datasource type="CLIENT">
<record name="timestamps"/>
</datasource>
<dimensions rank="1">
<dim value="4" index="1"/>
</dimensions>
</field>
<field units="" type="NX_BOOLEAN" name="flags">
<strategy mode="STEP"/>
<dimensions rank="1">
<dim value="4" index="1"/>
</dimensions>
<datasource type="CLIENT">
<record name="logicals"/>
</datasource>
</field>
<field units="" type="NX_CHAR" name="string_time_canfail">
<strategy mode="STEP" grows="2" canfail="true"/>
<datasource type="CLIENT">
<record name="timestamps_canfail"/>
</datasource>
<dimensions rank="1">
<dim value="4" index="1"/>
</dimensions>
</field>
<field units="" type="NX_BOOLEAN" name="flags_canfail">
<strategy mode="STEP" canfail="true"/>
<dimensions rank="1">
<dim value="4" index="1"/>
</dimensions>
<datasource type="CLIENT">
<record name="logicals_canfail"/>
</datasource>
</field>
<field units="" type="NX_BOOLEAN" name="bool_flags">
<strategy mode="STEP"/>
<dimensions rank="1">
<dim value="4" index="1"/>
</dimensions>
<datasource type="CLIENT">
<record name="bool"/>
</datasource>
</field>
<field units="" type="NX_BOOLEAN" name="flags_dim">
<strategy mode="STEP"/>
<dimensions rank="1" />
<datasource type="CLIENT">
<record name="logicals"/>
</datasource>
</field>
<field units="" type="NX_CHAR" name="string_time_dim">
<strategy mode="STEP" grows="2"/>
<datasource type="CLIENT">
<record name="timestamps"/>
</datasource>
<dimensions rank="1"/>
</field>
<field units="" type="NX_CHAR" name="init_string_time">
<strategy mode="INIT" compression="true" shuffle="true"/>
<datasource type="CLIENT">
<record name="timestamps"/>
</datasource>
<dimensions rank="1">
<dim value="4" index="1"/>
</dimensions>
</field>
<field units="" type="NX_BOOLEAN" name="final_flags">
<strategy mode="FINAL"/>
<dimensions rank="1">
<dim value="4" index="1"/>
</dimensions>
<strategy mode="FINAL" />
<datasource type="CLIENT">
<record name="logicals"/>
</datasource>
</field>
<field units="" type="NX_CHAR" name="init_string_time_canfail">
<strategy mode="INIT" compression="true" shuffle="true"
canfail="true"/>
<datasource type="CLIENT">
<record name="timestamps_canfail"/>
</datasource>
<dimensions rank="1">
<dim value="4" index="1"/>
</dimensions>
</field>
<field units="" type="NX_BOOLEAN" name="final_flags_canfail">
<dimensions rank="1">
<dim value="4" index="1"/>
</dimensions>
<strategy mode="FINAL" canfail="true" />
<datasource type="CLIENT">
<record name="logicals_canfail"/>
</datasource>
</field>
<field units="" type="NX_BOOLEAN" name="init_flags">
<strategy mode="INIT"/>
<dimensions rank="1" />
<strategy mode="FINAL" />
<datasource type="CLIENT">
<record name="logicals"/>
</datasource>
</field>
<field units="" type="NX_CHAR" name="final_string_time">
<strategy mode="FINAL" compression="true" shuffle="true"/>
<datasource type="CLIENT">
<record name="timestamps"/>
</datasource>
<dimensions rank="1" />
</field>
</group>
</group>
</group>
</definition>
"""
dates = [
["1996-07-31T21:15:22.123+0600", "2012-11-14T14:05:23.2344-0200",
"2014-02-04T04:16:12.43-0100", "2012-11-14T14:05:23.2344-0200"],
["1956-05-23T12:12:32.123+0400", "2212-12-12T12:25:43.1267-0700",
"1914-11-04T04:13:13.44-0000", "2002-04-03T14:15:03.0012-0300"]]
logical = [["1", "0", "true", "false"],
["True", "False", "TrUe", "FaLsE"]]
# print "CHECK:", '{"data": { "timestamps":' +
# str(dates[0]).replace("'","\"") + ' } }'
bools = ["[true, false, true, false]", "[true, false, true, false]"]
tdw = self.openWriter(
fname, xml, json='{"data": {' +
' "timestamps":' +
str(dates[0]).replace("'", "\"") +
', "logicals":' +
str(logical[0]).replace("'", "\"") +
' } }')
flip = True
for i in range(min(len(dates), len(logical))):
self.record(
tdw, '{"data": {"timestamps":' +
str(dates[i]).replace("'", "\"") +
', "logicals":' + str(logical[i]).replace("'", "\"") +
(', "logicals_canfail":' + str(logical[i]).replace("'", "\"")
if flip else '') +
(', "timestamps_canfail":' + str(dates[i]).replace("'", "\"")
if flip else '') +
', "bool":' + bools[i] +
' } }')
flip = not flip
self.closeWriter(tdw, json='{"data": {' +
' "timestamps":' +
str(dates[0]).replace("'", "\"") +
', "logicals":' +
str(logical[0]).replace("'", "\"") +
' } }')
# check the created file
FileWriter.writer = H5CppWriter
f = FileWriter.open_file(fname, readonly=True)
det = self._sc.checkFieldTree(f, fname, 15)
self._sc.checkSpectrumField(
det, "bool_flags", "bool", "NX_BOOLEAN", logical)
self._sc.checkSpectrumField(
det, "time", "string", "NX_DATE_TIME", dates)
self._sc.checkSpectrumField(
det, "string_time", "string", "NX_CHAR", dates, grows=2)
self._sc.checkSpectrumField(
det, "flags", "bool", "NX_BOOLEAN", logical)
self._sc.checkSpectrumField(
det, "isotime", "string", "ISO8601", dates, grows=2)
self._sc.checkSpectrumField(
det, "string_time_dim", "string", "NX_CHAR", dates, grows=2)
self._sc.checkSingleSpectrumField(
det, "init_string_time", "string", "NX_CHAR", dates[0])
self._sc.checkSingleSpectrumField(
det, "final_flags", "bool", "NX_BOOLEAN", logical[0])
self._sc.checkSingleSpectrumField(
det, "init_string_time_canfail",
"string", "NX_CHAR", [''] * len(dates[0]),
attrs={"type": "NX_CHAR", "units": "", "nexdatas_source": None,
"nexdatas_strategy": "INIT", "nexdatas_canfail": "FAILED",
"nexdatas_canfail_error": None})
self._sc.checkSingleSpectrumField(
det, "final_flags_canfail", "bool",
"NX_BOOLEAN", [False] * len(logical[0]),
attrs={
"type": "NX_BOOLEAN", "units": "", "nexdatas_source": None,
"nexdatas_strategy": "FINAL", "nexdatas_canfail": "FAILED",
"nexdatas_canfail_error": None})
self._sc.checkSingleSpectrumField(
det, "final_string_time", "string", "NX_CHAR", dates[0])
self._sc.checkSingleSpectrumField(
det, "init_flags", "bool", "NX_BOOLEAN", logical[0])
self._sc.checkSpectrumField(
det, "flags_canfail", "bool", "NX_BOOLEAN",
[[(logical[j][i] if not j % 2 else False)
for i in range(len(logical[j]))] for j in range(len(logical))],
grows=1,
attrs={
"type": "NX_BOOLEAN", "units": "", "nexdatas_source": None,
"nexdatas_strategy": "STEP", "nexdatas_canfail": "FAILED",
"nexdatas_canfail_error": None})
self._sc.checkSpectrumField(
det, "string_time_canfail", "string", "NX_CHAR",
[[(dates[j][i] if not j % 2 else '')
for i in range(len(dates[j]))] for j in range(len(dates))],
attrs={
"type": "NX_CHAR", "units": "", "nexdatas_source": None,
"nexdatas_canfail": "FAILED",
"nexdatas_canfail_error": None, "nexdatas_strategy": "STEP"},
grows=2
)
f.close()
os.remove(fname)
# scanRecord test
# \brief It tests recording of simple h5 file
def test_clientAttrSpectrum(self):
fun = sys._getframe().f_code.co_name
print("Run: %s.%s() " % (self.__class__.__name__, fun))
fname = '%s/%s%s.h5' % (os.getcwd(), self.__class__.__name__, fun)
xml = """<definition>
<group type="NXentry" name="entry1">
<group type="NXinstrument" name="instrument">
<group type="NXdetector" name="detector">
<attribute type="NX_FLOAT" name="spectrum_float">
<dimensions rank="1">
<dim value="1024" index="1"/>
</dimensions>
<strategy mode="STEP"/>
<datasource type="CLIENT">
<record name="mca_float"/>
</datasource>
</attribute>
<attribute type="NX_INT32" name="init_spectrum_int32">
<dimensions rank="1" />
<strategy mode="INIT"/>
<datasource type="CLIENT">
<record name="mca_int"/>
</datasource>
</attribute>
<attribute type="NX_BOOLEAN" name="spectrum_bool">
<dimensions rank="1">
<dim value="8" index="1"/>
</dimensions>
<strategy mode="STEP"/>
<datasource type="CLIENT">
<record name="flags"/>
</datasource>
</attribute>
<attribute type="NX_UINT64" name="spectrum_uint64_canfail">
<dimensions rank="1">
<dim value="1024" index="1"/>
</dimensions>
<strategy mode="STEP" canfail="true"/>
<datasource type="CLIENT">
<record name="mca_uint_canfail"/>
</datasource>
</attribute>
<attribute type="NX_BOOLEAN" name="spectrum_bool_canfail">
<dimensions rank="1">
<dim value="8" index="1"/>
</dimensions>
<strategy mode="INIT" canfail="true"/>
<datasource type="CLIENT">
<record name="flags_canfail"/>
</datasource>
</attribute>
</group>
<field type="NX_FLOAT" name="counter">
<attribute type="NX_FLOAT32" name="spectrum_float32">
<dimensions rank="1" />
<strategy mode="STEP"/>
<datasource type="CLIENT">
<record name="mca_float"/>
</datasource>
</attribute>
<attribute type="NX_UINT64" name="final_spectrum_uint64">
<dimensions rank="1">
<dim value="256" index="1"/>
</dimensions>
<strategy mode="FINAL"/>
<datasource type="CLIENT">
<record name="mca_uint"/>
</datasource>
</attribute>
<attribute type="NX_BOOLEAN" name="init_spectrum_bool">
<dimensions rank="1">
<dim value="8" index="1"/>
</dimensions>
<strategy mode="INIT"/>
<datasource type="CLIENT">
<record name="flags"/>
</datasource>
</attribute>
<attribute type="NX_UINT64" name="final_spectrum_uint64_canfail">
<dimensions rank="1">
<dim value="256" index="1"/>
</dimensions>
<strategy mode="FINAL" canfail="true"/>
<datasource type="CLIENT">
<record name="mca_uint_canfail"/>
</datasource>
</attribute>
<attribute type="NX_BOOLEAN" name="init_spectrum_bool_canfail">
<dimensions rank="1">
<dim value="8" index="1"/>
</dimensions>
<strategy mode="INIT" canfail="true"/>
<datasource type="CLIENT">
<record name="flags_canfail"/>
</datasource>
</attribute>
1.2
</field>
</group>
</group>
</definition>
"""
# <attribute type="NX_CHAR" name="flag_spectrum_string">
# <dimensions rank="1">
# <dim value="8" index="1"/>
# </dimensions>
# <strategy mode="STEP"/>
# <datasource type="CLIENT">
# <record name="flags"/>
# </datasource>
# </attribute>
logical = ["1", "0", "true", "false", "True", "False", "TrUe", "FaLsE"]
tdw = self.openWriter(
fname, xml, json='{"data": {' +
' "mca_float":' + str(self._fmca1[0]) +
', "flags":' +
str(logical).replace("'", "\"") +
', "mca_int":' + str(self._mca1[0]) +
' } }')
steps = len(self._fmca1)
flip = True
for i in range(steps):
self.record(
tdw, '{"data": {' +
' "mca_float":' + str(self._fmca1[i]) +
', "flags":' + str(logical).replace("'", "\"") +
' } }')
flip = not flip
self.closeWriter(
tdw, json='{"data": {' +
' "mca_float":' + str(self._fmca1[0]) +
', "mca_int":' + str(self._mca1[0]) +
', "flags":' + str(logical).replace("'", "\"") +
', "mca_uint":' + str(self._mca2[0]) +
' } }')
# check the created file
FileWriter.writer = H5CppWriter
f = FileWriter.open_file(fname, readonly=True)
det, field = self._sc.checkAttributeTree(f, fname, 7, 7)
self._sc.checkSpectrumAttribute(
det, "spectrum_float", "float64", self._fmca1[steps - 1],
error=1.e-14)
self._sc.checkSpectrumAttribute(
det, "init_spectrum_int32", "int32", self._mca1[0])
self._sc.checkSpectrumAttribute(det, "spectrum_bool", "bool", logical)
self._sc.checkSpectrumAttribute(
field, "spectrum_float32", "float32", self._fmca1[steps - 1],
error=1.e-6)
self._sc.checkSpectrumAttribute(
field, "final_spectrum_uint64", "uint64", self._mca2[0])
self._sc.checkSpectrumAttribute(
field, "init_spectrum_bool", "bool", logical)
# NOT SUPPORTED BY PNINX
# self._sc.checkSpectrumAttribute(field, "flag_spectrum_string", "string",
# logical)
self._sc.checkSpectrumAttribute(
det, "spectrum_uint64_canfail", "uint64",
[numpy.iinfo(getattr(numpy, 'int64')).max] * 1024)
self._sc.checkSpectrumAttribute(
det, "spectrum_bool_canfail", "bool",
[False] * 8)
self._sc.checkSpectrumAttribute(
field, "final_spectrum_uint64_canfail", "uint64",
[numpy.iinfo(getattr(numpy, 'int64')).max] * 256)
self._sc.checkSpectrumAttribute(
field, "init_spectrum_bool_canfail", "bool",
[False] * 8)
self._sc.checkScalarAttribute(
det, "nexdatas_canfail", "string", "FAILED")
self._sc.checkScalarAttribute(
field, "nexdatas_canfail", "string", "FAILED")
f.close()
os.remove(fname)
# scanRecord test
# \brief It tests recording of simple h5 file
def test_clientIntImage(self):
fun = sys._getframe().f_code.co_name
print("Run: %s.%s() " % (self.__class__.__name__, fun))
fname = '%s/%s%s.h5' % (os.getcwd(), self.__class__.__name__, fun)
xml = """<definition>
<group type="NXentry" name="entry1">
<group type="NXinstrument" name="instrument">
<group type="NXdetector" name="detector">
<field units="" type="NX_INT" name="pco_int">
<dimensions rank="2">
<dim value="10" index="1"/>
<dim value="8" index="2"/>
</dimensions>
<strategy mode="STEP"/>
<datasource type="CLIENT">
<record name="pco_int"/>
</datasource>
</field>
<field units="" type="NX_INT8" name="pco_int8">
<dimensions rank="2">
<dim value="10" index="1"/>
<dim value="8" index="2"/>
</dimensions>
<strategy mode="STEP" grows="2"/>
<datasource type="CLIENT">
<record name="pco_int"/>
</datasource>
</field>
<field units="" type="NX_INT16" name="pco_int16">
<dimensions rank="2" />
<strategy mode="STEP" compression="true" grows="3"/>
<datasource type="CLIENT">
<record name="pco_int"/>
</datasource>
</field>
<field units="" type="NX_INT32" name="pco_int32">
<dimensions rank="2">
<dim value="10" index="1"/>
<dim value="8" index="2"/>
</dimensions>
<strategy mode="STEP" compression="true"
grows="2" shuffle="false" />
<datasource type="CLIENT">
<record name="pco_int"/>
</datasource>
</field>
<field units="" type="NX_INT64" name="pco_int64">
<dimensions rank="2">
<dim value="10" index="1"/>
<dim value="8" index="2"/>
</dimensions>
<strategy mode="STEP" compression="true" rate="3"/>
<datasource type="CLIENT">
<record name="pco_int"/>
</datasource>
</field>
<field units="" type="NX_UINT" name="pco_uint">
<dimensions rank="2">
<dim value="10" index="1"/>
<dim value="8" index="2"/>
</dimensions>
<strategy mode="STEP"/>
<datasource type="CLIENT">
<record name="pco_uint"/>
</datasource>
</field>
<field units="" type="NX_UINT64" name="pco_uint64">
<dimensions rank="2">
<dim value="10" index="1"/>
<dim value="8" index="2"/>
</dimensions>
<strategy mode="STEP" compression="true" rate="3" grows="3"/>
<datasource type="CLIENT">
<record name="pco_uint"/>
</datasource>
</field>
<field units="" type="NX_UINT8" name="pco_uint8">
<dimensions rank="2">
<dim value="10" index="1"/>
<dim value="8" index="2"/>
</dimensions>
<strategy mode="STEP" grows="3"/>
<datasource type="CLIENT">
<record name="pco_uint"/>
</datasource>
</field>
<field units="" type="NX_UINT16" name="pco_uint16">
<dimensions rank="2">
<dim value="10" index="1"/>
<dim value="8" index="2"/>
</dimensions>
<strategy mode="STEP" compression="true"/>
<datasource type="CLIENT">
<record name="pco_uint"/>
</datasource>
</field>
<field units="" type="NX_UINT32" name="pco_uint32">
<dimensions rank="2">
<dim value="10" index="1"/>
<dim value="8" index="2"/>
</dimensions>
<strategy mode="STEP" compression="true"
grows="2" shuffle="false" />
<datasource type="CLIENT">
<record name="pco_uint"/>
</datasource>
</field>
<field units="" type="NX_UINT8" name="pco_uint8_canfail">
<dimensions rank="2">
<dim value="10" index="1"/>
<dim value="8" index="2"/>
</dimensions>
<strategy mode="STEP" grows="3" canfail="true"/>
<datasource type="CLIENT">
<record name="pco_uint_canfail"/>
</datasource>
</field>
<field units="" type="NX_UINT16" name="pco_uint16_canfail">
<dimensions rank="2">
<dim value="10" index="1"/>
<dim value="8" index="2"/>
</dimensions>
<strategy mode="STEP" compression="true" canfail="true"/>
<datasource type="CLIENT">
<record name="pco_uint_canfail"/>
</datasource>
</field>
<field units="" type="NX_UINT32" name="pco_uint32_canfail">
<dimensions rank="2">
<dim value="10" index="1"/>
<dim value="8" index="2"/>
</dimensions>
<strategy mode="STEP" compression="true" grows="2"
shuffle="false" canfail="true"/>
<datasource type="CLIENT">
<record name="pco_uint_canfail"/>
</datasource>
</field>
<field units="" type="NX_INT64" name="init_pco_int64">
<dimensions rank="2" />
<strategy mode="INIT" compression="true" rate="3"/>
<datasource type="CLIENT">
<record name="pco_int"/>
</datasource>
</field>
<field units="" type="NX_UINT" name="final_pco_uint">
<dimensions rank="2">
<dim value="10" index="1"/>
<dim value="8" index="2"/>
</dimensions>
<strategy mode="FINAL"/>
<datasource type="CLIENT">
<record name="pco_uint"/>
</datasource>
</field>
<field units="" type="NX_INT64" name="init_pco_int64_canfail">
<dimensions rank="2">
<dim value="10" index="1"/>
<dim value="8" index="2"/>
</dimensions>
<strategy mode="INIT" compression="true" rate="3" canfail="true"/>
<datasource type="CLIENT">
<record name="pco_int_canfail"/>
</datasource>
</field>
<field units="" type="NX_UINT" name="final_pco_uint_canfail">
<dimensions rank="2">
<dim value="10" index="1"/>
<dim value="8" index="2"/>
</dimensions>
<strategy mode="FINAL" canfail="true"/>
<datasource type="CLIENT">
<record name="pco_uint_canfail"/>
</datasource>
</field>
</group>
</group>
</group>
</definition>
"""
tdw = self.openWriter(
fname, xml,
json='{"data": { "pco_int":' + str(self._pco1[0]) + ' } }')
pco2 = [[[(el + 100) // 2 for el in rpco] for rpco in pco]
for pco in self._pco1]
flip = True
for pco in self._pco1:
self.record(
tdw, '{"data": { "pco_int":' + str(pco) +
', "pco_uint":' +
str([[(el + 100) // 2 for el in rpco]
for rpco in pco]) +
(', "pco_uint_canfail":' + str(
[[(el + 100) // 2 for el in rpco]
for rpco in pco]) if flip else "") +
' } }')
flip = not flip
self.closeWriter(
tdw, json='{"data": { "pco_uint":' + str(pco2[0]) + ' } }')
# check the created file
FileWriter.writer = H5CppWriter
f = FileWriter.open_file(fname, readonly=True)
det = self._sc.checkFieldTree(f, fname, 17)
self._sc.checkImageField(
det, "pco_int", "int64", "NX_INT", self._pco1)
self._sc.checkImageField(
det, "pco_int8", "int8", "NX_INT8", self._pco1, grows=2)
self._sc.checkImageField(
det, "pco_int16", "int16", "NX_INT16", self._pco1, grows=3)
self._sc.checkImageField(
det, "pco_int32", "int32", "NX_INT32", self._pco1, grows=2)
self._sc.checkImageField(
det, "pco_int64", "int64", "NX_INT64", self._pco1)
self._sc.checkImageField(det, "pco_uint", "uint64", "NX_UINT", pco2)
self._sc.checkImageField(
det, "pco_uint8", "uint8", "NX_UINT8", pco2, grows=3)
self._sc.checkImageField(
det, "pco_uint16", "uint16", "NX_UINT16", pco2)
self._sc.checkImageField(
det, "pco_uint32", "uint32", "NX_UINT32", pco2, grows=2)
self._sc.checkImageField(
det, "pco_uint64", "uint64", "NX_UINT64", pco2, grows=3)
self._sc.checkSingleImageField(
det, "init_pco_int64", "int64", "NX_INT64", self._pco1[0])
self._sc.checkSingleImageField(
det, "final_pco_uint", "uint64", "NX_UINT", pco2[0])
# self._sc.checkSingleImageField(det, "init_pco_int64_canfail", "int64",
# "NX_INT64", self._pco1[0])
self._sc.checkSingleImageField(
det, "init_pco_int64_canfail", "int64", "NX_INT64",
[[numpy.iinfo(getattr(numpy, 'int64')).max for el in rpco]
for rpco in self._pco1[0]],
attrs={"type": "NX_INT64", "units": "", "nexdatas_source": None,
"nexdatas_strategy": "INIT", "nexdatas_canfail": "FAILED",
"nexdatas_canfail_error": None})
self._sc.checkSingleImageField(
det, "final_pco_uint_canfail", "uint64", "NX_UINT",
[[numpy.iinfo(getattr(numpy, 'int64')).max for el in rpco]
for rpco in self._pco1[0]],
attrs={"type": "NX_UINT", "units": "", "nexdatas_source": None,
"nexdatas_strategy": "FINAL", "nexdatas_canfail": "FAILED",
"nexdatas_canfail_error": None})
self._sc.checkImageField(
det, "pco_uint8_canfail", "uint8", "NX_UINT8",
[[[((el + 100) // 2 if not j % 2 else
numpy.iinfo(getattr(numpy, 'uint8')).max)
for el in rpco] for rpco in self._pco1[j]]
for j in range(len(self._pco1))],
grows=3,
attrs={"type": "NX_UINT8", "units": "", "nexdatas_source": None,
"nexdatas_strategy": "STEP", "nexdatas_canfail": "FAILED",
"nexdatas_canfail_error": None})
self._sc.checkImageField(
det, "pco_uint16_canfail", "uint16", "NX_UINT16",
[[[((el + 100) // 2 if not j % 2 else
numpy.iinfo(getattr(numpy, 'uint16')).max)
for el in rpco] for rpco in self._pco1[j]]
for j in range(len(self._pco1))],
grows=1,
attrs={"type": "NX_UINT16", "units": "", "nexdatas_source": None,
"nexdatas_strategy": "STEP", "nexdatas_canfail": "FAILED",
"nexdatas_canfail_error": None})
self._sc.checkImageField(
det, "pco_uint32_canfail", "uint32", "NX_UINT32",
[[[((el + 100) // 2 if not j % 2 else
numpy.iinfo(getattr(numpy, 'uint32')).max)
for el in rpco] for rpco in self._pco1[j]] for j in range(
len(self._pco1))],
grows=2,
attrs={"type": "NX_UINT32", "units": "", "nexdatas_source": None,
"nexdatas_strategy": "STEP", "nexdatas_canfail": "FAILED",
"nexdatas_canfail_error": None})
f.close()
os.remove(fname)
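# In these image tests, 'grows' selects the dimension along which STEP data
# is appended, while 'canfail' makes a missing client record non-fatal: the
# writer stores a fill value (numeric max, empty string or False) and marks
# the field with nexdatas_canfail="FAILED" attributes, which is what the
# canfail checks above and below assert.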
# scanRecord test
# \brief It tests recording of simple h5 file
def test_clientFloatImage(self):
fun = sys._getframe().f_code.co_name
print("Run: %s.%s() " % (self.__class__.__name__, fun))
fname = '%s/%s%s.h5' % (os.getcwd(), self.__class__.__name__, fun)
xml = """<definition>
<group type="NXentry" name="entry1">
<group type="NXinstrument" name="instrument">
<group type="NXdetector" name="detector">
<field units="" type="NX_FLOAT" name="pco_float">
<dimensions rank="2">
<dim value="20" index="1"/>
<dim value="30" index="2"/>
</dimensions>
<strategy mode="STEP" compression="true" rate="3"/>
<datasource type="CLIENT">
<record name="pco_float"/>
</datasource>
</field>
<field units="" type="NX_FLOAT32" name="pco_float32">
<dimensions rank="2">
<dim value="20" index="1"/>
<dim value="30" index="2"/>
</dimensions>
<strategy mode="STEP" compression="true" grows="2" shuffle="true"/>
<datasource type="CLIENT">
<record name="pco_float"/>
</datasource>
</field>
<field units="" type="NX_FLOAT64" name="pco_float64">
<dimensions rank="2" />
<strategy mode="STEP" grows="3"/>
<datasource type="CLIENT">
<record name="pco_float"/>
</datasource>
</field>
<field units="" type="NX_NUMBER" name="pco_number">
<dimensions rank="2">
<dim value="20" index="1"/>
<dim value="30" index="2"/>
</dimensions>
<strategy mode="STEP" grows = "1" />
<datasource type="CLIENT">
<record name="pco_float"/>
</datasource>
</field>
<field units="" type="NX_FLOAT32" name="pco_float32_canfail">
<dimensions rank="2">
<dim value="20" index="1"/>
<dim value="30" index="2"/>
</dimensions>
<strategy mode="STEP" compression="true" grows="2" shuffle="true"
canfail="true"/>
<datasource type="CLIENT">
<record name="pco_float_canfail"/>
</datasource>
</field>
<field units="" type="NX_FLOAT64" name="pco_float64_canfail">
<dimensions rank="2">
<dim value="20" index="1"/>
<dim value="30" index="2"/>
</dimensions>
<strategy mode="STEP" grows="3" canfail="true"/>
<datasource type="CLIENT">
<record name="pco_float_canfail"/>
</datasource>
</field>
<field units="" type="NX_NUMBER" name="pco_number_canfail">
<dimensions rank="2">
<dim value="20" index="1"/>
<dim value="30" index="2"/>
</dimensions>
<strategy mode="STEP" grows = "1" canfail="true"/>
<datasource type="CLIENT">
<record name="pco_float_canfail"/>
</datasource>
</field>
<field units="" type="NX_FLOAT32" name="init_pco_float32">
<dimensions rank="2">
<dim value="20" index="1"/>
<dim value="30" index="2"/>
</dimensions>
<strategy mode="INIT" compression="true" shuffle="true"/>
<datasource type="CLIENT">
<record name="pco_float"/>
</datasource>
</field>
<field units="" type="NX_FLOAT64" name="final_pco_float64">
<dimensions rank="2" />
<strategy mode="FINAL" />
<datasource type="CLIENT">
<record name="pco_float"/>
</datasource>
</field>
<field units="" type="NX_FLOAT32" name="init_pco_float32_canfail">
<dimensions rank="2">
<dim value="20" index="1"/>
<dim value="30" index="2"/>
</dimensions>
<strategy mode="INIT" compression="true" shuffle="true"
canfail="true"/>
<datasource type="CLIENT">
<record name="pco_float_canfail"/>
</datasource>
</field>
<field units="" type="NX_FLOAT64" name="final_pco_float64_canfail">
<dimensions rank="2">
<dim value="20" index="1"/>
<dim value="30" index="2"/>
</dimensions>
<strategy mode="FINAL" canfail="true" />
<datasource type="CLIENT">
<record name="pco_float_canfail"/>
</datasource>
</field>
</group>
</group>
</group>
</definition>
"""
tdw = self.openWriter(
fname, xml,
json='{"data": { "pco_float":' + str(self._fpco1[0]) + ' } }')
flip = True
for pco in self._fpco1:
self.record(
tdw,
'{"data": { "pco_float":' + str(pco) +
(', "pco_float_canfail":' + str(pco) if flip else "") +
' } }')
flip = not flip
self.closeWriter(
tdw,
json='{"data": { "pco_float":' + str(self._fpco1[0]) + ' } }')
# check the created file
FileWriter.writer = H5CppWriter
f = FileWriter.open_file(fname, readonly=True)
det = self._sc.checkFieldTree(f, fname, 11)
self._sc.checkImageField(
det, "pco_float", "float64", "NX_FLOAT", self._fpco1,
error=1.0e-14)
self._sc.checkImageField(
det, "pco_float32", "float32", "NX_FLOAT32", self._fpco1,
error=1.0e-6, grows=2)
self._sc.checkImageField(
det, "pco_float64", "float64", "NX_FLOAT64", self._fpco1,
error=1.0e-14, grows=3)
self._sc.checkImageField(
det, "pco_number", "float64", "NX_NUMBER", self._fpco1,
error=1.0e-14, grows=1)
self._sc.checkSingleImageField(
det, "init_pco_float32", "float32", "NX_FLOAT32", self._fpco1[0],
error=1.0e-6)
self._sc.checkSingleImageField(
det, "final_pco_float64", "float64", "NX_FLOAT64", self._fpco1[0],
error=1.0e-14)
self._sc.checkSingleImageField(
det, "init_pco_float32_canfail", "float32", "NX_FLOAT32",
[[numpy.finfo(getattr(numpy, 'float32')).max for el in rpco]
for rpco in self._fpco1[0]],
attrs={
"type": "NX_FLOAT32", "units": "", "nexdatas_source": None,
"nexdatas_strategy": "INIT", "nexdatas_canfail": "FAILED",
"nexdatas_canfail_error": None},
error=1.0e-6)
self._sc.checkSingleImageField(
det, "final_pco_float64_canfail", "float64", "NX_FLOAT64",
[[numpy.finfo(getattr(numpy, 'float64')).max for el in rpco]
for rpco in self._fpco1[0]],
attrs={
"type": "NX_FLOAT64", "units": "", "nexdatas_source": None,
"nexdatas_strategy": "FINAL", "nexdatas_canfail": "FAILED",
"nexdatas_canfail_error": None},
error=1.0e-14)
self._sc.checkImageField(
det, "pco_float32_canfail", "float32", "NX_FLOAT32",
[[[(el if not j % 2 else
numpy.finfo(getattr(numpy, 'float32')).max)
for el in rpco] for rpco in self._fpco1[j]] for j in range(
len(self._fpco1))],
grows=2,
attrs={
"type": "NX_FLOAT32", "units": "", "nexdatas_source": None,
"nexdatas_strategy": "STEP", "nexdatas_canfail": "FAILED",
"nexdatas_canfail_error": None},
error=1.0e-6)
self._sc.checkImageField(
det, "pco_float64_canfail", "float64", "NX_FLOAT64",
[[[(el if not j % 2 else
numpy.finfo(getattr(numpy, 'float64')).max)
for el in rpco] for rpco in self._fpco1[j]] for j in range(
len(self._fpco1))],
grows=3,
attrs={
"type": "NX_FLOAT64", "units": "", "nexdatas_source": None,
"nexdatas_strategy": "STEP", "nexdatas_canfail": "FAILED",
"nexdatas_canfail_error": None},
error=1.0e-14)
self._sc.checkImageField(
det, "pco_number_canfail", "float64", "NX_NUMBER",
[[[(el if not j % 2 else
numpy.finfo(getattr(numpy, 'float64')).max)
for el in rpco] for rpco in self._fpco1[j]] for j in range(
len(self._fpco1))],
grows=1,
attrs={"type": "NX_NUMBER", "units": "", "nexdatas_source": None,
"nexdatas_strategy": "STEP", "nexdatas_canfail": "FAILED",
"nexdatas_canfail_error": None},
error=1.0e-14)
f.close()
os.remove(fname)
# scanRecord test
# \brief It tests recording of simple h5 file
def test_clientImage(self):
fun = sys._getframe().f_code.co_name
print("Run: %s.%s() " % (self.__class__.__name__, fun))
fname = '%s/%s%s.h5' % (os.getcwd(), self.__class__.__name__, fun)
xml = """<definition>
<group type="NXentry" name="entry1">
<group type="NXinstrument" name="instrument">
<group type="NXdetector" name="detector">
<field units="" type="NX_DATE_TIME" name="time">
<strategy mode="STEP" compression="true" rate="3"/>
<dimensions rank="2">
<dim value="3" index="1"/>
<dim value="4" index="2"/>
</dimensions>
<datasource type="CLIENT">
<record name="timestamps"/>
</datasource>
</field>
<field units="" type="ISO8601" name="isotime">
<strategy mode="STEP" compression="true" grows="2" shuffle="true"/>
<dimensions rank="2" />
<datasource type="CLIENT">
<record name="timestamps"/>
</datasource>
</field>
<field units="" type="NX_CHAR" name="string_time">
<strategy mode="STEP" grows="2"/>
<datasource type="CLIENT">
<record name="timestamps"/>
</datasource>
<dimensions rank="2">
<dim value="3" index="1"/>
<dim value="4" index="2"/>
</dimensions>
</field>
<field units="" type="NX_BOOLEAN" name="flags">
<strategy mode="STEP"/>
<dimensions rank="2">
<dim value="3" index="1"/>
<dim value="4" index="2"/>
</dimensions>
<datasource type="CLIENT">
<record name="logicals"/>
</datasource>
</field>
<field units="" type="NX_DATE_TIME" name="time_canfail">
<strategy mode="STEP" compression="true" rate="3" canfail="true"/>
<dimensions rank="2">
<dim value="3" index="1"/>
<dim value="4" index="2"/>
</dimensions>
<datasource type="CLIENT">
<record name="timestamps_canfail"/>
</datasource>
</field>
<field units="" type="NX_CHAR" name="string_time_canfail">
<strategy mode="STEP" grows="2" canfail="true"/>
<datasource type="CLIENT">
<record name="timestamps_canfail"/>
</datasource>
<dimensions rank="2">
<dim value="3" index="1"/>
<dim value="4" index="2"/>
</dimensions>
</field>
<field units="" type="NX_BOOLEAN" name="flags_canfail">
<strategy mode="STEP" grows="3" canfail="true"/>
<dimensions rank="2">
<dim value="3" index="1"/>
<dim value="4" index="2"/>
</dimensions>
<datasource type="CLIENT">
<record name="logicals_canfail"/>
</datasource>
</field>
<field units="" type="NX_BOOLEAN" name="bool_flags">
<strategy mode="STEP"/>
<dimensions rank="2">
<dim value="3" index="1"/>
<dim value="4" index="2"/>
</dimensions>
<datasource type="CLIENT">
<record name="bool"/>
</datasource>
</field>
<field units="" type="NX_BOOLEAN" name="flags_dim">
<strategy mode="STEP"/>
<dimensions rank="2" />
<datasource type="CLIENT">
<record name="logicals"/>
</datasource>
</field>
<field units="" type="NX_CHAR" name="init_string_time">
<strategy mode="INIT" grows="2"/>
<datasource type="CLIENT">
<record name="timestamps"/>
</datasource>
<dimensions rank="2">
<dim value="3" index="1"/>
<dim value="4" index="2"/>
</dimensions>
</field>
<field units="" type="NX_BOOLEAN" name="final_flags">
<strategy mode="FINAL"/>
<dimensions rank="2">
<dim value="3" index="1"/>
<dim value="4" index="2"/>
</dimensions>
<datasource type="CLIENT">
<record name="logicals"/>
</datasource>
</field>
<field units="" type="NX_CHAR" name="final_string_time">
<strategy mode="FINAL" grows="2"/>
<datasource type="CLIENT">
<record name="timestamps"/>
</datasource>
<dimensions rank="2">
<dim value="3" index="1"/>
<dim value="4" index="2"/>
</dimensions>
</field>
<field units="" type="NX_BOOLEAN" name="init_flags">
<strategy mode="INIT"/>
<dimensions rank="2">
<dim value="3" index="1"/>
<dim value="4" index="2"/>
</dimensions>
<datasource type="CLIENT">
<record name="logicals"/>
</datasource>
</field>
<field units="" type="NX_CHAR" name="final_string_time_canfail">
<strategy mode="FINAL" canfail="true"/>
<datasource type="CLIENT">
<record name="timestamps_canfail"/>
</datasource>
<dimensions rank="2">
<dim value="3" index="1"/>
<dim value="4" index="2"/>
</dimensions>
</field>
<field units="" type="NX_BOOLEAN" name="init_flags_canfail">
<strategy mode="INIT" canfail="true"/>
<dimensions rank="2">
<dim value="3" index="1"/>
<dim value="4" index="2"/>
</dimensions>
<datasource type="CLIENT">
<record name="logicals_canfail"/>
</datasource>
</field>
</group>
</group>
</group>
</definition>
"""
dates = [
[["1996-07-31T21:15:22.123+0600", "2012-11-14T14:05:23.2344-0200",
"2014-02-04T04:16:12.43-0100", "2012-11-14T14:05:23.2344-0200"],
["1996-07-31T21:15:22.123+0600", "2012-11-14T14:05:23.2344-0200",
"2014-02-04T04:16:12.43-0100", "2012-11-14T14:05:23.2344-0200"],
["1996-07-31T21:15:22.123+0600", "2012-11-14T14:05:23.2344-0200",
"2014-02-04T04:16:12.43-0100", "2012-11-14T14:05:23.2344-0200"]],
[["956-05-23T12:12:32.123+0400", "1212-12-12T12:25:43.1267-0700",
"914-11-04T04:13:13.44-0000", "1002-04-03T14:15:03.0012-0300"],
["956-05-23T12:12:32.123+0400", "1212-12-12T12:25:43.1267-0700",
"914-11-04T04:13:13.44-0000", "1002-04-03T14:15:03.0012-0300"],
["956-05-23T12:12:32.123+0400", "1212-12-12T12:25:43.1267-0700",
"914-11-04T04:13:13.44-0000", "1002-04-03T14:15:03.0012-0300"]]]
logical = [
[["1", "0", "true", "false"],
["True", "False", "TrUe", "FaLsE"], ["1", "0", "0", "1"]],
[["0", "1", "true", "false"],
["TrUe", "1", "0", "FaLsE"], ["0", "0", "1", "0"]]]
bools = [
"[ [true,false,true,false], [true,false,true,false], "
"[true,false,false,true]]",
"[ [false,true,true,false], [true,true,false,false], "
"[false,false,true,false]]"]
tdw = self.openWriter(fname, xml, json='{"data": {' +
'"timestamps":' +
str(dates[0]).replace("'", "\"") +
', "logicals":' +
str(logical[0]).replace("'", "\"") +
' } }')
flip = True
for i in range(min(len(dates), len(logical))):
self.record(
tdw,
'{"data": {"timestamps":' + str(dates[i]).replace("'", "\"") +
(', "timestamps_canfail":' + str(
dates[i]).replace("'", "\"") if flip else "") +
(', "logicals_canfail":' + str(
logical[i]).replace("'", "\"") if flip else "") +
', "logicals":' + str(logical[i]).replace("'", "\"") +
', "bool":' + bools[i] +
' } }')
flip = not flip
self.closeWriter(tdw, json='{"data": {' +
'"timestamps":' +
str(dates[0]).replace("'", "\"") +
', "logicals":' +
str(logical[0]).replace("'", "\"") +
' } }')
# check the created file
FileWriter.writer = H5CppWriter
f = FileWriter.open_file(fname, readonly=True)
det = self._sc.checkFieldTree(f, fname, 15)
self._sc.checkImageField(det, "flags", "bool", "NX_BOOLEAN", logical)
self._sc.checkImageField(
det, "bool_flags", "bool", "NX_BOOLEAN", logical)
self._sc.checkImageField(det, "time", "string", "NX_DATE_TIME", dates)
self._sc.checkImageField(
det, "string_time", "string", "NX_CHAR", dates, grows=2)
self._sc.checkImageField(
det, "isotime", "string", "ISO8601", dates, grows=2)
self._sc.checkImageField(
det, "flags_dim", "bool", "NX_BOOLEAN", logical)
self._sc.checkSingleImageField(
det, "init_string_time", "string", "NX_CHAR", dates[0])
self._sc.checkSingleImageField(
det, "final_flags", "bool", "NX_BOOLEAN", logical[0])
self._sc.checkSingleImageField(
det, "final_string_time_canfail", "string", "NX_CHAR",
[['' for el in rpco] for rpco in dates[0]],
attrs={"type": "NX_CHAR", "units": "", "nexdatas_source": None,
"nexdatas_strategy": "FINAL", "nexdatas_canfail": "FAILED",
"nexdatas_canfail_error": None})
self._sc.checkSingleImageField(
det, "init_flags_canfail", "bool", "NX_BOOLEAN",
[[False for el in rpco] for rpco in logical[0]],
attrs={
"type": "NX_BOOLEAN", "units": "", "nexdatas_source": None,
"nexdatas_strategy": "INIT", "nexdatas_canfail": "FAILED",
"nexdatas_canfail_error": None})
self._sc.checkImageField(
det, "flags_canfail", "bool", "NX_BOOLEAN",
[[[(el if not j % 2 else False)
for el in rpco] for rpco in logical[j]] for j in range(
len(logical))],
grows=3,
attrs={
"type": "NX_BOOLEAN", "units": "", "nexdatas_source": None,
"nexdatas_strategy": "STEP", "nexdatas_canfail": "FAILED",
"nexdatas_canfail_error": None})
self._sc.checkImageField(
det, "string_time_canfail", "string", "NX_CHAR",
[[[(el if not j % 2 else '')
for el in rpco] for rpco in dates[j]] for j in range(
len(dates))],
attrs={
"type": "NX_CHAR", "units": "", "nexdatas_source": None,
"nexdatas_strategy": "STEP", "nexdatas_canfail": "FAILED",
"nexdatas_canfail_error": None},
grows=2)
self._sc.checkImageField(
det, "time_canfail", "string", "NX_DATE_TIME",
[[[(el if not j % 2 else '')
for el in rpco] for rpco in dates[j]] for j in range(
len(dates))],
attrs={
"type": "NX_DATE_TIME", "units": "", "nexdatas_source": None,
"nexdatas_strategy": "STEP", "nexdatas_canfail": "FAILED",
"nexdatas_canfail_error": None})
f.close()
os.remove(fname)
# <field units="" type="NX_DATE_TIME" name="time_canfail">
# <field units="" type="NX_CHAR" name="string_time_canfail">
# <field units="" type="NX_BOOLEAN" name="flags_canfail">
# scanRecord test
# \brief It tests recording of simple h5 file
def test_clientAttrImage(self):
fun = sys._getframe().f_code.co_name
print("Run: %s.%s() " % (self.__class__.__name__, fun))
fname = '%s/%s%s.h5' % (os.getcwd(), self.__class__.__name__, fun)
xml = """<definition>
<group type="NXentry" name="entry1">
<group type="NXinstrument" name="instrument">
<group type="NXdetector" name="detector">
<attribute type="NX_FLOAT" name="image_float">
<dimensions rank="2">
<dim value="20" index="1"/>
<dim value="30" index="2"/>
</dimensions>
<strategy mode="STEP"/>
<datasource type="CLIENT">
<record name="pco_float"/>
</datasource>
</attribute>
<attribute type="NX_INT" name="image_int">
<dimensions rank="2">
<dim value="10" index="1"/>
<dim value="8" index="2"/>
</dimensions>
<strategy mode="FINAL"/>
<datasource type="CLIENT">
<record name="pco_int"/>
</datasource>
</attribute>
<attribute type="NX_FLOAT" name="image_float_canfail">
<dimensions rank="2">
<dim value="20" index="1"/>
<dim value="30" index="2"/>
</dimensions>
<strategy mode="STEP" canfail="true"/>
<datasource type="CLIENT">
<record name="pco_float_canfail"/>
</datasource>
</attribute>
<attribute type="NX_INT" name="image_int_canfail">
<dimensions rank="2">
<dim value="10" index="1"/>
<dim value="8" index="2"/>
</dimensions>
<strategy mode="FINAL" canfail="true"/>
<datasource type="CLIENT">
<record name="pco_int_canfail"/>
</datasource>
</attribute>
<attribute type="NX_INT32" name="image_int32">
<dimensions rank="2">
<dim value="10" index="1"/>
<dim value="8" index="2"/>
</dimensions>
<strategy mode="STEP"/>
<datasource type="CLIENT">
<record name="pco_int"/>
</datasource>
</attribute>
<attribute type="NX_BOOLEAN" name="image_bool">
<dimensions rank="2">
<dim value="2" index="1"/>
<dim value="4" index="2"/>
</dimensions>
<strategy mode="STEP"/>
<datasource type="CLIENT">
<record name="flags"/>
</datasource>
</attribute>
</group>
<field type="NX_FLOAT" name="counter">
<attribute type="NX_FLOAT32" name="image_float32">
<dimensions rank="2">
<dim value="20" index="1"/>
<dim value="30" index="2"/>
</dimensions>
<strategy mode="STEP"/>
<datasource type="CLIENT">
<record name="pco_float"/>
</datasource>
</attribute>
<attribute type="NX_UINT32" name="image_uint32">
<dimensions rank="2">
<dim value="10" index="1"/>
<dim value="8" index="2"/>
</dimensions>
<strategy mode="STEP"/>
<datasource type="CLIENT">
<record name="pco_int"/>
</datasource>
</attribute>
<attribute type="NX_UINT64" name="image_uint64">
<dimensions rank="2">
<dim value="10" index="1"/>
<dim value="8" index="2"/>
</dimensions>
<strategy mode="FINAL"/>
<datasource type="CLIENT">
<record name="pco_int"/>
</datasource>
</attribute>
<attribute type="NX_BOOLEAN" name="image_bool">
<dimensions rank="2">
<dim value="2" index="1"/>
<dim value="4" index="2"/>
</dimensions>
<strategy mode="INIT"/>
<datasource type="CLIENT">
<record name="flags"/>
</datasource>
</attribute>
<attribute type="NX_UINT64" name="image_uint64_canfail">
<dimensions rank="2">
<dim value="10" index="1"/>
<dim value="8" index="2"/>
</dimensions>
<strategy mode="FINAL" canfail="true"/>
<datasource type="CLIENT">
<record name="pco_int_canfail"/>
</datasource>
</attribute>
<attribute type="NX_BOOLEAN" name="image_bool_canfail">
<dimensions rank="2">
<dim value="2" index="1"/>
<dim value="4" index="2"/>
</dimensions>
<strategy mode="INIT" canfail="true"/>
<datasource type="CLIENT">
<record name="flags_canfail"/>
</datasource>
</attribute>
1.2
</field>
</group>
</group>
</definition>
"""
# <attribute type="NX_CHAR" name="flag_spectrum_string">
# <dimensions rank="1">
# <dim value="8" index="1"/>
# </dimensions>
# <strategy mode="STEP"/>
# <datasource type="CLIENT">
# <record name="flags"/>
# </datasource>
# </attribute>
logical = [["1", "0", "true", "false"],
["True", "False", "TrUe", "FaLsE"]]
tdw = self.openWriter(
fname, xml, json='{"data": {' +
' "pco_float":' + str(self._fpco1[0]) +
', "flags":' +
str(logical).replace("'", "\"") +
', "pco_int":' + str(self._pco1[0]) +
' } }')
steps = min(len(self._pco1), len(self._fpco1))
for i in range(steps):
self.record(tdw, '{"data": {' +
' "pco_float":' + str(self._fpco1[i]) +
', "pco_int":' + str(self._pco1[i]) +
', "flags":' + str(logical).replace("'", "\"") +
' } }')
self.closeWriter(tdw, json='{"data": {' +
' "pco_float":' + str(self._fpco1[0]) +
', "pco_int":' + str(self._pco1[0]) +
', "flags":' + str(logical).replace("'", "\"") +
' } }')
# check the created file
FileWriter.writer = H5CppWriter
f = FileWriter.open_file(fname, readonly=True)
det, field = self._sc.checkAttributeTree(f, fname, 8, 8)
self._sc.checkImageAttribute(
det, "image_float", "float64", self._fpco1[steps - 1],
error=1.e-14)
self._sc.checkImageAttribute(det, "image_int", "int64", self._pco1[0])
self._sc.checkImageAttribute(det, "image_bool", "bool", logical)
self._sc.checkImageAttribute(
det, "image_int32", "int32", self._pco1[steps - 1])
self._sc.checkImageAttribute(
field, "image_float32", "float32", self._fpco1[steps - 1],
error=1.e-6)
self._sc.checkImageAttribute(
field, "image_uint32", "uint32", self._pco1[steps - 1])
self._sc.checkImageAttribute(
field, "image_uint64", "uint64", self._pco1[0])
self._sc.checkImageAttribute(field, "image_bool", "bool", logical)
self._sc.checkImageAttribute(
field, "image_uint64_canfail", "uint64",
[[numpy.iinfo(getattr(numpy, 'int64')).max] *
len(self._pco1[0][0])] * len(self._pco1[0]))
self._sc.checkImageAttribute(
field, "image_bool_canfail", "bool",
[[False] * len(logical[0])] * len(logical))
self._sc.checkImageAttribute(
det, "image_float_canfail", "float64",
[[numpy.finfo(getattr(numpy, 'float64')).max] * 30] * 20)
self._sc.checkImageAttribute(
det, "image_int_canfail", "int64",
[[numpy.iinfo(getattr(numpy, 'int64')).max] *
len(self._pco1[0][0])] * len(self._pco1[0]))
# STRING NOT SUPPORTED BY PNINX
self._sc.checkScalarAttribute(
det, "nexdatas_canfail", "string", "FAILED")
self._sc.checkScalarAttribute(
field, "nexdatas_canfail", "string", "FAILED")
f.close()
os.remove(fname)
if __name__ == '__main__':
unittest.main()
|
cmouse/buildbot
|
refs/heads/master
|
master/buildbot/test/fake/fakestats.py
|
5
|
# This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from buildbot.process import buildstep
from buildbot.process.results import SUCCESS
from buildbot.statistics import capture
from buildbot.statistics.storage_backends.base import StatsStorageBase
class FakeStatsStorageService(StatsStorageBase):
"""
Fake Storage service used in unit tests
"""
def __init__(self, stats=None, name='FakeStatsStorageService'):
self.stored_data = []
if not stats:
self.stats = [capture.CaptureProperty("TestBuilder",
'test')]
else:
self.stats = stats
self.name = name
self.captures = []
def thd_postStatsValue(self, post_data, series_name, context=None):
if not context:
context = {}
self.stored_data.append((post_data, series_name, context))
class FakeBuildStep(buildstep.BuildStep):
"""
A fake build step to be used for testing.
"""
def doSomething(self):
self.setProperty("test", 10, "test")
def start(self):
self.doSomething()
return SUCCESS
class FakeInfluxDBClient:
"""
Fake Influx module for testing on systems that don't have influxdb installed.
"""
def __init__(self, *args, **kwargs):
self.points = []
def write_points(self, points):
self.points.extend(points)
|
odooindia/odoo
|
refs/heads/master
|
addons/event/report/__init__.py
|
435
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import report_event_registration
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
dntt1/youtube-dl
|
refs/heads/master
|
youtube_dl/__main__.py
|
90
|
#!/usr/bin/env python
from __future__ import unicode_literals
# Execute with
# $ python youtube_dl/__main__.py (2.6+)
# $ python -m youtube_dl (2.7+)
import sys
if __package__ is None and not hasattr(sys, 'frozen'):
# direct call of __main__.py
import os.path
path = os.path.realpath(os.path.abspath(__file__))
sys.path.insert(0, os.path.dirname(os.path.dirname(path)))
import youtube_dl
if __name__ == '__main__':
youtube_dl.main()
|
VladKha/CodeWars
|
refs/heads/master
|
6 kyu/Decode the Morse code/solve.py
|
1
|
MORSE_CODE = {
'.-': 'A', '-...': 'B', '-.-.': 'C',
'-..': 'D', '.': 'E', '..-.': 'F',
'--.': 'G', '....': 'H', '..': 'I',
'.---': 'J', '-.-': 'K', '.-..': 'L',
'--': 'M', '-.': 'N', '---': 'O',
'.--.': 'P', '--.-': 'Q', '.-.': 'R',
'...': 'S', '-': 'T', '..-': 'U',
'...-': 'V', '.--': 'W', '-..-': 'X',
'-.--': 'Y', '--..': 'Z', '-----': '0',
'.----': '1', '..---': '2', '...--': '3',
'....-': '4', '.....': '5', '-....': '6',
'--...': '7', '---..': '8', '----.': '9',
'.-.-.-': '.', '--..--': ',', '..--..': '?',
}
def decodeMorse(morseCode):
result = ''
for word in morseCode.strip().split(' '):
for letter in word.split(' '):
result += MORSE_CODE[letter]
result += ' '
return result.strip()
|
robin-pt/learning_note_pyGuessNumber
|
refs/heads/master
|
pyGuestNumber/settings.py
|
1
|
"""
Django settings for pyGuestNumber project.
Generated by 'django-admin startproject' using Django 1.11.5.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.11/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'n$byhn%t7@&)p9v62yll+7vu22-!7z_q64(js7)ua2392ndx(5'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'accounts.apps.AccountsConfig',
'gnumber.apps.GnumberConfig',
'widget_tweaks',
]
MIDDLEWARE = [
# 'django.middleware.cache.UpdateCacheMiddleware',
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
# 'django.middleware.cache.FetchFromCacheMiddleware',
]
ROOT_URLCONF = 'pyGuestNumber.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [
os.path.join(BASE_DIR, 'templates'),
],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'pyGuestNumber.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(os.path.dirname(BASE_DIR), 'pygn_dev.sqlite3'),
}
}
CACHES = {
"default": {
"BACKEND": "django_redis.cache.RedisCache",
"LOCATION": "redis://localhost:6379/1",
"OPTIONS": {
"CLIENT_CLASS": "django_redis.client.DefaultClient",
"SOCKET_CONNECT_TIMEOUT": 5,
"SOCKET_TIMEOUT": 5,
"COMPRESSOR": "django_redis.compressors.lzma.LzmaCompressor",
"IGNORE_EXCEPTIONS": True,
}
},
"games": {
"BACKEND": "django_redis.cache.RedisCache",
"LOCATION": "redis://localhost:6379/2",
"OPTIONS": {
"CLIENT_CLASS": "django_redis.client.DefaultClient",
"SOCKET_CONNECT_TIMEOUT": 5,
"SOCKET_TIMEOUT": 5,
"COMPRESSOR": "django_redis.compressors.lzma.LzmaCompressor",
"IGNORE_EXCEPTIONS": True,
}
},
"answer": {
"BACKEND": "django_redis.cache.RedisCache",
"LOCATION": "redis://localhost:6379/3",
"OPTIONS": {
"CLIENT_CLASS": "django_redis.client.DefaultClient",
"SOCKET_CONNECT_TIMEOUT": 5,
"SOCKET_TIMEOUT": 5,
"COMPRESSOR": "django_redis.compressors.lzma.LzmaCompressor",
"IGNORE_EXCEPTIONS": True,
"CONNECTION_POOL_KWARGS": {"decode_responses": True},
}
}
}
SESSION_ENGINE = "django.contrib.sessions.backends.cache"
SESSION_CACHE_ALIAS = "default"
# Password validation
# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/
LANGUAGE_CODE = 'zh-hant'
TIME_ZONE = 'Asia/Taipei'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_DIRS = (
os.path.join(BASE_DIR, 'static'),
)
LOGIN_URL = '/user/login/'
LOGIN_REDIRECT_URL = '/'
CACHE_MIDDLEWARE_KEY_PREFIX = 'gn'
CACHE_MIDDLEWARE_SECONDS = 60 * 3
|
yongshengwang/hue
|
refs/heads/master
|
desktop/core/ext-py/thrift-0.9.1/src/TMultiplexedProcessor.py
|
146
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
from thrift.Thrift import TProcessor, TMessageType, TException
from thrift.protocol import TProtocolDecorator, TMultiplexedProtocol
class TMultiplexedProcessor(TProcessor):
def __init__(self):
self.services = {}
def registerProcessor(self, serviceName, processor):
self.services[serviceName] = processor
def process(self, iprot, oprot):
(name, type, seqid) = iprot.readMessageBegin()
if type != TMessageType.CALL and type != TMessageType.ONEWAY:
raise TException("TMultiplex protocol only supports CALL & ONEWAY")
index = name.find(TMultiplexedProtocol.SEPARATOR)
if index < 0:
raise TException("Service name not found in message name: " + name + ". Did you forget to use TMultiplexProtocol in your client?")
serviceName = name[0:index]
call = name[index+len(TMultiplexedProtocol.SEPARATOR):]
if serviceName not in self.services:
raise TException("Service name not found: " + serviceName + ". Did you forget to call registerProcessor()?")
standardMessage = (
call,
type,
seqid
)
return self.services[serviceName].process(StoredMessageProtocol(iprot, standardMessage), oprot)
class StoredMessageProtocol(TProtocolDecorator.TProtocolDecorator):
def __init__(self, protocol, messageBegin):
TProtocolDecorator.TProtocolDecorator.__init__(self, protocol)
self.messageBegin = messageBegin
def readMessageBegin(self):
return self.messageBegin
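# Usage sketch (illustrative; not part of the original file). The service
# names and the generated Calculator/Scribe processor classes are assumptions
# made only for this example:
#
#   processor = TMultiplexedProcessor()
#   processor.registerProcessor("Calculator", Calculator.Processor(calc_handler))
#   processor.registerProcessor("Scribe", Scribe.Processor(scribe_handler))
#
# Clients then wrap their protocol in TMultiplexedProtocol with the matching
# service name so that process() can split it off at SEPARATOR.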
|
m2dsupsdlclass/lectures-labs
|
refs/heads/master
|
labs/01_keras/solutions/keras_adam.py
|
1
|
# %load solutions/keras_adam_and_adadelta.py
model = Sequential()
model.add(Dense(hidden_dim, input_dim=input_dim,
activation="relu"))
model.add(Dense(hidden_dim, activation="relu"))
model.add(Dense(output_dim, activation="softmax"))
optimizer = optimizers.Adam(lr=0.001)
model.compile(optimizer=optimizer, loss='categorical_crossentropy',
metrics=['accuracy'])
history = model.fit(X_train, Y_train, validation_split=0.2,
epochs=15, batch_size=32)
fig, (ax0, ax1) = plt.subplots(nrows=2, sharex=True, figsize=(12, 6))
history_df = pd.DataFrame(history.history)
history_df["epoch"] = history.epoch
history_df.plot(x="epoch", y=["loss", "val_loss"], ax=ax0)
history_df.plot(x="epoch", y=["accuracy", "val_accuracy"], ax=ax1);
# Analysis:
#
# Adam with its default global learning rate of 0.001 tends to work well
# in many settings and often converges as fast as or faster than SGD
# with a well-tuned learning rate.
# Adam adapts the learning rate locally for each parameter, which is why
# tuning its default global learning rate is rarely needed.
#
# Reference:
#
# Adam: https://arxiv.org/abs/1412.6980
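# A possible SGD baseline for the comparison above (illustrative sketch; the
# learning rate 0.1 is an assumed starting point, not a tuned value, and the
# same notebook-provided names are used as in the solution above):
#
#   sgd_model = Sequential()
#   sgd_model.add(Dense(hidden_dim, input_dim=input_dim, activation="relu"))
#   sgd_model.add(Dense(hidden_dim, activation="relu"))
#   sgd_model.add(Dense(output_dim, activation="softmax"))
#   sgd_model.compile(optimizer=optimizers.SGD(lr=0.1),
#                     loss='categorical_crossentropy', metrics=['accuracy'])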
|
Azulinho/ansible
|
refs/heads/devel
|
lib/ansible/modules/network/f5/bigip_hostname.py
|
6
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2017 F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: bigip_hostname
short_description: Manage the hostname of a BIG-IP
description:
- Manage the hostname of a BIG-IP.
version_added: "2.3"
options:
hostname:
description:
- Hostname of the BIG-IP host.
required: True
notes:
- Requires the f5-sdk Python package on the host. This is as easy as pip
install f5-sdk.
extends_documentation_fragment: f5
requirements:
- f5-sdk
author:
- Tim Rupp (@caphrim007)
- Matthew Lam (@mryanlam)
'''
EXAMPLES = r'''
- name: Set the hostname of the BIG-IP
bigip_hostname:
hostname: bigip.localhost.localdomain
password: secret
server: lb.mydomain.com
user: admin
delegate_to: localhost
'''
RETURN = r'''
hostname:
description: The new hostname of the device
returned: changed
type: string
sample: big-ip01.internal
'''
from ansible.module_utils.f5_utils import AnsibleF5Client
from ansible.module_utils.f5_utils import AnsibleF5Parameters
from ansible.module_utils.f5_utils import HAS_F5SDK
from ansible.module_utils.f5_utils import F5ModuleError
try:
from ansible.module_utils.f5_utils import iControlUnexpectedHTTPError
except ImportError:
HAS_F5SDK = False
class Parameters(AnsibleF5Parameters):
api_attributes = ['hostname']
updatables = ['hostname']
returnables = ['hostname']
def to_return(self):
result = {}
for returnable in self.returnables:
result[returnable] = getattr(self, returnable)
result = self._filter_params(result)
return result
def api_params(self):
result = {}
for api_attribute in self.api_attributes:
if self.api_map is not None and api_attribute in self.api_map:
result[api_attribute] = getattr(self, self.api_map[api_attribute])
else:
result[api_attribute] = getattr(self, api_attribute)
result = self._filter_params(result)
return result
@property
def hostname(self):
if self._values['hostname'] is None:
return None
return str(self._values['hostname'])
class ModuleManager(object):
def __init__(self, client):
self.client = client
self.have = None
self.want = Parameters(self.client.module.params)
self.changes = Parameters()
def _set_changed_options(self):
changed = {}
for key in Parameters.returnables:
if getattr(self.want, key) is not None:
changed[key] = getattr(self.want, key)
if changed:
self.changes = Parameters(changed)
def _update_changed_options(self):
changed = {}
for key in Parameters.updatables:
if getattr(self.want, key) is not None:
attr1 = getattr(self.want, key)
attr2 = getattr(self.have, key)
if attr1 != attr2:
changed[key] = attr1
self.changes = Parameters(changed)
if changed:
return True
return False
def exec_module(self):
result = dict()
try:
changed = self.update()
except iControlUnexpectedHTTPError as e:
raise F5ModuleError(str(e))
changes = self.changes.to_return()
result.update(**changes)
result.update(dict(changed=changed))
return result
def read_current_from_device(self):
resource = self.client.api.tm.sys.global_settings.load()
result = resource.attrs
return Parameters(result)
def update(self):
self.have = self.read_current_from_device()
if not self.should_update():
return False
if self.client.check_mode:
return True
self.update_on_device()
return True
def should_update(self):
result = self._update_changed_options()
if result:
return True
return False
def update_on_device(self):
params = self.want.api_params()
resource = self.client.api.tm.sys.global_settings.load()
resource.modify(**params)
self.client.api.tm.cm.devices.exec_cmd(
'mv', name=self.have.hostname, target=self.want.hostname
)
class ArgumentSpec(object):
def __init__(self):
self.supports_check_mode = True
self.argument_spec = dict(
hostname=dict(
required=True,
default=None,
type='str'
)
)
self.f5_product_name = 'bigip'
def main():
if not HAS_F5SDK:
raise F5ModuleError("The python f5-sdk module is required")
spec = ArgumentSpec()
client = AnsibleF5Client(
argument_spec=spec.argument_spec,
supports_check_mode=spec.supports_check_mode,
f5_product_name=spec.f5_product_name
)
try:
mm = ModuleManager(client)
results = mm.exec_module()
client.module.exit_json(**results)
except F5ModuleError as e:
client.module.fail_json(msg=str(e))
if __name__ == '__main__':
main()
|
lidabing/xgyp
|
refs/heads/master
|
gyp/test/msvs/config_attrs/gyptest-config_attrs.py
|
297
|
#!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies that msvs_configuration_attributes and
msbuild_configuration_attributes are applied by using
them to set the OutputDirectory.
"""
import TestGyp
import os
test = TestGyp.TestGyp(workdir='workarea_all',formats=['msvs'])
vc_version = 'VC90'
if os.getenv('GYP_MSVS_VERSION'):
vc_version = ['VC90','VC100'][int(os.getenv('GYP_MSVS_VERSION')) >= 2010]
expected_exe_file = os.path.join(test.workdir, vc_version, 'hello.exe')
test.run_gyp('hello.gyp')
test.build('hello.gyp')
test.must_exist(expected_exe_file)
test.pass_test()
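# For reference (illustrative sketch; the actual hello.gyp used by this test
# is not shown here), the configuration attributes being verified look
# roughly like this inside a target or target_defaults block:
#
#   'msvs_configuration_attributes': {
#     'OutputDirectory': '$(SolutionDir)VC90',
#   },
#   'msbuild_configuration_attributes': {
#     'OutputDirectory': '$(SolutionDir)VC100',
#   },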
|
denis-pitul/django
|
refs/heads/master
|
django/core/files/uploadhandler.py
|
149
|
"""
Base file upload handler classes, and the built-in concrete subclasses
"""
from __future__ import unicode_literals
from io import BytesIO
from django.conf import settings
from django.core.files.uploadedfile import (
InMemoryUploadedFile, TemporaryUploadedFile,
)
from django.utils.encoding import python_2_unicode_compatible
from django.utils.module_loading import import_string
__all__ = [
'UploadFileException', 'StopUpload', 'SkipFile', 'FileUploadHandler',
'TemporaryFileUploadHandler', 'MemoryFileUploadHandler', 'load_handler',
'StopFutureHandlers'
]
class UploadFileException(Exception):
"""
Any error having to do with uploading files.
"""
pass
@python_2_unicode_compatible
class StopUpload(UploadFileException):
"""
This exception is raised when an upload must abort.
"""
def __init__(self, connection_reset=False):
"""
If ``connection_reset`` is ``True``, Django will halt the upload
without consuming the rest of the upload. This will cause the browser to
show a "connection reset" error.
"""
self.connection_reset = connection_reset
def __str__(self):
if self.connection_reset:
return 'StopUpload: Halt current upload.'
else:
return 'StopUpload: Consume request data, then halt.'
class SkipFile(UploadFileException):
"""
This exception is raised by an upload handler that wants to skip a given file.
"""
pass
class StopFutureHandlers(UploadFileException):
"""
Upload handlers that have handled a file and do not want future handlers to
run should raise this exception instead of returning None.
"""
pass
class FileUploadHandler(object):
"""
Base class for streaming upload handlers.
"""
chunk_size = 64 * 2 ** 10 # : The default chunk size is 64 KB.
def __init__(self, request=None):
self.file_name = None
self.content_type = None
self.content_length = None
self.charset = None
self.content_type_extra = None
self.request = request
def handle_raw_input(self, input_data, META, content_length, boundary, encoding=None):
"""
Handle the raw input from the client.
Parameters:
:input_data:
An object that supports reading via .read().
:META:
``request.META``.
:content_length:
The (integer) value of the Content-Length header from the
client.
:boundary: The boundary from the Content-Type header. Be sure to
prepend two '--'.
"""
pass
def new_file(self, field_name, file_name, content_type, content_length, charset=None, content_type_extra=None):
"""
Signal that a new file has been started.
Warning: As with any data from the client, you should not trust
content_length (and sometimes won't even get it).
"""
self.field_name = field_name
self.file_name = file_name
self.content_type = content_type
self.content_length = content_length
self.charset = charset
self.content_type_extra = content_type_extra
def receive_data_chunk(self, raw_data, start):
"""
Receive data from the streamed upload parser. ``start`` is the position
in the file of the chunk.
"""
raise NotImplementedError('subclasses of FileUploadHandler must provide a receive_data_chunk() method')
def file_complete(self, file_size):
"""
Signal that a file has completed. File size corresponds to the actual
size accumulated by all the chunks.
Subclasses should return a valid ``UploadedFile`` object.
"""
raise NotImplementedError('subclasses of FileUploadHandler must provide a file_complete() method')
def upload_complete(self):
"""
Signal that the upload is complete. Subclasses should perform cleanup
that is necessary for this handler.
"""
pass
class TemporaryFileUploadHandler(FileUploadHandler):
"""
Upload handler that streams data into a temporary file.
"""
def __init__(self, *args, **kwargs):
super(TemporaryFileUploadHandler, self).__init__(*args, **kwargs)
def new_file(self, *args, **kwargs):
"""
Create the file object to append to as data is coming in.
"""
super(TemporaryFileUploadHandler, self).new_file(*args, **kwargs)
self.file = TemporaryUploadedFile(self.file_name, self.content_type, 0, self.charset, self.content_type_extra)
def receive_data_chunk(self, raw_data, start):
self.file.write(raw_data)
def file_complete(self, file_size):
self.file.seek(0)
self.file.size = file_size
return self.file
class MemoryFileUploadHandler(FileUploadHandler):
"""
File upload handler to stream uploads into memory (used for small files).
"""
def handle_raw_input(self, input_data, META, content_length, boundary, encoding=None):
"""
Use the content_length to signal whether or not this handler should be in use.
"""
# Check the content-length header to see if this handler should be used.
# If the post is too large, we cannot use the Memory handler.
if content_length > settings.FILE_UPLOAD_MAX_MEMORY_SIZE:
self.activated = False
else:
self.activated = True
def new_file(self, *args, **kwargs):
super(MemoryFileUploadHandler, self).new_file(*args, **kwargs)
if self.activated:
self.file = BytesIO()
raise StopFutureHandlers()
def receive_data_chunk(self, raw_data, start):
"""
Add the data to the BytesIO file.
"""
if self.activated:
self.file.write(raw_data)
else:
return raw_data
def file_complete(self, file_size):
"""
Return a file object if we're activated.
"""
if not self.activated:
return
self.file.seek(0)
return InMemoryUploadedFile(
file=self.file,
field_name=self.field_name,
name=self.file_name,
content_type=self.content_type,
size=file_size,
charset=self.charset,
content_type_extra=self.content_type_extra
)
def load_handler(path, *args, **kwargs):
"""
Given a path to a handler, return an instance of that handler.
E.g.::
>>> from django.http import HttpRequest
>>> request = HttpRequest()
>>> load_handler('django.core.files.uploadhandler.TemporaryFileUploadHandler', request)
<TemporaryFileUploadHandler object at 0x...>
"""
return import_string(path)(*args, **kwargs)
|
pierrebaque/EM
|
refs/heads/master
|
retrainGaussian.py
|
2
|
from GaussianNet import gaussianNet
from net_functions import *
import os
os.environ["THEANO_FLAGS"] = "device=gpu2, floatX=float32"
from PIL import Image
import pickle
import time
import json
import MyConfig
class gaussian2(gaussianNet):
def __init__(self):
gaussianNet.__init__(self)
self.trainImgsPath = MyConfig.trainImgPath
self.trainLabelsPath = MyConfig.trainLabelPath
self.imgList = []
self.labelList = []
def checkPath(self, path):
if not os.path.exists(path):
os.makedirs(path)
def loadImgList(self, dataPath, data_ext):
files = [f for f in os.listdir(dataPath) if os.path.isfile(dataPath + f)]
files = [i for i in files if i.endswith('.'+data_ext)]
self.imgList = files
def generateLabelList(self, imgList, img_ext):
labelList = [MyConfig.labelName%f[:-(len(img_ext)+1)] for f in imgList]
self.labelList = labelList
def loadjsonData(self, dataPath, jsonfile):
with open(dataPath + jsonfile) as read_file:
data = json.load(read_file)
return np.array(data)
def load_batch(self, local_training_set_indices, train=True, from_generated=False):
batch_size = len(local_training_set_indices)
rgb_list = []
labels_list = []
for idx in local_training_set_indices:
# rgb = np.asarray(Image.open(self.imgs[fid]))[:, :, 0:3]
rgb = np.asarray( Image.open(self.trainImgsPath+self.imgList[idx]) )[:, :, 0:3]
H, W = np.shape(rgb)[0:2]
rgb_theano = rgb.transpose((2, 0, 1))
rgb_theano = rgb_theano.reshape((1, 3, H, W))
rgb_list.append(rgb_theano)
# if train:
# labels = np.clip(np.loadtxt(self.labels_path % fid), -1000, 1000)
CNN_factor = 4
H_lab, W_lab = H / CNN_factor, W / CNN_factor
# print labels.shape
labels = self.loadjsonData(self.trainLabelsPath, self.labelList[idx])
labels = labels.reshape(H_lab, W_lab, 5)
labels = labels.transpose((2, 0, 1))
labels = labels.reshape(1, 5, H_lab, W_lab)
labels_list.append(labels)
x_in = np.concatenate(rgb_list, axis=0)
y_in = np.concatenate(labels_list, axis=0)
return x_in, y_in
def optimize_gaussians_online(self,all_indices,gaussian_minibatch_size = 4,from_generated = False):
number_of_minibatches = len(all_indices)/gaussian_minibatch_size
self.go_zero_sum_func()
#we are computing the sums that will be used to update the gaussians
for b in range(0,number_of_minibatches):
local_indices = all_indices[b*gaussian_minibatch_size:(b+1)*gaussian_minibatch_size]
#print 'gaussian minibatch',b
x_in,y_in = self.load_batch(local_indices,train = True,from_generated = from_generated)
self.train_sums_func(x_in,y_in)
self.train_gaussians(x_in[0:2], y_in[0:2])
return
def train_parts(self, em_it, training_round = 0):
batch_size = 4
generated_training_set_size = len(self.imgList)
epoch_set_size = min(generated_training_set_size,400)
update_gaussian_every_batch_iters = min(generated_training_set_size,100)
gaussian_fitting_size = min(generated_training_set_size,500)
train_logs_path = MyConfig.log_path
self.checkPath(train_logs_path)
params_bg = pickle.load(open(MyConfig.bgParams_path))
self.mBGsub.setParams(params_bg)
if training_round ==0:
log_filename = 'train_%d.txt' % em_it
self.checkPath(train_logs_path)
f_logs = open(train_logs_path+log_filename, 'w')
f_logs.close()
if em_it == 0:
# initialize params
init_gaussian_params = init_all_gaussian_params(self.n_leaves)
load_gaussian_params_fromshared(self.params_gaussian, init_gaussian_params)
random_reg_params = self.regression_net.get_random_regression_params()
self.regression_net.load_regression_params(random_reg_params)
print 'finished initialization'
else:
#load parameters from previous em iteration
params_regression = pickle.load(
open(MyConfig.net_params_path + 'EM%d_params_regression.pickle' % (em_it - 1)))
self.regression_net.load_regression_params(params_regression)
gaussian_params = pickle.load(
open(MyConfig.net_params_path + 'EM%d_params_gaussian.pickle' % (em_it - 1)))
load_gaussian_params(self.params_gaussian, gaussian_params)
else:
#load parameters from previous training iteration
params_regression = pickle.load(open(MyConfig.net_params_path
+ 'params_regression_%d.pickle' % (training_round-1)))
self.regression_net.load_regression_params(params_regression)
gaussian_params = pickle.load(open(MyConfig.net_params_path
+ 'params_gaussian_%d.pickle' % (training_round-1)))
load_gaussian_params(self.params_gaussian, gaussian_params)
#learning regression parameters
for iterIdx in range(training_round, MyConfig.epochs):
print 'epoch %d' % iterIdx
generated_training_set_order = np.random.permutation(np.arange(0, generated_training_set_size))
# Train
av_cost = 0
for batch in range(0,epoch_set_size/batch_size):
local_training_set_indices = generated_training_set_order[batch*batch_size:(batch+1)*batch_size]
x_in,y_in = self.load_batch(local_training_set_indices,train = True,from_generated = True)
t_start = time.time()
cost = self.train_decision_func(x_in,y_in)[0]
t_end = time.time()
print 'regression training time %f' % (t_end - t_start)
av_cost+=cost
#Optimise gaussian
local_training_set_indices = generated_training_set_order[batch*batch_size:
batch*batch_size+gaussian_fitting_size]
if batch % update_gaussian_every_batch_iters == update_gaussian_every_batch_iters - 1:
self.optimize_gaussians_online(local_training_set_indices,from_generated=True)
av_cost = av_cost / (epoch_set_size/batch_size)
print 'av_cost = %f' %av_cost
f_logs = open(train_logs_path+log_filename, 'a')
f_logs.write('%f' % (av_cost) + '\n')
f_logs.close()
# Save Params after each two iterations
if iterIdx % 2 == 0:
params_regression = self.regression_net.save_regression_params()  # snapshot the current regression params
gaussian_params = save_gaussian_params(self.params_gaussian)
self.checkPath(MyConfig.net_params_path)
with open(MyConfig.net_params_path + 'params_regression%d_%d.pickle'%(em_it, iterIdx), 'wb') as a:
pickle.dump(params_regression, a)
with open(MyConfig.net_params_path + 'params_gaussian%d_%d.pickle'%(em_it, iterIdx), 'wb') as a:
pickle.dump(gaussian_params, a)
# Run small test
#self.run_test(em_it, reload_params=False, name='test_em%d_it%d_' % (em_it, iterIdx))
params_regression = self.regression_net.save_regression_params()
gaussian_params = save_gaussian_params(self.params_gaussian)
self.checkPath(MyConfig.net_params_path)
with open(MyConfig.net_params_path + 'EM%d_params_regression.pickle'%em_it,'wb') as a:
pickle.dump(params_regression,a)
with open(MyConfig.net_params_path + 'EM%d_params_gaussian.pickle'%em_it,'wb') as a:
pickle.dump(gaussian_params,a)
def main(reloadData = False):
gaussianModel = gaussian2()
gaussianModel.loadImgList( gaussianModel.trainImgsPath, MyConfig.imgExt)
gaussianModel.generateLabelList( gaussianModel.imgList, MyConfig.imgExt)
print 'start training'
for em_iter in range(MyConfig.iterations):
gaussianModel.train_parts(em_iter)
if __name__ =="__main__":
main()
|
mou4e/zirconium
|
refs/heads/master
|
tools/perf/measurements/draw_properties.py
|
4
|
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.core.platform import tracing_category_filter
from telemetry.core.platform import tracing_options
from telemetry.page import page_test
from telemetry.timeline import model
from telemetry.value import scalar
class DrawProperties(page_test.PageTest):
def __init__(self):
super(DrawProperties, self).__init__()
def CustomizeBrowserOptions(self, options):
options.AppendExtraBrowserArgs([
'--enable-property-tree-verification',
'--enable-prefer-compositing-to-lcd-text',
])
def WillNavigateToPage(self, page, tab):
options = tracing_options.TracingOptions()
options.enable_chrome_trace = True
category_filter = tracing_category_filter.TracingCategoryFilter(
'disabled-by-default-cc.debug.cdp-perf')
tab.browser.platform.tracing_controller.Start(options, category_filter)
def ComputeAverageAndSumOfDurations(self, timeline_model, name):
events = timeline_model.GetAllEventsOfName(name)
event_durations = [d.duration for d in events]
assert event_durations, 'Failed to find durations'
duration_sum = sum(event_durations)
duration_count = len(event_durations)
duration_avg = duration_sum / duration_count
return (duration_avg, duration_sum)
def ValidateAndMeasurePage(self, page, tab, results):
timeline_data = tab.browser.platform.tracing_controller.Stop()
timeline_model = model.TimelineModel(timeline_data)
(cdp_avg, cdp_sum) = self.ComputeAverageAndSumOfDurations(
timeline_model,
"LayerTreeHostCommon::CalculateDrawProperties");
(pt_avg, pt_sum) = self.ComputeAverageAndSumOfDurations(
timeline_model,
"LayerTreeHostCommon::ComputeVisibleRectsWithPropertyTrees");
reduction = 100.0 * (1.0 - (pt_sum / cdp_sum))
results.AddValue(scalar.ScalarValue(
results.current_page, 'CDP_reduction', ' %', reduction,
description='Reduction in CDP cost with property trees'))
results.AddValue(scalar.ScalarValue(
results.current_page, 'CDP_avg_cost', 'ms', cdp_avg,
description='Average time spent in CDP'))
results.AddValue(scalar.ScalarValue(
results.current_page, 'PT_avg_cost', 'ms', pt_avg,
description='Average time spent processing property trees'))
def CleanUpAfterPage(self, page, tab):
tracing_controller = tab.browser.platform.tracing_controller
if tracing_controller.is_tracing_running:
tracing_controller.Stop()
|
adamchau/essay_simulation
|
refs/heads/master
|
MAS/plot_mas.py
|
1
|
# -*- coding: utf-8 -*-
"""
Created on Wed Oct 22 09:07:07 2014
@author: ydzhao
"""
from ev_tr import *
if __name__=="__main__":
'''
mas consensus
'''
plt.figure()
for i in range(MAS1.agent_num):
agent1_var=[data[i] for data in MAS1.status_his]
plt.plot(MAS1.T,agent1_var)
'''
event-triggered consensus
'''
plt.figure()
for i in range(ev_MAS1.agent_num):
agent1_var=[data[i] for data in ev_MAS1.status_his]
plt.plot(ev_MAS1.T,agent1_var)
plt.figure()
plt.plot(ev_MAS1.T[0:800],ev_MAS1.error_his[0:800])
plt.plot(ev_MAS1.T[0:800],ev_MAS1.bound_his[0:800])
|
samithaj/headphones
|
refs/heads/master
|
lib/cherrypy/lib/auth_digest.py
|
49
|
# This file is part of CherryPy <http://www.cherrypy.org/>
# -*- coding: utf-8 -*-
# vim:ts=4:sw=4:expandtab:fileencoding=utf-8
__doc__ = """An implementation of the server-side of HTTP Digest Access
Authentication, which is described in :rfc:`2617`.
Example usage, using the built-in get_ha1_dict_plain function which uses a dict
of plaintext passwords as the credentials store::
userpassdict = {'alice' : '4x5istwelve'}
get_ha1 = cherrypy.lib.auth_digest.get_ha1_dict_plain(userpassdict)
digest_auth = {'tools.auth_digest.on': True,
'tools.auth_digest.realm': 'wonderland',
'tools.auth_digest.get_ha1': get_ha1,
'tools.auth_digest.key': 'a565c27146791cfb',
}
app_config = { '/' : digest_auth }
"""
__author__ = 'visteya'
__date__ = 'April 2009'
import time
from cherrypy._cpcompat import parse_http_list, parse_keqv_list
import cherrypy
from cherrypy._cpcompat import md5, ntob
md5_hex = lambda s: md5(ntob(s)).hexdigest()
qop_auth = 'auth'
qop_auth_int = 'auth-int'
valid_qops = (qop_auth, qop_auth_int)
valid_algorithms = ('MD5', 'MD5-sess')
def TRACE(msg):
cherrypy.log(msg, context='TOOLS.AUTH_DIGEST')
# Three helper functions for users of the tool, providing three variants
# of get_ha1() functions for three different kinds of credential stores.
def get_ha1_dict_plain(user_password_dict):
"""Returns a get_ha1 function which obtains a plaintext password from a
dictionary of the form: {username : password}.
If you want a simple dictionary-based authentication scheme, with plaintext
passwords, use get_ha1_dict_plain(my_userpass_dict) as the value for the
get_ha1 argument to digest_auth().
"""
def get_ha1(realm, username):
password = user_password_dict.get(username)
if password:
return md5_hex('%s:%s:%s' % (username, realm, password))
return None
return get_ha1
def get_ha1_dict(user_ha1_dict):
"""Returns a get_ha1 function which obtains a HA1 password hash from a
dictionary of the form: {username : HA1}.
If you want a dictionary-based authentication scheme, but with
pre-computed HA1 hashes instead of plain-text passwords, use
get_ha1_dict(my_userha1_dict) as the value for the get_ha1
argument to digest_auth().
"""
def get_ha1(realm, username):
return user_ha1_dict.get(username)
return get_ha1
def get_ha1_file_htdigest(filename):
"""Returns a get_ha1 function which obtains a HA1 password hash from a
flat file with lines of the same format as that produced by the Apache
htdigest utility. For example, for realm 'wonderland', username 'alice',
and password '4x5istwelve', the htdigest line would be::
alice:wonderland:3238cdfe91a8b2ed8e39646921a02d4c
If you want to use an Apache htdigest file as the credentials store,
then use get_ha1_file_htdigest(my_htdigest_file) as the value for the
get_ha1 argument to digest_auth(). It is recommended that the filename
argument be an absolute path, to avoid problems.
"""
def get_ha1(realm, username):
result = None
f = open(filename, 'r')
for line in f:
u, r, ha1 = line.rstrip().split(':')
if u == username and r == realm:
result = ha1
break
f.close()
return result
return get_ha1
def synthesize_nonce(s, key, timestamp=None):
"""Synthesize a nonce value which resists spoofing and can be checked
for staleness. Returns a string suitable as the value for 'nonce' in
the www-authenticate header.
s
A string related to the resource, such as the hostname of the server.
key
A secret string known only to the server.
timestamp
An integer seconds-since-the-epoch timestamp
"""
if timestamp is None:
timestamp = int(time.time())
h = md5_hex('%s:%s:%s' % (timestamp, s, key))
nonce = '%s:%s' % (timestamp, h)
return nonce
def H(s):
"""The hash function H"""
return md5_hex(s)
class HttpDigestAuthorization (object):
"""Class to parse a Digest Authorization header and perform re-calculation
of the digest.
"""
def errmsg(self, s):
return 'Digest Authorization header: %s' % s
def __init__(self, auth_header, http_method, debug=False):
self.http_method = http_method
self.debug = debug
scheme, params = auth_header.split(" ", 1)
self.scheme = scheme.lower()
if self.scheme != 'digest':
raise ValueError('Authorization scheme is not "Digest"')
self.auth_header = auth_header
# make a dict of the params
items = parse_http_list(params)
paramsd = parse_keqv_list(items)
self.realm = paramsd.get('realm')
self.username = paramsd.get('username')
self.nonce = paramsd.get('nonce')
self.uri = paramsd.get('uri')
self.method = paramsd.get('method')
self.response = paramsd.get('response') # the response digest
self.algorithm = paramsd.get('algorithm', 'MD5').upper()
self.cnonce = paramsd.get('cnonce')
self.opaque = paramsd.get('opaque')
self.qop = paramsd.get('qop') # qop
self.nc = paramsd.get('nc') # nonce count
# perform some correctness checks
if self.algorithm not in valid_algorithms:
raise ValueError(
self.errmsg("Unsupported value for algorithm: '%s'" %
self.algorithm))
has_reqd = (
self.username and
self.realm and
self.nonce and
self.uri and
self.response
)
if not has_reqd:
raise ValueError(
self.errmsg("Not all required parameters are present."))
if self.qop:
if self.qop not in valid_qops:
raise ValueError(
self.errmsg("Unsupported value for qop: '%s'" % self.qop))
if not (self.cnonce and self.nc):
raise ValueError(
self.errmsg("If qop is sent then "
"cnonce and nc MUST be present"))
else:
if self.cnonce or self.nc:
raise ValueError(
self.errmsg("If qop is not sent, "
"neither cnonce nor nc can be present"))
def __str__(self):
return 'authorization : %s' % self.auth_header
def validate_nonce(self, s, key):
"""Validate the nonce.
Returns True if nonce was generated by synthesize_nonce() and the
timestamp is not spoofed, else returns False.
s
A string related to the resource, such as the hostname of
the server.
key
A secret string known only to the server.
Both s and key must be the same values which were used to synthesize
the nonce we are trying to validate.
"""
try:
timestamp, hashpart = self.nonce.split(':', 1)
s_timestamp, s_hashpart = synthesize_nonce(
s, key, timestamp).split(':', 1)
is_valid = s_hashpart == hashpart
if self.debug:
TRACE('validate_nonce: %s' % is_valid)
return is_valid
except ValueError: # split() error
pass
return False
def is_nonce_stale(self, max_age_seconds=600):
"""Returns True if a validated nonce is stale. The nonce contains a
timestamp in plaintext and also a secure hash of the timestamp.
You should first validate the nonce to ensure the plaintext
timestamp is not spoofed.
"""
try:
timestamp, hashpart = self.nonce.split(':', 1)
if int(timestamp) + max_age_seconds > int(time.time()):
return False
except ValueError: # int() error
pass
if self.debug:
TRACE("nonce is stale")
return True
def HA2(self, entity_body=''):
"""Returns the H(A2) string. See :rfc:`2617` section 3.2.2.3."""
# RFC 2617 3.2.2.3
# If the "qop" directive's value is "auth" or is unspecified,
# then A2 is:
# A2 = method ":" digest-uri-value
#
# If the "qop" value is "auth-int", then A2 is:
# A2 = method ":" digest-uri-value ":" H(entity-body)
if self.qop is None or self.qop == "auth":
a2 = '%s:%s' % (self.http_method, self.uri)
elif self.qop == "auth-int":
a2 = "%s:%s:%s" % (self.http_method, self.uri, H(entity_body))
else:
# in theory, this should never happen, since I validate qop in
# __init__()
raise ValueError(self.errmsg("Unrecognized value for qop!"))
return H(a2)
def request_digest(self, ha1, entity_body=''):
"""Calculates the Request-Digest. See :rfc:`2617` section 3.2.2.1.
ha1
The HA1 string obtained from the credentials store.
entity_body
If 'qop' is set to 'auth-int', then A2 includes a hash
of the "entity body". The entity body is the part of the
message which follows the HTTP headers. See :rfc:`2617` section
4.3. This refers to the entity the user agent sent in the
request which has the Authorization header. Typically GET
requests don't have an entity, and POST requests do.
"""
ha2 = self.HA2(entity_body)
# Request-Digest -- RFC 2617 3.2.2.1
if self.qop:
req = "%s:%s:%s:%s:%s" % (
self.nonce, self.nc, self.cnonce, self.qop, ha2)
else:
req = "%s:%s" % (self.nonce, ha2)
# RFC 2617 3.2.2.2
#
# If the "algorithm" directive's value is "MD5" or is unspecified,
# then A1 is:
# A1 = unq(username-value) ":" unq(realm-value) ":" passwd
#
# If the "algorithm" directive's value is "MD5-sess", then A1 is
# calculated only once - on the first request by the client following
# receipt of a WWW-Authenticate challenge from the server.
# A1 = H( unq(username-value) ":" unq(realm-value) ":" passwd )
# ":" unq(nonce-value) ":" unq(cnonce-value)
if self.algorithm == 'MD5-sess':
ha1 = H('%s:%s:%s' % (ha1, self.nonce, self.cnonce))
digest = H('%s:%s' % (ha1, req))
return digest
def www_authenticate(realm, key, algorithm='MD5', nonce=None, qop=qop_auth,
stale=False):
"""Constructs a WWW-Authenticate header for Digest authentication."""
if qop not in valid_qops:
raise ValueError("Unsupported value for qop: '%s'" % qop)
if algorithm not in valid_algorithms:
raise ValueError("Unsupported value for algorithm: '%s'" % algorithm)
if nonce is None:
nonce = synthesize_nonce(realm, key)
s = 'Digest realm="%s", nonce="%s", algorithm="%s", qop="%s"' % (
realm, nonce, algorithm, qop)
if stale:
s += ', stale="true"'
return s
def digest_auth(realm, get_ha1, key, debug=False):
"""A CherryPy tool which hooks at before_handler to perform
HTTP Digest Access Authentication, as specified in :rfc:`2617`.
If the request has an 'authorization' header with a 'Digest' scheme,
this tool authenticates the credentials supplied in that header.
If the request has no 'authorization' header, or if it does but the
scheme is not "Digest", or if authentication fails, the tool sends
a 401 response with a 'WWW-Authenticate' Digest header.
realm
A string containing the authentication realm.
get_ha1
A callable which looks up a username in a credentials store
and returns the HA1 string, which is defined in the RFC to be
MD5(username : realm : password). The function's signature is:
``get_ha1(realm, username)``
where username is obtained from the request's 'authorization' header.
If username is not found in the credentials store, get_ha1() returns
None.
key
A secret string known only to the server, used in the synthesis
of nonces.
"""
request = cherrypy.serving.request
auth_header = request.headers.get('authorization')
nonce_is_stale = False
if auth_header is not None:
try:
auth = HttpDigestAuthorization(
auth_header, request.method, debug=debug)
except ValueError:
raise cherrypy.HTTPError(
400, "The Authorization header could not be parsed.")
if debug:
TRACE(str(auth))
if auth.validate_nonce(realm, key):
ha1 = get_ha1(realm, auth.username)
if ha1 is not None:
# note that for request.body to be available we need to
# hook in at before_handler, not on_start_resource like
# 3.1.x digest_auth does.
digest = auth.request_digest(ha1, entity_body=request.body)
if digest == auth.response: # authenticated
if debug:
TRACE("digest matches auth.response")
# Now check if nonce is stale.
# The choice of ten minutes' lifetime for nonce is somewhat
# arbitrary
nonce_is_stale = auth.is_nonce_stale(max_age_seconds=600)
if not nonce_is_stale:
request.login = auth.username
if debug:
TRACE("authentication of %s successful" %
auth.username)
return
# Respond with 401 status and a WWW-Authenticate header
header = www_authenticate(realm, key, stale=nonce_is_stale)
if debug:
TRACE(header)
cherrypy.serving.response.headers['WWW-Authenticate'] = header
raise cherrypy.HTTPError(
401, "You are not authorized to access that resource")
|
nmartensen/pandas
|
refs/heads/master
|
asv_bench/benchmarks/panel_methods.py
|
7
|
from .pandas_vb_common import *
class PanelMethods(object):
goal_time = 0.2
def setup(self):
self.index = date_range(start='2000', freq='D', periods=1000)
self.panel = Panel(np.random.randn(100, len(self.index), 1000))
def time_pct_change_items(self):
self.panel.pct_change(1, axis='items')
def time_pct_change_major(self):
self.panel.pct_change(1, axis='major')
def time_pct_change_minor(self):
self.panel.pct_change(1, axis='minor')
def time_shift(self):
self.panel.shift(1)
def time_shift_minor(self):
self.panel.shift(1, axis='minor')
|
mancoast/CPythonPyc_test
|
refs/heads/master
|
cpython/253_test_urllibnet.py
|
19
|
#!/usr/bin/env python
import unittest
from test import test_support
import socket
import urllib
import sys
import os
import mimetools
class URLTimeoutTest(unittest.TestCase):
TIMEOUT = 10.0
def setUp(self):
socket.setdefaulttimeout(self.TIMEOUT)
def tearDown(self):
socket.setdefaulttimeout(None)
def testURLread(self):
f = urllib.urlopen("http://www.python.org/")
x = f.read()
class urlopenNetworkTests(unittest.TestCase):
"""Tests urllib.urlopen using the network.
These tests are not exhaustive. Assuming that testing using files does a
good job overall of some of the basic interface features. There are no
tests exercising the optional 'data' and 'proxies' arguments. No tests
for transparent redirection have been written.
setUp is not used for always constructing a connection to
    http://www.python.org/ since there are a few tests that don't use that address
and making a connection is expensive enough to warrant minimizing unneeded
connections.
"""
def test_basic(self):
# Simple test expected to pass.
open_url = urllib.urlopen("http://www.python.org/")
for attr in ("read", "readline", "readlines", "fileno", "close",
"info", "geturl"):
self.assert_(hasattr(open_url, attr), "object returned from "
"urlopen lacks the %s attribute" % attr)
try:
self.assert_(open_url.read(), "calling 'read' failed")
finally:
open_url.close()
def test_readlines(self):
# Test both readline and readlines.
open_url = urllib.urlopen("http://www.python.org/")
try:
self.assert_(isinstance(open_url.readline(), basestring),
"readline did not return a string")
self.assert_(isinstance(open_url.readlines(), list),
"readlines did not return a list")
finally:
open_url.close()
def test_info(self):
# Test 'info'.
open_url = urllib.urlopen("http://www.python.org/")
try:
info_obj = open_url.info()
finally:
open_url.close()
self.assert_(isinstance(info_obj, mimetools.Message),
"object returned by 'info' is not an instance of "
"mimetools.Message")
self.assertEqual(info_obj.getsubtype(), "html")
def test_geturl(self):
# Make sure same URL as opened is returned by geturl.
URL = "http://www.python.org/"
open_url = urllib.urlopen(URL)
try:
gotten_url = open_url.geturl()
finally:
open_url.close()
self.assertEqual(gotten_url, URL)
def test_fileno(self):
if (sys.platform in ('win32',) or
not hasattr(os, 'fdopen')):
# On Windows, socket handles are not file descriptors; this
# test can't pass on Windows.
return
# Make sure fd returned by fileno is valid.
open_url = urllib.urlopen("http://www.python.org/")
fd = open_url.fileno()
FILE = os.fdopen(fd)
try:
self.assert_(FILE.read(), "reading from file created using fd "
"returned by fileno failed")
finally:
FILE.close()
def test_bad_address(self):
# Make sure proper exception is raised when connecting to a bogus
# address.
self.assertRaises(IOError,
# SF patch 809915: In Sep 2003, VeriSign started
# highjacking invalid .com and .net addresses to
# boost traffic to their own site. This test
# started failing then. One hopes the .invalid
# domain will be spared to serve its defined
# purpose.
# urllib.urlopen, "http://www.sadflkjsasadf.com/")
urllib.urlopen, "http://www.python.invalid./")
class urlretrieveNetworkTests(unittest.TestCase):
"""Tests urllib.urlretrieve using the network."""
def test_basic(self):
# Test basic functionality.
file_location,info = urllib.urlretrieve("http://www.python.org/")
self.assert_(os.path.exists(file_location), "file location returned by"
" urlretrieve is not a valid path")
FILE = file(file_location)
try:
self.assert_(FILE.read(), "reading from the file location returned"
" by urlretrieve failed")
finally:
FILE.close()
os.unlink(file_location)
def test_specified_path(self):
# Make sure that specifying the location of the file to write to works.
file_location,info = urllib.urlretrieve("http://www.python.org/",
test_support.TESTFN)
self.assertEqual(file_location, test_support.TESTFN)
self.assert_(os.path.exists(file_location))
FILE = file(file_location)
try:
self.assert_(FILE.read(), "reading from temporary file failed")
finally:
FILE.close()
os.unlink(file_location)
def test_header(self):
# Make sure header returned as 2nd value from urlretrieve is good.
file_location, header = urllib.urlretrieve("http://www.python.org/")
os.unlink(file_location)
self.assert_(isinstance(header, mimetools.Message),
"header is not an instance of mimetools.Message")
def test_main():
test_support.requires('network')
test_support.run_unittest(URLTimeoutTest,
urlopenNetworkTests,
urlretrieveNetworkTests)
if __name__ == "__main__":
test_main()
|
yavuzovski/playground
|
refs/heads/master
|
python/django/RESTTest/.venv/lib/python3.4/site-packages/django/db/backends/sqlite3/features.py
|
49
|
from __future__ import unicode_literals
from django.db import utils
from django.db.backends.base.features import BaseDatabaseFeatures
from django.utils import six
from django.utils.functional import cached_property
from .base import Database
class DatabaseFeatures(BaseDatabaseFeatures):
# SQLite cannot handle us only partially reading from a cursor's result set
# and then writing the same rows to the database in another cursor. This
# setting ensures we always read result sets fully into memory all in one
# go.
can_use_chunked_reads = False
test_db_allows_multiple_connections = False
supports_unspecified_pk = True
supports_timezones = False
supports_1000_query_parameters = False
supports_mixed_date_datetime_comparisons = False
has_bulk_insert = True
supports_foreign_keys = False
supports_column_check_constraints = False
autocommits_when_autocommit_is_off = True
can_introspect_decimal_field = False
can_introspect_positive_integer_field = True
can_introspect_small_integer_field = True
supports_transactions = True
atomic_transactions = False
can_rollback_ddl = True
supports_paramstyle_pyformat = False
supports_sequence_reset = False
can_clone_databases = True
supports_temporal_subtraction = True
ignores_table_name_case = True
@cached_property
def uses_savepoints(self):
return Database.sqlite_version_info >= (3, 6, 8)
@cached_property
def supports_index_column_ordering(self):
return Database.sqlite_version_info >= (3, 3, 0)
@cached_property
def can_release_savepoints(self):
return self.uses_savepoints
@cached_property
def can_share_in_memory_db(self):
return (
six.PY3 and
Database.__name__ == 'sqlite3.dbapi2' and
Database.sqlite_version_info >= (3, 7, 13)
)
@cached_property
def supports_stddev(self):
"""Confirm support for STDDEV and related stats functions
SQLite supports STDDEV as an extension package; so
connection.ops.check_expression_support() can't unilaterally
rule out support for STDDEV. We need to manually check
whether the call works.
"""
with self.connection.cursor() as cursor:
cursor.execute('CREATE TABLE STDDEV_TEST (X INT)')
try:
cursor.execute('SELECT STDDEV(*) FROM STDDEV_TEST')
has_support = True
except utils.DatabaseError:
has_support = False
cursor.execute('DROP TABLE STDDEV_TEST')
return has_support
|
poepublic/shareabouts-bikesharela
|
refs/heads/master
|
src/flavors/bikesharela/scripts/index_images.py
|
2
|
#!/usr/bin/env python
"""
This script is just used to write the image index. To actually resize the images
you could use a command like:
# Resize JPGs in current folder to height=600
mogrify -geometry x600 *.JPG
"""
from __future__ import print_function, unicode_literals
import json
import os
from os.path import dirname, abspath, join as pathjoin
METADATA_ROOT = abspath(pathjoin(abspath(dirname(__file__)), '..', 'static', 'images', 'metadata'))
IMAGES_ROOT = abspath(pathjoin(abspath(dirname(__file__)), '..', 'static', 'images', 'scaled'))
current_index = None
images = None
def write_current_index():
if images is not None:
with open(pathjoin(METADATA_ROOT, current_index + '_images.json'), 'w') as jsonfile:
json.dump({'images': images}, jsonfile)
for _, _, filenames in os.walk(IMAGES_ROOT):
for filename in sorted(filenames):
if filename.lower().endswith('.jpg'):
last_underscore = filename.rfind('_')
index = filename[:last_underscore]
if index != current_index:
# Write image index if we have one
write_current_index()
# Start a new index
images = []
current_index = index
images.append(filename)
write_current_index()
|
bitifirefly/edx-platform
|
refs/heads/master
|
lms/envs/test.py
|
8
|
# -*- coding: utf-8 -*-
"""
This config file runs the simplest dev environment using sqlite, and db-based
sessions. Assumes structure:
/envroot/
/db # This is where it'll write the database file
/edx-platform # The location of this repo
/log # Where we're going to write log files
"""
# We intentionally define lots of variables that aren't used, and
# want to import all variables from base settings files
# pylint: disable=wildcard-import, unused-wildcard-import
# Pylint gets confused by path.py instances, which report themselves as class
# objects. As a result, pylint applies the wrong regex in validating names,
# and throws spurious errors. Therefore, we disable invalid-name checking.
# pylint: disable=invalid-name
from .common import *
import os
from path import Path as path
from uuid import uuid4
from warnings import filterwarnings, simplefilter
from openedx.core.lib.tempdir import mkdtemp_clean
# Silence noisy logs to make troubleshooting easier when tests fail.
import logging
LOG_OVERRIDES = [
('factory.generate', logging.ERROR),
('factory.containers', logging.ERROR),
]
for log_name, log_level in LOG_OVERRIDES:
logging.getLogger(log_name).setLevel(log_level)
# mongo connection settings
MONGO_PORT_NUM = int(os.environ.get('EDXAPP_TEST_MONGO_PORT', '27017'))
MONGO_HOST = os.environ.get('EDXAPP_TEST_MONGO_HOST', 'localhost')
os.environ['DJANGO_LIVE_TEST_SERVER_ADDRESS'] = 'localhost:8000-9000'
THIS_UUID = uuid4().hex[:5]
# can't test start dates with this True, but on the other hand,
# can test everything else :)
FEATURES['DISABLE_START_DATES'] = True
# Most tests don't use the discussion service, so we turn it off to speed them up.
# Tests that do can enable this flag, but must use the UrlResetMixin class to force urls.py
# to reload. For consistency in user-experience, keep the value of this setting in sync with
# the one in cms/envs/test.py
FEATURES['ENABLE_DISCUSSION_SERVICE'] = False
FEATURES['ENABLE_SERVICE_STATUS'] = True
FEATURES['ENABLE_HINTER_INSTRUCTOR_VIEW'] = True
FEATURES['ENABLE_INSTRUCTOR_LEGACY_DASHBOARD'] = True
FEATURES['ENABLE_SHOPPING_CART'] = True
FEATURES['ENABLE_VERIFIED_CERTIFICATES'] = True
FEATURES['ENABLE_CREDIT_API'] = True
# Enable this feature for course staff grade downloads, to enable acceptance tests
FEATURES['ENABLE_S3_GRADE_DOWNLOADS'] = True
FEATURES['ALLOW_COURSE_STAFF_GRADE_DOWNLOADS'] = True
# Toggles embargo on for testing
FEATURES['EMBARGO'] = True
FEATURES['ENABLE_COMBINED_LOGIN_REGISTRATION'] = True
# Need wiki for courseware views to work. TODO (vshnayder): shouldn't need it.
WIKI_ENABLED = True
# Enable a parental consent age limit for testing
PARENTAL_CONSENT_AGE_LIMIT = 13
# Makes the tests run much faster...
SOUTH_TESTS_MIGRATE = False # To disable migrations and use syncdb instead
# Nose Test Runner
TEST_RUNNER = 'django_nose.NoseTestSuiteRunner'
_SYSTEM = 'lms'
_REPORT_DIR = REPO_ROOT / 'reports' / _SYSTEM
_REPORT_DIR.makedirs_p()
_NOSEID_DIR = REPO_ROOT / '.testids' / _SYSTEM
_NOSEID_DIR.makedirs_p()
NOSE_ARGS = [
'--id-file', _NOSEID_DIR / 'noseids',
'--xunit-file', _REPORT_DIR / 'nosetests.xml',
]
# Local Directories
TEST_ROOT = path("test_root")
# Want static files in the same dir for running on jenkins.
STATIC_ROOT = TEST_ROOT / "staticfiles"
STATUS_MESSAGE_PATH = TEST_ROOT / "status_message.json"
COURSES_ROOT = TEST_ROOT / "data"
DATA_DIR = COURSES_ROOT
COMMON_TEST_DATA_ROOT = COMMON_ROOT / "test" / "data"
# Where the content data is checked out. This may not exist on jenkins.
GITHUB_REPO_ROOT = ENV_ROOT / "data"
USE_I18N = True
LANGUAGE_CODE = 'en' # tests assume they will get English.
XQUEUE_INTERFACE = {
"url": "http://sandbox-xqueue.edx.org",
"django_auth": {
"username": "lms",
"password": "***REMOVED***"
},
"basic_auth": ('anant', 'agarwal'),
}
XQUEUE_WAITTIME_BETWEEN_REQUESTS = 5 # seconds
# Don't rely on a real staff grading backend
MOCK_STAFF_GRADING = True
MOCK_PEER_GRADING = True
# TODO (cpennington): We need to figure out how envs/test.py can inject things
# into common.py so that we don't have to repeat this sort of thing
STATICFILES_DIRS = [
COMMON_ROOT / "static",
PROJECT_ROOT / "static",
]
STATICFILES_DIRS += [
(course_dir, COMMON_TEST_DATA_ROOT / course_dir)
for course_dir in os.listdir(COMMON_TEST_DATA_ROOT)
if os.path.isdir(COMMON_TEST_DATA_ROOT / course_dir)
]
# Avoid having to run collectstatic before the unit test suite
# If we don't add these settings, then Django templates that can't
# find pipelined assets will raise a ValueError.
# http://stackoverflow.com/questions/12816941/unit-testing-with-django-pipeline
STATICFILES_STORAGE = 'pipeline.storage.NonPackagingPipelineStorage'
PIPELINE_ENABLED = False
update_module_store_settings(
MODULESTORE,
module_store_options={
'fs_root': TEST_ROOT / "data",
},
xml_store_options={
'data_dir': mkdtemp_clean(dir=TEST_ROOT), # never inadvertently load all the XML courses
},
doc_store_settings={
'host': MONGO_HOST,
'port': MONGO_PORT_NUM,
'db': 'test_xmodule',
'collection': 'test_modulestore{0}'.format(THIS_UUID),
},
)
CONTENTSTORE = {
'ENGINE': 'xmodule.contentstore.mongo.MongoContentStore',
'DOC_STORE_CONFIG': {
'host': MONGO_HOST,
'db': 'xcontent',
'port': MONGO_PORT_NUM,
}
}
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': TEST_ROOT / 'db' / 'edx.db'
},
}
CACHES = {
# This is the cache used for most things.
# In staging/prod envs, the sessions also live here.
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'LOCATION': 'edx_loc_mem_cache',
'KEY_FUNCTION': 'util.memcache.safe_key',
},
# The general cache is what you get if you use our util.cache. It's used for
# things like caching the course.xml file for different A/B test groups.
# We set it to be a DummyCache to force reloading of course.xml in dev.
# In staging environments, we would grab VERSION from data uploaded by the
# push process.
'general': {
'BACKEND': 'django.core.cache.backends.dummy.DummyCache',
'KEY_PREFIX': 'general',
'VERSION': 4,
'KEY_FUNCTION': 'util.memcache.safe_key',
},
'mongo_metadata_inheritance': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'LOCATION': os.path.join(tempfile.gettempdir(), 'mongo_metadata_inheritance'),
'TIMEOUT': 300,
'KEY_FUNCTION': 'util.memcache.safe_key',
},
'loc_cache': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'LOCATION': 'edx_location_mem_cache',
},
'course_structure_cache': {
'BACKEND': 'django.core.cache.backends.dummy.DummyCache',
},
}
# Dummy secret key for dev
SECRET_KEY = '85920908f28904ed733fe576320db18cabd7b6cd'
# hide ratelimit warnings while running tests
filterwarnings('ignore', message='No request passed to the backend, unable to rate-limit')
# Ignore deprecation warnings (so we don't clutter Jenkins builds/production)
# https://docs.python.org/2/library/warnings.html#the-warnings-filter
# Change to "default" to see the first instance of each hit
# or "error" to convert all into errors
simplefilter('ignore')
############################# SECURITY SETTINGS ################################
# Default to advanced security in common.py, so tests can reset here to use
# a simpler security model
FEATURES['ENFORCE_PASSWORD_POLICY'] = False
FEATURES['ENABLE_MAX_FAILED_LOGIN_ATTEMPTS'] = False
FEATURES['SQUELCH_PII_IN_LOGS'] = False
FEATURES['PREVENT_CONCURRENT_LOGINS'] = False
FEATURES['ADVANCED_SECURITY'] = False
PASSWORD_MIN_LENGTH = None
PASSWORD_COMPLEXITY = {}
######### Third-party auth ##########
FEATURES['ENABLE_THIRD_PARTY_AUTH'] = True
AUTHENTICATION_BACKENDS = (
'social.backends.google.GoogleOAuth2',
'social.backends.linkedin.LinkedinOAuth2',
'social.backends.facebook.FacebookOAuth2',
'social.backends.twitter.TwitterOAuth',
'third_party_auth.dummy.DummyBackend',
'third_party_auth.saml.SAMLAuthBackend',
'third_party_auth.lti.LTIAuthBackend',
) + AUTHENTICATION_BACKENDS
################################## OPENID #####################################
FEATURES['AUTH_USE_OPENID'] = True
FEATURES['AUTH_USE_OPENID_PROVIDER'] = True
################################## SHIB #######################################
FEATURES['AUTH_USE_SHIB'] = True
FEATURES['SHIB_DISABLE_TOS'] = True
FEATURES['RESTRICT_ENROLL_BY_REG_METHOD'] = True
OPENID_CREATE_USERS = False
OPENID_UPDATE_DETAILS_FROM_SREG = True
OPENID_USE_AS_ADMIN_LOGIN = False
OPENID_PROVIDER_TRUSTED_ROOTS = ['*']
############################## OAUTH2 Provider ################################
FEATURES['ENABLE_OAUTH2_PROVIDER'] = True
########################### External REST APIs #################################
FEATURES['ENABLE_MOBILE_REST_API'] = True
FEATURES['ENABLE_MOBILE_SOCIAL_FACEBOOK_FEATURES'] = True
FEATURES['ENABLE_VIDEO_ABSTRACTION_LAYER_API'] = True
FEATURES['ENABLE_COURSE_BLOCKS_NAVIGATION_API'] = True
FEATURES['ENABLE_RENDER_XBLOCK_API'] = True
###################### Payment ##############################3
# Enable fake payment processing page
FEATURES['ENABLE_PAYMENT_FAKE'] = True
# Configure the payment processor to use the fake processing page
# Since both the fake payment page and the shoppingcart app are using
# the same settings, we can generate this randomly and guarantee
# that they are using the same secret.
from random import choice
from string import letters, digits, punctuation # pylint: disable=deprecated-module
RANDOM_SHARED_SECRET = ''.join(
choice(letters + digits + punctuation)
for x in range(250)
)
CC_PROCESSOR_NAME = 'CyberSource2'
CC_PROCESSOR['CyberSource2']['SECRET_KEY'] = RANDOM_SHARED_SECRET
CC_PROCESSOR['CyberSource2']['ACCESS_KEY'] = "0123456789012345678901"
CC_PROCESSOR['CyberSource2']['PROFILE_ID'] = "edx"
CC_PROCESSOR['CyberSource2']['PURCHASE_ENDPOINT'] = "/shoppingcart/payment_fake"
FEATURES['STORE_BILLING_INFO'] = True
########################### SYSADMIN DASHBOARD ################################
FEATURES['ENABLE_SYSADMIN_DASHBOARD'] = True
GIT_REPO_DIR = TEST_ROOT / "course_repos"
################################# CELERY ######################################
CELERY_ALWAYS_EAGER = True
CELERY_RESULT_BACKEND = 'djcelery.backends.cache:CacheBackend'
######################### MARKETING SITE ###############################
MKTG_URL_LINK_MAP = {
'ABOUT': 'about',
'CONTACT': 'contact',
'FAQ': 'help',
'COURSES': 'courses',
'ROOT': 'root',
'TOS': 'tos',
'HONOR': 'honor',
'PRIVACY': 'privacy',
'JOBS': 'jobs',
'NEWS': 'news',
'PRESS': 'press',
'BLOG': 'blog',
'DONATE': 'donate',
# Verified Certificates
'WHAT_IS_VERIFIED_CERT': 'verified-certificate',
}
############################ STATIC FILES #############################
DEFAULT_FILE_STORAGE = 'django.core.files.storage.FileSystemStorage'
MEDIA_ROOT = TEST_ROOT / "uploads"
MEDIA_URL = "/static/uploads/"
STATICFILES_DIRS.append(("uploads", MEDIA_ROOT))
_NEW_STATICFILES_DIRS = []
# Strip out any static files that aren't in the repository root
# so that the tests can run with only the edx-platform directory checked out
for static_dir in STATICFILES_DIRS:
# Handle both tuples and non-tuple directory definitions
try:
_, data_dir = static_dir
except ValueError:
data_dir = static_dir
if data_dir.startswith(REPO_ROOT):
_NEW_STATICFILES_DIRS.append(static_dir)
STATICFILES_DIRS = _NEW_STATICFILES_DIRS
FILE_UPLOAD_TEMP_DIR = TEST_ROOT / "uploads"
FILE_UPLOAD_HANDLERS = (
'django.core.files.uploadhandler.MemoryFileUploadHandler',
'django.core.files.uploadhandler.TemporaryFileUploadHandler',
)
########################### Server Ports ###################################
# These ports are carefully chosen so that if the browser needs to
# access them, they will be available through the SauceLabs SSH tunnel
LETTUCE_SERVER_PORT = 8003
XQUEUE_PORT = 8040
YOUTUBE_PORT = 8031
LTI_PORT = 8765
VIDEO_SOURCE_PORT = 8777
################### Make tests faster
#http://slacy.com/blog/2012/04/make-your-tests-faster-in-django-1-4/
PASSWORD_HASHERS = (
# 'django.contrib.auth.hashers.PBKDF2PasswordHasher',
# 'django.contrib.auth.hashers.PBKDF2SHA1PasswordHasher',
# 'django.contrib.auth.hashers.BCryptPasswordHasher',
'django.contrib.auth.hashers.SHA1PasswordHasher',
'django.contrib.auth.hashers.MD5PasswordHasher',
# 'django.contrib.auth.hashers.CryptPasswordHasher',
)
### This enables the Metrics tab for the Instructor dashboard ###########
FEATURES['CLASS_DASHBOARD'] = True
################### Make tests quieter
# OpenID spews messages like this to stderr, we don't need to see them:
# Generated checkid_setup request to http://testserver/openid/provider/login/ with assocication {HMAC-SHA1}{51d49995}{s/kRmA==}
import openid.oidutil
openid.oidutil.log = lambda message, level=0: None
PLATFORM_NAME = "edX"
SITE_NAME = "edx.org"
# set up some testing for microsites
MICROSITE_CONFIGURATION = {
"test_microsite": {
"domain_prefix": "testmicrosite",
"university": "test_microsite",
"platform_name": "Test Microsite",
"logo_image_url": "test_microsite/images/header-logo.png",
"email_from_address": "test_microsite@edx.org",
"payment_support_email": "test_microsite@edx.org",
"ENABLE_MKTG_SITE": False,
"SITE_NAME": "test_microsite.localhost",
"course_org_filter": "TestMicrositeX",
"course_about_show_social_links": False,
"css_overrides_file": "test_microsite/css/test_microsite.css",
"show_partners": False,
"show_homepage_promo_video": False,
"course_index_overlay_text": "This is a Test Microsite Overlay Text.",
"course_index_overlay_logo_file": "test_microsite/images/header-logo.png",
"homepage_overlay_html": "<h1>This is a Test Microsite Overlay HTML</h1>",
"ALWAYS_REDIRECT_HOMEPAGE_TO_DASHBOARD_FOR_AUTHENTICATED_USER": False,
"COURSE_CATALOG_VISIBILITY_PERMISSION": "see_in_catalog",
"COURSE_ABOUT_VISIBILITY_PERMISSION": "see_about_page",
"ENABLE_SHOPPING_CART": True,
"ENABLE_PAID_COURSE_REGISTRATION": True,
"SESSION_COOKIE_DOMAIN": "test_microsite.localhost",
},
"microsite_with_logistration": {
"domain_prefix": "logistration",
"university": "logistration",
"platform_name": "Test logistration",
"logo_image_url": "test_microsite/images/header-logo.png",
"email_from_address": "test_microsite@edx.org",
"payment_support_email": "test_microsite@edx.org",
"ENABLE_MKTG_SITE": False,
"ENABLE_COMBINED_LOGIN_REGISTRATION": True,
"SITE_NAME": "test_microsite.localhost",
"course_org_filter": "LogistrationX",
"course_about_show_social_links": False,
"css_overrides_file": "test_microsite/css/test_microsite.css",
"show_partners": False,
"show_homepage_promo_video": False,
"course_index_overlay_text": "Logistration.",
"course_index_overlay_logo_file": "test_microsite/images/header-logo.png",
"homepage_overlay_html": "<h1>This is a Logistration HTML</h1>",
"ALWAYS_REDIRECT_HOMEPAGE_TO_DASHBOARD_FOR_AUTHENTICATED_USER": False,
"COURSE_CATALOG_VISIBILITY_PERMISSION": "see_in_catalog",
"COURSE_ABOUT_VISIBILITY_PERMISSION": "see_about_page",
"ENABLE_SHOPPING_CART": True,
"ENABLE_PAID_COURSE_REGISTRATION": True,
"SESSION_COOKIE_DOMAIN": "test_logistration.localhost",
},
"default": {
"university": "default_university",
"domain_prefix": "www",
}
}
MICROSITE_ROOT_DIR = COMMON_ROOT / 'test' / 'test_microsites'
MICROSITE_TEST_HOSTNAME = 'testmicrosite.testserver'
MICROSITE_LOGISTRATION_HOSTNAME = 'logistration.testserver'
FEATURES['USE_MICROSITES'] = True
# add extra template directory for test-only templates
MAKO_TEMPLATES['main'].extend([
COMMON_ROOT / 'test' / 'templates'
])
# Setting for the testing of Software Secure Result Callback
VERIFY_STUDENT["SOFTWARE_SECURE"] = {
"API_ACCESS_KEY": "BBBBBBBBBBBBBBBBBBBB",
"API_SECRET_KEY": "CCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCC",
}
VIDEO_CDN_URL = {
'CN': 'http://api.xuetangx.com/edx/video?s3_url='
}
######### dashboard git log settings #########
MONGODB_LOG = {
'host': MONGO_HOST,
'port': MONGO_PORT_NUM,
'user': '',
'password': '',
'db': 'xlog',
}
# Enable EdxNotes for tests.
FEATURES['ENABLE_EDXNOTES'] = True
# Enable teams feature for tests.
FEATURES['ENABLE_TEAMS'] = True
# Add milestones to Installed apps for testing
INSTALLED_APPS += ('milestones', 'openedx.core.djangoapps.call_stack_manager')
# Enable courseware search for tests
FEATURES['ENABLE_COURSEWARE_SEARCH'] = True
# Enable dashboard search for tests
FEATURES['ENABLE_DASHBOARD_SEARCH'] = True
# Use MockSearchEngine as the search engine for test scenario
SEARCH_ENGINE = "search.tests.mock_search_engine.MockSearchEngine"
FACEBOOK_APP_SECRET = "Test"
FACEBOOK_APP_ID = "Test"
FACEBOOK_API_VERSION = "v2.2"
######### custom courses #########
INSTALLED_APPS += ('ccx',)
FEATURES['CUSTOM_COURSES_EDX'] = True
# Set dummy values for profile image settings.
PROFILE_IMAGE_BACKEND = {
'class': 'storages.backends.overwrite.OverwriteStorage',
'options': {
'location': MEDIA_ROOT,
'base_url': 'http://example-storage.com/profile-images/',
},
}
PROFILE_IMAGE_DEFAULT_FILENAME = 'default'
PROFILE_IMAGE_DEFAULT_FILE_EXTENSION = 'png'
PROFILE_IMAGE_SECRET_KEY = 'secret'
PROFILE_IMAGE_MAX_BYTES = 1024 * 1024
PROFILE_IMAGE_MIN_BYTES = 100
# Enable the LTI provider feature for testing
FEATURES['ENABLE_LTI_PROVIDER'] = True
INSTALLED_APPS += ('lti_provider',)
AUTHENTICATION_BACKENDS += ('lti_provider.users.LtiBackend',)
# ORGANIZATIONS
FEATURES['ORGANIZATIONS_APP'] = True
|
noisemaster/AdamTestBot
|
refs/heads/master
|
telegram/userprofilephotos.py
|
2
|
#!/usr/bin/env python
#
# A library that provides a Python interface to the Telegram Bot API
# Copyright (C) 2015-2016
# Leandro Toledo de Souza <devs@python-telegram-bot.org>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser Public License for more details.
#
# You should have received a copy of the GNU Lesser Public License
# along with this program. If not, see [http://www.gnu.org/licenses/].
"""This module contains a object that represents a Telegram
UserProfilePhotos."""
from telegram import PhotoSize, TelegramObject
class UserProfilePhotos(TelegramObject):
"""This object represents a Telegram UserProfilePhotos.
Attributes:
total_count (int):
photos (List[List[:class:`telegram.PhotoSize`]]):
Args:
total_count (int):
photos (List[List[:class:`telegram.PhotoSize`]]):
"""
def __init__(self,
total_count,
photos):
# Required
self.total_count = int(total_count)
self.photos = photos
@staticmethod
def de_json(data):
"""
Args:
data (str):
Returns:
telegram.UserProfilePhotos:
"""
if not data:
return None
data['photos'] = [PhotoSize.de_list(photo) for photo in data['photos']]
return UserProfilePhotos(**data)
def to_dict(self):
"""
Returns:
dict:
"""
data = super(UserProfilePhotos, self).to_dict()
data['photos'] = []
for photo in self.photos:
data['photos'].append([x.to_dict() for x in photo])
return data
|
HwisooSo/gemV-update
|
refs/heads/gemV
|
ext/ply/test/yacc_badargs.py
|
174
|
# -----------------------------------------------------------------------------
# yacc_badargs.py
#
# Rules with wrong # args
# -----------------------------------------------------------------------------
import sys
sys.tracebacklimit = 0
sys.path.insert(0,"..")
import ply.yacc as yacc
from calclex import tokens
# Parsing rules
precedence = (
('left','PLUS','MINUS'),
('left','TIMES','DIVIDE'),
('right','UMINUS'),
)
# dictionary of names
names = { }
def p_statement_assign(t,s):
'statement : NAME EQUALS expression'
names[t[1]] = t[3]
def p_statement_expr():
'statement : expression'
print(t[1])
def p_expression_binop(t):
'''expression : expression PLUS expression
| expression MINUS expression
| expression TIMES expression
| expression DIVIDE expression'''
if t[2] == '+' : t[0] = t[1] + t[3]
elif t[2] == '-': t[0] = t[1] - t[3]
elif t[2] == '*': t[0] = t[1] * t[3]
elif t[2] == '/': t[0] = t[1] / t[3]
def p_expression_uminus(t):
'expression : MINUS expression %prec UMINUS'
t[0] = -t[2]
def p_expression_group(t):
'expression : LPAREN expression RPAREN'
t[0] = t[2]
def p_expression_number(t):
'expression : NUMBER'
t[0] = t[1]
def p_expression_name(t):
'expression : NAME'
try:
t[0] = names[t[1]]
except LookupError:
print("Undefined name '%s'" % t[1])
t[0] = 0
def p_error(t):
print("Syntax error at '%s'" % t.value)
yacc.yacc()
|
lmazuel/ansible
|
refs/heads/devel
|
test/integration/targets/uri/files/testserver.py
|
222
|
import sys
if __name__ == '__main__':
if sys.version_info[0] >= 3:
import http.server
import socketserver
PORT = int(sys.argv[1])
Handler = http.server.SimpleHTTPRequestHandler
httpd = socketserver.TCPServer(("", PORT), Handler)
httpd.serve_forever()
else:
import mimetypes
mimetypes.init()
mimetypes.add_type('application/json', '.json')
import SimpleHTTPServer
SimpleHTTPServer.test()
|
liangjiaxing/sympy
|
refs/heads/master
|
sympy/polys/domains/pythonrationalfield.py
|
117
|
"""Implementation of :class:`PythonRationalField` class. """
from __future__ import print_function, division
from sympy.polys.domains.rationalfield import RationalField
from sympy.polys.domains.groundtypes import PythonInteger, PythonRational, SymPyRational
from sympy.polys.polyerrors import CoercionFailed
from sympy.utilities import public
@public
class PythonRationalField(RationalField):
"""Rational field based on Python rational number type. """
dtype = PythonRational
zero = dtype(0)
one = dtype(1)
alias = 'QQ_python'
def __init__(self):
pass
def get_ring(self):
"""Returns ring associated with ``self``. """
from sympy.polys.domains import PythonIntegerRing
return PythonIntegerRing()
def to_sympy(self, a):
"""Convert `a` to a SymPy object. """
return SymPyRational(a.numerator, a.denominator)
def from_sympy(self, a):
"""Convert SymPy's Rational to `dtype`. """
if a.is_Rational:
return PythonRational(a.p, a.q)
elif a.is_Float:
from sympy.polys.domains import RR
p, q = RR.to_rational(a)
return PythonRational(int(p), int(q))
else:
raise CoercionFailed("expected `Rational` object, got %s" % a)
def from_ZZ_python(K1, a, K0):
"""Convert a Python `int` object to `dtype`. """
return PythonRational(a)
def from_QQ_python(K1, a, K0):
"""Convert a Python `Fraction` object to `dtype`. """
return a
def from_ZZ_gmpy(K1, a, K0):
"""Convert a GMPY `mpz` object to `dtype`. """
return PythonRational(PythonInteger(a))
def from_QQ_gmpy(K1, a, K0):
"""Convert a GMPY `mpq` object to `dtype`. """
return PythonRational(PythonInteger(a.numer()),
PythonInteger(a.denom()))
def from_RealField(K1, a, K0):
"""Convert a mpmath `mpf` object to `dtype`. """
p, q = K0.to_rational(a)
return PythonRational(int(p), int(q))
def numer(self, a):
"""Returns numerator of `a`. """
return a.numerator
def denom(self, a):
"""Returns denominator of `a`. """
return a.denominator
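# A minimal illustrative sketch (hypothetical values) of the conversion
# helpers above: round-tripping a SymPy Rational through this domain.
if __name__ == '__main__':
    QQ_example = PythonRationalField()
    x = QQ_example.from_sympy(SymPyRational(3, 4))    # SymPy Rational -> PythonRational
    print(QQ_example.to_sympy(x), QQ_example.numer(x), QQ_example.denom(x))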
|
wwfifi/uliweb
|
refs/heads/master
|
uliweb/manage.py
|
1
|
#!/usr/bin/env python
import sys, os
import logging
import inspect
from optparse import make_option
import uliweb
from uliweb.core.commands import Command, CommandManager
from uliweb.core import SimpleFrame
apps_dir = 'apps'
__commands__ = {}
log = logging.getLogger('uliweb.console')
def get_commands(global_options):
global __commands__
def check(c):
return (inspect.isclass(c) and
issubclass(c, Command) and c is not Command and c is not CommandManager)
def find_mod_commands(mod):
for name in dir(mod):
c = getattr(mod, name)
if check(c):
register_command(c)
def collect_commands():
from uliweb import get_apps, get_app_dir
from uliweb.utils.common import is_pyfile_exist
apps = get_apps(global_options.apps_dir, settings_file=global_options.settings,
local_settings_file=global_options.local_settings)
for f in apps:
path = get_app_dir(f)
if is_pyfile_exist(path, 'commands'):
m = '%s.commands' % f
mod = __import__(m, fromlist=['*'])
find_mod_commands(mod)
collect_commands()
return __commands__
def register_command(kclass):
global __commands__
__commands__[kclass.name] = kclass
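# Illustrative sketch of the per-app commands module that collect_commands()
# above scans for (apps/<appname>/commands.py); the command name and body
# below are hypothetical:
#
# from uliweb.core.commands import Command
#
# class HelloCommand(Command):
#     name = 'hello'
#     help = 'Print a greeting.'
#
#     def handle(self, options, global_options, *args):
#         print 'hello from my app'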
workpath = os.path.join(os.path.dirname(__file__), 'lib')
if workpath not in sys.path:
sys.path.insert(0, os.path.join(workpath, 'lib'))
def install_config(apps_dir):
from uliweb.utils import pyini
#user can configure custom PYTHONPATH, so that uliweb can add these paths
#to sys.path, and user can manage third party or public apps in a separate
#directory
config_filename = os.path.join(apps_dir, 'config.ini')
if os.path.exists(config_filename):
c = pyini.Ini(config_filename)
paths = c.GLOBAL.get('PYTHONPATH', [])
if paths:
for p in reversed(paths):
p = os.path.abspath(os.path.normpath(p))
if not p in sys.path:
sys.path.insert(0, p)
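# Illustrative (hypothetical) config.ini consumed by install_config() above;
# pyini values are Python literals, so PYTHONPATH is written as a list:
#
# [GLOBAL]
# PYTHONPATH = ['../shared_apps', '/opt/third_party_libs']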
def make_application(debug=None, apps_dir='apps', project_dir=None,
include_apps=None, debug_console=True, settings_file=None,
local_settings_file=None, start=True, default_settings=None,
dispatcher_cls=None, dispatcher_kwargs=None, debug_cls=None, debug_kwargs=None,
reuse=True, verbose=False, pythonpath=None):
"""
Make an application object
"""
from uliweb.utils.common import import_attr
from werkzeug.debug import DebuggedApplication
    #if reuse is set, create the application object only once
if reuse and hasattr(SimpleFrame.__global__, 'application') and SimpleFrame.__global__.application:
return SimpleFrame.__global__.application
#process settings and local_settings
settings_file = settings_file or os.environ.get('SETTINGS', 'settings.ini')
local_settings_file = local_settings_file or os.environ.get('LOCAL_SETTINGS', 'local_settings.ini')
dispatcher_cls = dispatcher_cls or SimpleFrame.Dispatcher
dispatcher_kwargs = dispatcher_kwargs or {}
if project_dir:
apps_dir = os.path.abspath(os.path.normpath(os.path.join(project_dir, 'apps')))
if not project_dir:
project_dir = os.path.abspath(os.path.normpath(os.path.abspath(os.path.join(apps_dir, '..'))))
if pythonpath:
if isinstance(pythonpath, str):
pythonpath = pythonpath.split(';')
for x in pythonpath:
if x not in sys.path:
sys.path.insert(0, x)
if project_dir not in sys.path:
sys.path.insert(0, project_dir)
if apps_dir not in sys.path:
sys.path.insert(0, apps_dir)
install_config(apps_dir)
application = app = dispatcher_cls(apps_dir=apps_dir,
include_apps=include_apps,
settings_file=settings_file,
local_settings_file=local_settings_file,
start=start,
default_settings=default_settings,
reset=True,
**dispatcher_kwargs)
if verbose:
log.info(' * settings file is "%s"' % settings_file)
log.info(' * local settings file is "%s"' % local_settings_file)
    #set the global application object
SimpleFrame.__global__.application = app
#process wsgi middlewares
middlewares = []
parameters = {}
for name, v in uliweb.settings.get('WSGI_MIDDLEWARES', {}).iteritems():
order, kwargs = 500, {}
if not v:
continue
if isinstance(v, (list, tuple)):
if len(v) > 3:
                logging.error('WSGI_MIDDLEWARE %s definition is not right' % name)
                raise uliweb.UliwebError('WSGI_MIDDLEWARE %s definition is not right' % name)
cls = v[0]
if len(v) == 2:
if isinstance(v[1], int):
order = v[1]
else:
kwargs = v[1]
else:
order, kwargs = v[1], v[2]
else:
cls = v
middlewares.append((order, name))
parameters[name] = cls, kwargs
middlewares.sort(cmp=lambda x, y: cmp(x[0], y[0]))
for name in reversed([x[1] for x in middlewares]):
clspath, kwargs = parameters[name]
cls = import_attr(clspath)
app = cls(app, **kwargs)
debug_flag = uliweb.settings.GLOBAL.DEBUG
if debug or (debug is None and debug_flag):
if not debug_cls:
debug_cls = DebuggedApplication
log.setLevel(logging.DEBUG)
log.info(' * Loading DebuggedApplication...')
app.debug = True
app = debug_cls(app, uliweb.settings.GLOBAL.get('DEBUG_CONSOLE', False))
return app
def make_simple_application(apps_dir='apps', project_dir=None, include_apps=None,
settings_file='', local_settings_file='',
default_settings=None, dispatcher_cls=None, dispatcher_kwargs=None, reuse=True,
pythonpath=None):
settings = {'ORM/AUTO_DOTRANSACTION':False}
settings.update(default_settings or {})
return make_application(apps_dir=apps_dir, project_dir=project_dir,
include_apps=include_apps, debug_console=False, debug=False,
settings_file=settings_file, local_settings_file=local_settings_file,
start=False, default_settings=settings, dispatcher_cls=dispatcher_cls,
dispatcher_kwargs=dispatcher_kwargs, reuse=reuse, pythonpath=pythonpath)
class MakeAppCommand(Command):
name = 'makeapp'
args = 'appname'
help = 'Create a new app according the appname parameter.'
check_apps_dirs = False
def handle(self, options, global_options, *args):
from uliweb.utils.common import extract_dirs
if not args:
appname = ''
while not appname:
appname = raw_input('Please enter app name:')
apps = [appname]
else:
apps = args
for appname in apps:
ans = '-1'
app_path = appname.replace('.', '//')
if os.path.exists('apps'):
path = os.path.join('apps', app_path)
else:
path = app_path
if os.path.exists(path):
if global_options.yes:
ans = 'y'
while ans not in ('y', 'n'):
                    ans = raw_input('The app directory already exists, do you want to overwrite it?(y/n)[n]')
if not ans:
ans = 'n'
else:
ans = 'y'
if ans == 'y':
extract_dirs('uliweb', 'template_files/app', path, verbose=global_options.verbose)
register_command(MakeAppCommand)
class MakePkgCommand(Command):
name = 'makepkg'
args = '<pkgname1, pkgname2, ...>'
help = 'Create new python package folders.'
check_apps_dirs = False
def handle(self, options, global_options, *args):
if not args:
while not args:
args = raw_input('Please enter python package name:')
args = [args]
for p in args:
if not os.path.exists(p):
os.makedirs(p)
initfile = os.path.join(p, '__init__.py')
if not os.path.exists(initfile):
f = open(initfile, 'w')
f.close()
register_command(MakePkgCommand)
class MakeProjectCommand(Command):
name = 'makeproject'
help = 'Create a new project directory according the project name'
args = 'project_name'
check_apps_dirs = False
def handle(self, options, global_options, *args):
from uliweb.utils.common import extract_dirs
from uliweb.core.template import template_file
if not args:
project_name = ''
while not project_name:
project_name = raw_input('Please enter project name:')
else:
project_name = args[0]
ans = '-1'
if os.path.exists(project_name):
if global_options.yes:
ans = 'y'
while ans not in ('y', 'n'):
                ans = raw_input('The project directory already exists, do you want to overwrite it?(y/n)[n]')
if not ans:
ans = 'n'
else:
ans = 'y'
if ans == 'y':
extract_dirs('uliweb', 'template_files/project', project_name, verbose=global_options.verbose)
#template setup.py
setup_file = os.path.join(project_name, 'setup.py')
text = template_file(setup_file, {'project_name':project_name})
with open(setup_file, 'w') as f:
f.write(text)
#rename .gitignore.template to .gitignore
os.rename(os.path.join(project_name, '.gitignore.template'), os.path.join(project_name, '.gitignore'))
register_command(MakeProjectCommand)
class SupportCommand(Command):
name = 'support'
help = 'Add special support to existed project, such as: gae, dotcloud, sae, bae, fcgi, heroku, tornado, gevent, gevent-socketio'
args = 'supported_type'
check_apps_dirs = True
def handle(self, options, global_options, *args):
from uliweb.utils.common import copy_dir
from uliweb.utils.common import pkg
_types = []
support_dirs = {}
app_dirs = [os.path.join(SimpleFrame.get_app_dir(appname), 'template_files/support') for appname in self.get_apps(global_options)]
for path in [pkg.resource_filename('uliweb', 'template_files/support/')] + app_dirs:
if os.path.exists(path):
for f in os.listdir(path):
_path = os.path.join(path, f)
if os.path.isdir(_path) and not f.startswith('.'):
_name = f
_types.append(_name)
support_dirs[_name] = _path
support_type = args[0] if args else ''
while not support_type in _types and support_type != 'quit':
print 'Supported types:\n'
print ' ' + '\n '.join(sorted(_types))
print
support_type = raw_input('Please enter support type[quit to exit]:')
if support_type != 'quit':
src_dir = support_dirs[support_type]
copy_dir(src_dir, '.', verbose=global_options.verbose)
register_command(SupportCommand)
class ConfigCommand(Command):
name = 'config'
help = 'Output config info for different support, such as: nginx, uwsgi, etc.'
args = 'supported_type'
check_apps_dirs = True
def handle(self, options, global_options, *args):
from uliweb.utils.common import pkg
from uliweb.utils.pyini import Ini
from uliweb.core.commands import get_input
from uliweb.core.template import template_file
import glob
_types = []
config_files = {}
app_dirs = [os.path.join(SimpleFrame.get_app_dir(appname), 'template_files/config') for appname in self.get_apps(global_options)]
for path in [pkg.resource_filename('uliweb', 'template_files/config/')] + app_dirs:
if os.path.exists(path):
files = glob.glob(os.path.join(path, '*.conf'))
if files:
for f in files:
_name = os.path.splitext(os.path.basename(f))[0]
_types.append(_name)
config_files[_name] = f
support_type = args[0] if args else ''
while not support_type in _types and support_type != 'quit':
print 'Supported types:\n'
print ' ' + '\n '.join(sorted(_types))
print
support_type = raw_input('Please enter support type[quit to exit]:')
if support_type != 'quit':
conf_file = config_files[support_type]
conf_ini = conf_file[:-5] + '.ini'
if not os.path.exists(conf_file):
log.error("%s config can't be found" % support_type)
sys.exit(1)
data = {}
data['project_dir'] = os.path.abspath(os.getcwd())
data['project'] = os.path.basename(data['project_dir'])
if os.path.exists(conf_ini):
x = Ini(conf_ini)
for k, v in x.INPUT.items():
if isinstance(v, (tuple, list)):
if len(v) == 2:
prompt, default = v
else:
prompt = v[0]
default = ''
else:
prompt, default = v or '', ''
if not prompt.strip():
prompt = 'Please input %s[%s]:' % (k, default)
r = get_input(prompt, default=default)
data[k] = r
data.update(x.get('DEFAULT', {}))
print
print template_file(conf_file, data)
register_command(ConfigCommand)
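# Illustrative (hypothetical) layout of a <support_type>.ini file read by
# ConfigCommand above: each INPUT entry is a (prompt, default) pair and the
# DEFAULT section is merged into the template context as-is.
#
# [INPUT]
# server_name = ['Please input server_name[localhost]:', 'localhost']
# port = ['', '8080']
#
# [DEFAULT]
# workers = 4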
class ExportStaticCommand(Command):
"""
    JS and CSS compression follows this rule: if the filename includes
    '.min.' or '.pack.', the file is not processed.
"""
name = 'exportstatic'
help = 'Export all installed apps static directory to output directory.'
args = 'output_directory [app1, app2, ...]'
check_apps_dirs = True
option_list = (
make_option('-c', '--check', action='store_true',
help='Check if the output files or directories have conflicts.'),
make_option('--js', action='store_true', dest='js', default=False,
help='Enable javascript compress process.'),
make_option('--css', action='store_true', dest='css', default=False,
help='Enable css compress process.'),
make_option('--auto', action='store_true', dest='auto', default=False,
help='Enable javascript and css both compress process.'),
)
def handle(self, options, global_options, *args):
from uliweb.utils.common import copy_dir_with_check
from uliweb import settings
self.get_application(global_options)
if not args:
print >>sys.stderr, "Error: outputdir should be a directory and existed"
sys.exit(0)
else:
outputdir = os.path.abspath(args[0])
if global_options.verbose:
print "Export direcotry is %s ..." % outputdir
if not args[1:]:
apps = self.get_apps(global_options)
else:
apps = args[1:]
dirs = [os.path.join(SimpleFrame.get_app_dir(appname), 'static') for appname in apps]
self.options = options
self.global_options = global_options
copy_dir_with_check(dirs, outputdir, False, options.check, processor=self.process_file)
self.process_combine(outputdir, global_options.verbose)
def process_combine(self, outputdir, verbose=False):
#automatically process static combine
from uliweb.contrib.template import init_static_combine
from rjsmin.rjsmin import jsmin
from rcssmin.rcssmin import cssmin
import glob
#delete combined files
for f in glob.glob(os.path.join(outputdir, '_cmb_*')):
try:
os.remove(f)
except:
print "Error: static file [%s] can't be deleted"
d = init_static_combine()
for k, v in d.items():
filename = os.path.join(outputdir, k)
if verbose:
print 'Process ... %s' % filename
readme = os.path.splitext(filename)[0] + '.txt'
with open(filename, 'w') as f:
ext = os.path.splitext(k)[1]
if ext == '.js':
processor = jsmin
elif ext == '.css':
processor = cssmin
else:
print "Error: Unsupport type %s" % ext
sys.exit(1)
for x in v:
fname = os.path.join(outputdir, x)
if verbose:
print ' add %s' % fname
kwargs = {}
if ext == '.css':
kwargs = {'base_dir':os.path.dirname(x)}
f.write(processor(open(fname).read(), **kwargs))
f.write('\n')
with open(readme, 'w') as r:
for x in v:
r.write(x)
r.write('\n')
def process_file(self, sfile, dpath, dfile):
from rjsmin.rjsmin import jsmin
from rcssmin.rcssmin import cssmin
js_compressor = None
css_compressor = None
if sfile.endswith('.js') and ('.min.' not in sfile and '.pack.' not in sfile) and (self.options.js or self.options.auto):
open(dfile, 'w').write(jsmin(open(sfile).read()))
if self.global_options.verbose:
print 'Compress %s to %s' % (sfile, dfile)
return True
if sfile.endswith('.css') and ('.min.' not in sfile and '.pack.' not in sfile) and (self.options.css or self.options.auto):
open(dfile, 'w').write(cssmin(open(sfile).read()))
if self.global_options.verbose:
print 'Compress %s to %s' % (sfile, dfile)
return True
register_command(ExportStaticCommand)
class ExportCommand(Command):
name = 'export'
help = 'Export all installed apps or specified module source files to output directory.'
args = '[module1 module2]'
check_apps_dirs = True
option_list = (
make_option('-d', dest='outputdir',
help='Output directory of exported files.'),
)
def handle(self, options, global_options, *args):
from uliweb.utils.common import extract_dirs
if not options.outputdir:
print >>sys.stderr, "Error: please give the output directory with '-d outputdir' argument"
sys.exit(1)
else:
outputdir = options.outputdir
if not args:
apps = self.get_apps(global_options)
else:
apps = args
if not os.path.exists(outputdir):
os.makedirs(outputdir)
for app in apps:
dirs = app.split('.')
mod = []
dest = outputdir
for m in dirs:
mod.append(m)
dest = os.path.join(dest, m)
module = '.'.join(mod)
if global_options.verbose:
print 'Export %s to %s ...' % (module, dest)
if module == app:
recursion = True
else:
recursion = False
extract_dirs(module, '', dest, verbose=global_options.verbose, recursion=recursion)
register_command(ExportCommand)
#class ExtractUrlsCommand(Command):
# name = 'extracturls'
# help = 'Extract all url mappings from view modules to a specified file.'
# args = ''
#
# def handle(self, options, global_options, *args):
# urlfile = 'urls.py'
#
# application = SimpleFrame.Dispatcher(apps_dir=global_options.project, start=False)
# filename = os.path.join(application.apps_dir, urlfile)
# if os.path.exists(filename):
# answer = raw_input("Error: [%s] is existed already, do you want to overwrite it[Y/n]:" % urlfile)
# if answer.strip() and answer.strip.lower() != 'y':
# return
# f = file(filename, 'w')
# print >>f, "from uliweb import simple_expose\n"
# application.url_infos.sort()
# for url, kw in application.url_infos:
# endpoint = kw.pop('endpoint')
# if kw:
# s = ['%s=%r' % (k, v) for k, v in kw.items()]
# t = ', %s' % ', '.join(s)
# else:
# t = ''
# print >>f, "simple_expose(%r, %r%s)" % (url, endpoint, t)
# f.close()
# print 'urls.py has been created successfully.'
#register_command(ExtractUrlsCommand)
#
class CallCommand(Command):
name = 'call'
help = 'Call <exefile>.py for each installed app according to the command argument.'
args = '[-a appname] exefile'
option_list = (
make_option('-a', dest='appname',
help='App name. If not provided, exefile will be searched for across the whole project.'),
make_option('--without-application', action='store_false', default=True, dest='application',
help='Do not create the application before executing the script. By default the application is created.'),
make_option('--gevent', action='store_true', default=False, dest='gevent',
help='Apply the gevent monkey patch before executing the script.'),
)
def handle(self, options, global_options, *args):
from uliweb.utils.common import is_pyfile_exist
from uliweb.core.SimpleFrame import get_app_dir
if not args:
print "Error: no command module name was given after the call command."
return
else:
command = args[0]
if options.gevent:
from gevent import monkey
monkey.patch_all()
if options.application:
self.get_application(global_options)
if not options.appname:
apps = self.get_apps(global_options)
else:
apps = [options.appname]
exe_flag = False
def get_module(command, apps):
if '.' in command:
yield 'mod', '', command
else:
for f in apps:
yield 'app', f, command
for _type, app, m in get_module(command, apps):
mod = None
if _type == 'mod':
mod_name = m
if global_options.verbose:
print "Importing... %s" % mod_name
mod = __import__(m, fromlist=['*'])
else:
path = get_app_dir(app)
if is_pyfile_exist(path, m):
mod_name = app + '.' + m
if global_options.verbose:
print "Importing... %s" % mod_name
mod = __import__('%s.%s' % (app, m), fromlist=['*'])
if mod:
if hasattr(mod, 'call'):
getattr(mod, 'call')(args, options, global_options)
elif hasattr(mod, 'main'):
getattr(mod, 'main')(args, options, global_options)
else:
print "Can't find call() or main() function in module %s" % mod_name
exe_flag = True
if not exe_flag:
print "Error: can't import [%s], please check the file and try again." % command
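# Illustrative invocation (app and module names are hypothetical):
#   uliweb call -a myapp dbinit
# imports myapp.dbinit and runs its call(args, options, global_options) or
# main(args, options, global_options) function, as implemented above.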
register_command(CallCommand)
class InstallCommand(Command):
name = 'install'
help = 'Install the extra modules listed in requirements.txt for the given apps (or all installed apps).'
args = '[appname]'
def handle(self, options, global_options, *args):
from uliweb.core.SimpleFrame import get_app_dir
#check pip or setuptools
try:
import pip
except:
print "Error: can't import pip module, please install it first"
sys.exit(1)
apps = args or self.get_apps(global_options)
def get_requirements():
for app in apps:
path = get_app_dir(app)
r_file = os.path.join(path, 'requirements.txt')
if os.path.exists(r_file):
yield r_file
r_file = os.path.join(global_options.project, 'requirements.txt')
if os.path.exists(r_file):
yield r_file
for r_file in get_requirements():
if global_options.verbose:
print "Processing... %s" % r_file
os.system('pip install -r %s' % r_file)
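# For example (app name hypothetical), `uliweb install myapp` runs
# `pip install -r <myapp>/requirements.txt`, plus the project-level
# requirements.txt when one exists, exactly as the loop above does.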
register_command(InstallCommand)
class MakeCmdCommand(Command):
name = 'makecmd'
help = 'Create a commands.py in the given app directory or the current directory.'
args = 'appname'
check_apps = False
check_apps_dirs = False
def handle(self, options, global_options, *args):
from uliweb.core.commands import get_input, get_answer
from uliweb.core.template import template_file
from uliweb.utils.common import extract_dirs
from uliweb import get_app_dir
if not args:
path = '.'
else:
path = get_app_dir(args[0])
cmd_filename = os.path.join(path, 'commands.py')
overwrite = True
if os.path.exists(cmd_filename):
overwrite = get_answer('commands.py already exists, '
'do you want to overwrite it',
quit='q',
default='n') == 'Y'
if overwrite:
command_file = open(cmd_filename, 'w')
else:
command_file = open(cmd_filename, 'a')
try:
if overwrite:
command_file.write(self._render_tempfile('command_head.tmpl'))
d = {}
d['name'] = get_input('Command name:')
d['has_subcommands'] = get_answer('Has subcommands', default='n') == 'Y'
command_file.write(self._render_tempfile('command.tmpl', d))
if d['has_subcommands']:
subcommand_filename = os.path.join(path, d['name']+'_subcommands.py')
if overwrite:
sub_file = open(subcommand_filename, 'w')
else:
sub_file = open(subcommand_filename, 'a')
try:
if overwrite:
sub_file.write(self._render_tempfile('command_head.tmpl'))
d = {'name':'demoSub', 'has_subcommands':False}
sub_file.write(self._render_tempfile('command.tmpl', d))
finally:
sub_file.close()
finally:
command_file.close()
def _get_tempfile(self, tmplname):
from uliweb.utils.common import pkg
return os.path.join(pkg.resource_filename('uliweb', 'template_files/command'), tmplname)
def _render_tempfile(self, tmplname, vars=None):
from uliweb.core.template import template_file
tempfile = self._get_tempfile(tmplname)
return template_file(tempfile, vars or {})
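# Typical use (app name hypothetical): `uliweb makecmd myapp` writes a
# commands.py into that app's directory from the bundled command templates,
# prompting for a command name and whether it should have subcommands.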
register_command(MakeCmdCommand)
class RunserverCommand(Command):
name = 'runserver'
help = 'Start a new development server. It can also start up an app without a whole project.'
args = '[appname appname ...]'
option_list = (
make_option('-h', dest='hostname', default='localhost',
help='Hostname or IP.'),
make_option('-p', dest='port', type='int', default=8000,
help='Port number.'),
make_option('--no-reload', dest='reload', action='store_false', default=True,
help='Disable auto-reloading of the development server. Reloading is enabled by default.'),
make_option('--no-debug', dest='debug', action='store_false', default=True,
help='Disable debug mode. Debug is enabled by default.'),
make_option('--nocolor', dest='color', action='store_false', default=True,
help='Disable colored log output. Color is enabled by default.'),
make_option('--thread', dest='thread', action='store_true', default=False,
help='Use threaded server mode. Default is False.'),
make_option('--processes', dest='processes', type='int', default=1,
help='The default number of processes to start.'),
make_option('--ssl', dest='ssl', action='store_true',
help='Using SSL to access http.'),
make_option('--ssl-key', dest='ssl_key', default='ssl.key',
help='The SSL private key filename.'),
make_option('--ssl-cert', dest='ssl_cert', default='ssl.cert',
help='The SSL certificate filename.'),
make_option('--tornado', dest='tornado', action='store_true', default=False,
help='Start uliweb server with tornado.'),
make_option('--gevent', dest='gevent', action='store_true', default=False,
help='Start uliweb server with gevent.'),
make_option('--gevent-socketio', dest='gsocketio', action='store_true', default=False,
help='Start uliweb server with gevent-socketio.'),
make_option('--coverage', dest='coverage', action='store_true', default=False,
help='Start uliweb server with coverage.'),
)
develop = False
check_apps_dirs = False
def handle(self, options, global_options, *args):
import logging
from logging import StreamHandler
from uliweb.utils.coloredlog import ColoredFormatter
from uliweb.utils.common import check_apps_dir
import subprocess
if self.develop:
include_apps = ['plugs.develop']
else:
include_apps = []
#add appname runnable support: when an app is started up directly, a default
#project is created automatically under ~/.uliweb and used as the apps directory
old_apps_dir = os.path.abspath(global_options.apps_dir)
if args:
include_apps.extend(args)
project_home_dir = os.path.join(os.path.expanduser('~'), '.uliweb')
if not os.path.exists(project_home_dir):
os.makedirs(project_home_dir)
subprocess.call('uliweb makeproject -y project', cwd=project_home_dir, shell=True)
global_options.project = os.path.join(project_home_dir, 'project')
global_options.apps_dir = os.path.join(global_options.project, 'apps')
check_apps_dir(global_options.apps_dir)
extra_files = collect_files(global_options, global_options.apps_dir, self.get_apps(global_options, include_apps))
if options.color:
def format(self, record):
if not hasattr(self, 'new_formatter'):
if self.formatter:
fmt = ColoredFormatter(format=self.formatter._fmt, datefmt=self.formatter.datefmt, log_colors=uliweb.settings.get('LOG.COLORS', {}))
else:
fmt = ColoredFormatter()
self.new_formatter = fmt
else:
fmt = self.new_formatter
return fmt.format(record)
setattr(StreamHandler, 'format', format)
def get_app(debug_cls=None):
return make_application(options.debug, project_dir=global_options.project,
include_apps=include_apps, settings_file=global_options.settings,
local_settings_file=global_options.local_settings, debug_cls=debug_cls,
verbose=global_options.verbose, pythonpath=old_apps_dir)
cov = None
try:
if options.coverage:
try:
from coverage import coverage
except ImportError:
print "Error: Can't import coverage!"
return
cov = coverage(source=['apps'])
cov.start()
if options.tornado:
self.run_tornado(options, extra_files, get_app)
elif options.gevent:
self.run_gevent(options, extra_files, get_app)
elif options.gsocketio:
self.run_gevent_socketio(options, extra_files, get_app)
else:
self.run_simple(options, extra_files, get_app)
finally:
if cov:
cov.stop()
cov.html_report(directory='covhtml')
def run_tornado(self, options, extra_files, get_app):
try:
import tornado.wsgi
import tornado.httpserver
import tornado.ioloop
import tornado.autoreload
except:
print 'Error: Please install tornado first'
return
if options.ssl:
ctx = {
"certfile": options.ssl_cert,
"keyfile": options.ssl_key,
}
log.info(' * Running on https://%s:%d/' % (options.hostname, options.port))
else:
ctx = None
log.info(' * Running on http://%s:%d/' % (options.hostname, options.port))
container = tornado.wsgi.WSGIContainer(get_app())
http_server = tornado.httpserver.HTTPServer(container,
ssl_options=ctx)
http_server.listen(options.port, address=options.hostname)
loop=tornado.ioloop.IOLoop.instance()
if options.reload:
for f in extra_files:
tornado.autoreload.watch(f)
tornado.autoreload.start(loop)
loop.start()
def run_gevent(self, options, extra_files, get_app):
try:
from gevent.wsgi import WSGIServer
from gevent import monkey
except:
print 'Error: Please install gevent first'
return
from werkzeug.serving import run_with_reloader
from functools import partial
monkey.patch_all()
run_with_reloader = partial(run_with_reloader, extra_files=extra_files)
if options.ssl:
ctx = {
"certfile": options.ssl_cert,
"keyfile": options.ssl_key,
}
else:
ctx = {}
@run_with_reloader
def run_server():
log.info(' * Running on http://%s:%d/' % (options.hostname, options.port))
http_server = WSGIServer((options.hostname, options.port), get_app(), **ctx)
http_server.serve_forever()
run_server()
def run_gevent_socketio(self, options, extra_files, get_app):
try:
from gevent import monkey
except:
print 'Error: Please install gevent first'
return
try:
from socketio.server import SocketIOServer
except:
print 'Error: Please install gevent-socketio first'
sys.exit(1)
from werkzeug.serving import run_with_reloader
from functools import partial
monkey.patch_all()
from werkzeug.debug import DebuggedApplication
class MyDebuggedApplication(DebuggedApplication):
def __call__(self, environ, start_response):
# check if websocket call
if "wsgi.websocket" in environ and not environ["wsgi.websocket"] is None:
# a websocket call, no debugger ;)
return self.application(environ, start_response)
# else go on with debugger
return DebuggedApplication.__call__(self, environ, start_response)
if options.ssl:
ctx = {
"certfile": options.ssl_cert,
"keyfile": options.ssl_key,
}
else:
ctx = {}
run_with_reloader = partial(run_with_reloader, extra_files=extra_files)
@run_with_reloader
def run_server():
log.info(' * Running on http://%s:%d/' % (options.hostname, options.port))
SocketIOServer((options.hostname, options.port), get_app(MyDebuggedApplication), resource="socket.io", **ctx).serve_forever()
run_server()
def run_simple(self, options, extra_files, get_app):
from werkzeug.serving import run_simple
if options.ssl:
ctx = 'adhoc'
default = False
if not os.path.exists(options.ssl_key):
log.info(' * SSL key file (%s) not found, will use default ssl config' % options.ssl_key)
default = True
if not os.path.exists(options.ssl_cert) and not default:
log.info(' * SSL cert file (%s) not found, will use default ssl config' % options.ssl_cert)
default = True
if not default:
ctx = (options.ssl_key, options.ssl_cert)
else:
ctx = None
run_simple(options.hostname, options.port, get_app(), options.reload, False, True,
extra_files, 1, options.thread, options.processes, ssl_context=ctx)
register_command(RunserverCommand)
class DevelopCommand(RunserverCommand):
name = 'develop'
develop = True
register_command(DevelopCommand)
class StaticizeCommand(RunserverCommand):
"""
Staticize a site. Limitations:
1. Only the GET method is supported
2. JSON results should be exposed as @expose('xxx.json')
3. Redirects are supported
4. i18n is supported
5. URL parameters are not supported
It does not work like a spider.
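Example (output directory name is hypothetical):
    uliweb staticize -l en_US site_out
renders every URL rule into site_out (appending .html when a rule has no
extension) and finally runs 'uliweb exportstatic site_out/static'.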
"""
name = 'staticize'
help = 'Staticize a site to static web pages.'
args = '[options] output_directory'
check_apps_dirs = True
option_list = (
make_option('-l', dest='lang', default='',
help='Language of the site; by default no language is specified.'),
# make_option('-o', dest='outputfile', default='',
# help='Output staticize script.'),
)
def handle(self, options, global_options, *args):
import uliweb.core.SimpleFrame
from uliweb.core.SimpleFrame import get_app_dir, url_for as _old_url_for
import uliweb.contrib.i18n.middle_i18n as i18n
from urlparse import urlparse
if not args:
print "Please give an output directory."
sys.exit(1)
path = dst_path = args[0]
# if options.lang:
# path = os.path.join(dst_path, options.lang)
# else:
# path = dst_path
#redefine url_for
def _u(endpoint, **values):
url = _old_url_for(endpoint, **values)
return self.fix_url(url)
uliweb.core.SimpleFrame.url_for = _u
#redefine get_language_from_request
def _get_language_from_request(request, settings):
return options.lang
i18n.get_language_from_request = _get_language_from_request
app = self.get_application(global_options,
default_settings={'I18N/URL_LANG_KEY':'lang'})
from uliweb.core.SimpleFrame import url_map
from uliweb.utils.test import client_from_application
from werkzeug import Response
Response.autocorrect_location_header = False
client = client_from_application(app)
u = []
for i, r in enumerate(sorted(url_map.iter_rules(), key=lambda x:x.rule)):
#only execute GET method
end_point = r.rule[1:] or 'index.html'
p = os.path.join(path, end_point)
p = self.fix_url(p)
print 'GET %s to %s' % (r.rule, p)
base_dir = os.path.dirname(p)
if not os.path.exists(base_dir):
os.makedirs(base_dir)
# u.append((r.rule, methods, r.endpoint))
with open(os.path.join(p), 'w') as f:
response = client.get(r.rule, data={'lang':options.lang})
if response.status_code == 302:
f.write('<meta http-equiv="Refresh" content="0; url=%s" />' %
self.fix_url(response.location))
else:
text = self.convert_text(response.data)
f.write(text)
# if i>1:
# return
print "Export static files to %s" % dst_path
call('uliweb exportstatic %s/static' % dst_path)
def convert_text(self, text):
from HTMLParser import HTMLParser
from urlparse import urlparse, urlunparse
pos = []
class MyHTMLParser(HTMLParser):
def handle_starttag(self, tag, attrs):
if tag == 'a':
_attrs = dict(attrs)
if 'href' in _attrs:
p = self.getpos()
pos.append([p[0], p[1], _attrs.get('href'), len(self.get_starttag_text()), _attrs])
parser = MyHTMLParser()
parser.feed(text)
lines = text.splitlines()
num = 0
for line, start, href, length, attrs in reversed(pos):
r = urlparse(href)
if r.scheme or r.netloc:
continue
#relative url
else:
href = self.fix_url(href)
x = list(r)
x[2] = href
url = urlunparse(x)
attrs['href'] = url
tag = '<a ' + ' '.join(['%s="%s"' % (k, v) for k, v in attrs.items()]) + '>'
old_line = lines[line-1]
lines[line-1] = old_line[:start] + tag + old_line[start+length:]
num += 1
if num > 0:
return '\n'.join(lines)
else:
return text
def fix_url(self, p):
if p == '#':
return p
if p == '/':
return '/index.html'
if os.path.splitext(p)[1]:
pass
else:
p += '.html'
return p
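# Illustrative fix_url mappings (inputs are examples only):
#   '/'          -> '/index.html'
#   '/about'     -> '/about.html'   (extension appended)
#   '/data.json' -> '/data.json'    (already has an extension)
#   '#'          -> '#'             (anchors are left untouched)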
register_command(StaticizeCommand)
from code import interact, InteractiveConsole
class MyInteractive(InteractiveConsole):
def interact(self, banner=None, call=None):
"""Closely emulate the interactive Python console.
The optional banner argument specifies the banner to print
before the first interaction; by default it prints a banner
similar to the one printed by the real Python interpreter,
followed by the current class name in parentheses (so as not
to confuse this with the real interpreter -- since it's so
close!).
"""
try:
sys.ps1
except AttributeError:
sys.ps1 = ">>> "
try:
sys.ps2
except AttributeError:
sys.ps2 = "... "
cprt = 'Type "help", "copyright", "credits" or "license" for more information.'
if banner is None:
self.write("Python %s on %s\n%s\n(%s)\n" %
(sys.version, sys.platform, cprt,
self.__class__.__name__))
else:
self.write("%s\n" % str(banner))
more = 0
if call:
call()
while 1:
try:
if more:
prompt = sys.ps2
else:
prompt = sys.ps1
try:
line = self.raw_input(prompt)
# Can be None if sys.stdin was redefined
encoding = getattr(sys.stdin, "encoding", None)
if encoding and not isinstance(line, unicode):
line = line.decode(encoding)
except EOFError:
self.write("\n")
break
else:
more = self.push(line)
except KeyboardInterrupt:
self.write("\nKeyboardInterrupt\n")
self.resetbuffer()
more = 0
from uliweb import __version__
class ShellCommand(Command):
name = 'shell'
help = 'Create a new interactive python shell environment.'
args = '<filename>'
check_apps_dirs = True
option_list = (
make_option('-I', dest='no_ipython', default=False, action='store_true',
help='Do not use IPython.'),
make_option('-n', '--notebook', dest='notebook', default=False, action='store_true',
help='Start an IPython notebook.'),
make_option('-m', '--module', dest='module', default='',
help="Module name that will be executed when starting the shell."),
)
banner = "Uliweb %s Command Shell" % __version__
skip_options = True
def make_shell_env(self, global_options):
from uliweb import functions, settings
from uliweb.core.SimpleFrame import Dispatcher
application = self.get_application(global_options)
if global_options.project not in sys.path:
sys.path.insert(0, global_options.project)
app = application
while app:
if isinstance(app, Dispatcher):
break
else:
app = app.app
env = {'application':app, 'settings':settings, 'functions':functions}
return env
def handle(self, options, global_options, *args):
import subprocess as sub
args = list(args)
namespace = self.make_shell_env(global_options)
try:
import readline
except ImportError:
print "Module readline not available."
else:
import rlcompleter
readline.parse_and_bind("tab: complete")
try:
import IPython
except ImportError:
IPython = None
#according to https://github.com/ipython/ipython/wiki/Cookbook%3a-Updating-code-for-use-with-IPython-0.11-and-later
if IPython and not options.no_ipython:
if options.module:
_args = ['-m', options.module]
else:
_args = []
if options.notebook:
# from IPython.html.notebookapp import NotebookApp
# app = NotebookApp.instance()
# app.initialize(['--ext', 'uliweb.utils.ipython_extension'] + args)
# app.start()
version = int(IPython.__version__.split('.')[0])
if version < 3:
cmd = ' '.join(['ipython', 'notebook'] + args)
else:
cmd = ' '.join(['jupyter', 'notebook'] + args)
os.environ.update({'LOCAL_SETTINGS':global_options.local_settings,
'SETTINGS':global_options.settings})
sub.call(cmd, shell=True, cwd=os.getcwd())
else:
if options.module:
_args.append('-i')
IPython.start_ipython(_args + args, user_ns=namespace, banner2=self.banner)
else:
if not IPython and not options.no_ipython:
print "Error: Can't import IPython, please install it first"
from code import interact, InteractiveConsole
Interpreter = MyInteractive(namespace)
if args or options.module:
def call():
mod = __import__(options.module or args[0], {}, {}, ['*'])
namespace.update(vars(mod))
else:
call = None
Interpreter.interact(self.banner, call=call)
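# Typical use: `uliweb shell` opens IPython (when installed) with
# 'application', 'settings' and 'functions' preloaded in the namespace;
# `uliweb shell -n` launches a notebook instead, and `-I` falls back to the
# plain MyInteractive console defined above.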
register_command(ShellCommand)
class FindCommand(Command):
name = 'find'
help = 'Find objects in uliweb, such as: view, template, static file etc.'
args = ''
check_apps_dirs = True
option_list = (
make_option('-t', '--template', dest='template',
help='Find the template file path according to the template filename.'),
make_option('-u', '--url', dest='url',
help='Find the view function path according to the url.'),
make_option('-c', '--static', dest='static',
help='Find the static file path according to the static filename.'),
make_option('-m', '--model', dest='model',
help='Find the model definition according to the model name.'),
make_option('-o', '--option', dest='option',
help='Find which settings.ini files define the given ini option.'),
make_option('--tree', dest='tree', action='store_true',
help='Show the template invocation tree; should be used together with the -t option.'),
make_option('--blocks', dest='blocks', action='store_true',
help='Display blocks defined in a template, only available when searching template.'),
make_option('--with-filename', dest='with_filename', action='store_true',
help='Display blocks defined in a template with template filename.'),
make_option('--source', dest='source', action='store_true',
help='Output generated python source code of template.'),
make_option('--comment', dest='comment', action='store_true',
help='Output generated python source code of template and also output comment for each line.'),
)
def handle(self, options, global_options, *args):
self.get_application(global_options)
if options.url:
self._find_url(options.url)
elif options.template:
self._find_template(options.template, options.tree,
options.blocks, options.with_filename,
options.source, options.comment)
elif options.static:
self._find_static(global_options, options.static)
elif options.model:
self._find_model(global_options, options.model)
elif options.option:
self._find_option(global_options, options.option)
def _find_url(self, url):
from uliweb.core.SimpleFrame import url_map
from werkzeug.test import EnvironBuilder
from uliweb import NotFound
builder = EnvironBuilder(url)
env = builder.get_environ()
url_adapter = url_map.bind_to_environ(env)
try:
endpoint, values = url_adapter.match()
print '%s' % endpoint
except NotFound:
print 'Not Found'
def _find_template(self, template, tree, blocks, with_filename,
source, comment):
"""
If tree is true, display the chain of template extends/includes.
"""
from uliweb import application
from uliweb.core.template import _format_code
def get_rel_filename(filename, path):
f1 = os.path.splitdrive(filename)[1]
f2 = os.path.splitdrive(path)[1]
f = os.path.relpath(f1, f2).replace('\\', '/')
if f.startswith('..'):
return filename.replace('\\', '/')
else:
return f
template_file = None
if not tree:
application.template_loader.comment = comment
files = application.template_loader.find_templates(template)
if files:
template_file = files[0]
for x in files:
print x
if source:
print
print '---------------- source of %s ---------------' % template
t = application.template_loader.load(template_file)
if t and comment:
print _format_code(t.code).rstrip()
print
else:
print t.code
print
else:
print 'Not Found'
else:
application.template_loader.print_tree(template)
if template_file and blocks:
application.template_loader.print_blocks(template, with_filename)
def _find_static(self, global_options, static):
from uliweb import get_app_dir
apps = self.get_apps(global_options)
for appname in reversed(apps):
path = os.path.join(get_app_dir(appname), 'static', static)
if os.path.exists(path):
print '%s' % path
return
print 'Not Found'
def _find_model(self, global_options, model):
from uliweb import settings
model_path = settings.MODELS.get(model, 'Not Found')
print model_path
def _find_option(self, global_options, option):
from uliweb import settings
from uliweb.core.SimpleFrame import collect_settings
from uliweb.utils.pyini import Ini
print '------ Combined value of [%s] ------' % option
print settings.get_var(option)
print '------ Detail value of [%s] ------' % option
sec_flag = '/' not in option
if not sec_flag:
section, key = option.split('/')
for f in collect_settings(global_options.project, settings_file=global_options.settings,
local_settings_file=global_options.local_settings):
x = Ini(f, raw=True, basepath=global_options.apps_dir)
if sec_flag:
if option in x:
print x[option]
else:
if section in x:
if key in x[section]:
v = x[section][key]
print "%s %s%s" % (str(v), key, v.value())
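# Illustrative queries (names are examples only):
#   uliweb find -u /login            -> prints the matching endpoint
#   uliweb find -t base.html --tree  -> prints the template extend/include tree
#   uliweb find -o LOG/level         -> prints the combined and per-file values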
register_command(FindCommand)
class ValidateTemplateCommand(Command):
name = 'validatetemplate'
help = 'Validate template files syntax.'
args = '[appname] [-f templatefile]'
check_apps_dirs = True
option_list = (
make_option('-f', dest='template',
help='Template filename which will be validated.'),
)
def handle(self, options, global_options, *args):
self.get_application(global_options)
from uliweb import application as app
if options.template:
files = [options.template]
else:
if args:
files = self._find_templates(args)
else:
files = self._find_templates(app.apps)
self._validate_templates(app, files, global_options.verbose)
def _find_templates(self, apps):
from glob import glob
from uliweb import get_app_dir
from uliweb.utils.common import walk_dirs
for app in apps:
path = os.path.join(get_app_dir(app), 'templates')
for f in walk_dirs(path, include_ext=['.html']):
yield f
def _validate_templates(self, app, files, verbose):
"""
Validate the syntax of the given template files, printing PASSED or FAILED for each.
"""
from uliweb import application
from uliweb.core.template import template_file
from uliweb.utils.common import trim_path
app.template_loader.log = None
for f in files:
try:
t = app.template_loader.load(f)
if verbose:
print 'PASSED', f
except Exception as e:
print 'FAILED', f, str(e)
register_command(ValidateTemplateCommand)
def collect_files(options, apps_dir, apps):
files = [os.path.join(apps_dir, options.settings),
os.path.join(apps_dir, options.local_settings)]
def f(path):
if not os.path.exists(path):
log.error("Path %s does not exist!" % path)
return
for r in os.listdir(path):
if r in ['.svn', '_svn', '.git'] or r.startswith('.'):
continue
fpath = os.path.join(path, r)
if os.path.isdir(fpath):
f(fpath)
else:
ext = os.path.splitext(fpath)[1]
if ext in ['.py', '.ini']:
files.append(fpath)
from uliweb import get_app_dir
for p in apps:
path = get_app_dir(p)
files.append(os.path.join(path, 'config.ini'))
files.append(os.path.join(path, 'settings.ini'))
f(path)
return files
def call(args=None):
from uliweb.core.commands import execute_command_line
def callback(global_options):
apps_dir = os.path.abspath(global_options.apps_dir or os.path.join(os.getcwd(), 'apps'))
if os.path.exists(apps_dir) and apps_dir not in sys.path:
sys.path.insert(0, apps_dir)
install_config(apps_dir)
from uliweb.i18n.i18ntool import I18nCommand
register_command(I18nCommand)
if isinstance(args, (unicode, str)):
import shlex
args = shlex.split(args)
execute_command_line(args or sys.argv, get_commands, 'uliweb', callback)
def main():
call()
if __name__ == '__main__':
main()
|
apple/llvm-project
|
refs/heads/llvm.org/main
|
lldb/test/API/functionalities/breakpoint/hardware_breakpoints/hardware_breakpoint_on_multiple_threads/TestHWBreakMultiThread.py
|
7
|
"""
Test hardware breakpoints for multiple threads.
"""
import lldb
from lldbsuite.test.decorators import *
from lldbsuite.test.lldbtest import *
from lldbsuite.test import lldbutil
from functionalities.breakpoint.hardware_breakpoints.base import *
class HardwareBreakpointMultiThreadTestCase(HardwareBreakpointTestBase):
mydir = TestBase.compute_mydir(__file__)
def does_not_support_hw_breakpoints(self):
return not super().supports_hw_breakpoints()
@skipIfOutOfTreeDebugserver
@skipTestIfFn(does_not_support_hw_breakpoints)
def test_hw_break_set_delete_multi_thread_macos(self):
self.build()
self.setTearDownCleanup()
self.break_multi_thread('delete')
@skipIfOutOfTreeDebugserver
@skipTestIfFn(does_not_support_hw_breakpoints)
def test_hw_break_set_disable_multi_thread_macos(self):
self.build()
self.setTearDownCleanup()
self.break_multi_thread('disable')
def setUp(self):
# Call super's setUp().
TestBase.setUp(self)
# Our simple source filename.
self.source = 'main.cpp'
# Find the line number to break inside main().
self.first_stop = line_number(
self.source, 'Starting thread creation with hardware breakpoint set')
def break_multi_thread(self, removal_type):
"""Test that lldb hardware breakpoints work for multiple threads."""
self.runCmd("file " + self.getBuildArtifact("a.out"),
CURRENT_EXECUTABLE_SET)
# Stop in main before creating any threads.
lldbutil.run_break_set_by_file_and_line(
self, None, self.first_stop, num_expected_locations=1)
# Run the program.
self.runCmd("run", RUN_SUCCEEDED)
# We should be stopped again due to the breakpoint.
# The stop reason of the thread should be breakpoint.
self.expect("thread list", STOPPED_DUE_TO_BREAKPOINT,
substrs=['stopped', 'stop reason = breakpoint'])
# Now set a hardware breakpoint in thread function.
self.expect("breakpoint set -b hw_break_function --hardware",
substrs=[
'Breakpoint',
'hw_break_function',
'address = 0x'])
# We should stop in the hw_break_function function twice.
count = 0
while count < 2 :
self.runCmd("process continue")
# We should be stopped in hw_break_function
# The stop reason of the thread should be breakpoint.
self.expect(
"thread list",
STOPPED_DUE_TO_BREAKPOINT,
substrs=[
'hw_break_function',
'stop reason = breakpoint',
])
# Continue the loop and check that we are stopped twice in total.
count += 1
# Check the breakpoint list.
self.expect("breakpoint list", substrs=['hw_break_function', 'hardware'])
self.expect("breakpoint list -v", substrs=['function = hw_break_function', 'hardware = true'])
if removal_type == 'delete':
self.runCmd("settings set auto-confirm true")
# Now 'breakpoint delete' should just work fine without confirmation
# prompt from the command interpreter.
self.expect("breakpoint delete",
startstr="All breakpoints removed")
# Restore the original setting of auto-confirm.
self.runCmd("settings clear auto-confirm")
elif removal_type == 'disable':
self.expect("breakpoint disable",
startstr="All breakpoints disabled.")
# Continue. Program should exit without stopping anywhere.
self.runCmd("process continue")
# Process should have stopped and exited with status = 0
self.expect("process status", PROCESS_STOPPED,
patterns=['Process .* exited with status = 0'])
|
cliqz/socorro
|
refs/heads/master
|
socorro/unittest/external/fs/test_tar_crashstore.py
|
9
|
from nose.tools import eq_, ok_, assert_raises
from mock import Mock, patch
from datetime import datetime
import json
from configman.dotdict import DotDict
from socorro.external.fs.crashstorage import (
TarFileCrashStore,
)
from socorro.external.crashstorage_base import CrashIDNotFound
from socorro.unittest.testbase import TestCase
class TestTarCrashStorage(TestCase):
def setUp(self):
super(TestTarCrashStorage, self).setUp()
def _get_config(self):
config = DotDict()
config.logger = Mock()
config.tarball_name = '/tmp/a_tarball_name.tar'
config.temp_directory = '/tmp'
config.tarfile_module = Mock()
config.gzip_module = Mock()
config.os_module = Mock()
config.redactor_class = Mock()
return config
def test_init(self):
config = self._get_config()
# the call to be tested
tar_store = TarFileCrashStore(config)
# this is what should have happened
ok_(not hasattr(tar_store, 'tar_file'))
ok_(isinstance(tar_store.tarfile_module, Mock))
ok_(isinstance(tar_store.gzip_module, Mock))
ok_(isinstance(tar_store.os_module, Mock))
def test_save_processed(self):
config = self._get_config()
tar_store = TarFileCrashStore(config)
processed_crash = {
'crash_id': '091204bd-87c0-42ba-8f58-554492141212',
'payload': 'nothing to see here',
'some_date': datetime(1960, 5, 4, 15, 10)
}
processed_crash_as_string = json.dumps(
processed_crash,
default=tar_store.stringify_datetimes
)
# the call to be tested
tar_store.save_processed(processed_crash)
# this is what should have happened
ok_(hasattr(tar_store, 'tar_file'))
tar_store.tarfile_module.open.assert_called_once_with(
config.tarball_name,
'w'
)
tar_store.gzip_module.open.assert_called_once_with(
'/tmp/091204bd-87c0-42ba-8f58-554492141212.jsonz',
'w',
9
)
mocked_file_handle = tar_store.gzip_module.open.return_value
mocked_file_handle.write.assert_called_once_with(
processed_crash_as_string
)
mocked_file_handle.close.assert_called_once_with()
tar_store.tar_file.add.assert_called_once_with(
'/tmp/091204bd-87c0-42ba-8f58-554492141212.jsonz',
'09/12/091204bd-87c0-42ba-8f58-554492141212.jsonz'
)
tar_store.os_module.unlink.assert_called_once_with(
'/tmp/091204bd-87c0-42ba-8f58-554492141212.jsonz'
)
|
hortonworks/hortonworks-sandbox
|
refs/heads/master
|
desktop/core/ext-py/django-extensions-0.5/django_extensions/management/commands/describe_form.py
|
30
|
from django.core.management.base import LabelCommand, CommandError
from django.utils.encoding import force_unicode
class Command(LabelCommand):
help = "Outputs the specified model as a form definition to the shell."
args = "[app.model]"
label = 'application name and model name'
requires_model_validation = True
can_import_settings = True
def handle_label(self, label, **options):
return describe_form(label)
def describe_form(label, fields=None):
"""
Returns a string describing a form based on the model
"""
from django.db.models.loading import get_model
try:
app_name, model_name = label.split('.')[-2:]
except (IndexError, ValueError):
raise CommandError("Need application and model name in the form: appname.model")
model = get_model(app_name, model_name)
opts = model._meta
field_list = []
for f in opts.fields + opts.many_to_many:
if not f.editable:
continue
if fields and not f.name in fields:
continue
formfield = f.formfield()
if not '__dict__' in dir(formfield):
continue
attrs = {}
valid_fields = ['required', 'initial', 'max_length', 'min_length', 'max_value', 'min_value', 'max_digits', 'decimal_places', 'choices', 'help_text', 'label']
for k,v in formfield.__dict__.items():
if k in valid_fields and v is not None:
# ignore defaults, to minimize verbosity
if k == 'required' and v:
continue
if k == 'help_text' and not v:
continue
if k == 'widget':
attrs[k] = v.__class__
elif k in ['help_text', 'label']:
attrs[k] = force_unicode(v).strip()
else:
attrs[k] = v
params = ', '.join(['%s=%r' % (k, v) for k, v in attrs.items()])
field_list.append(' %(field_name)s = forms.%(field_type)s(%(params)s)' % { 'field_name': f.name,
'field_type': formfield.__class__.__name__,
'params': params })
return '''
from django import forms
from %(app_name)s.models import %(object_name)s
class %(object_name)sForm(forms.Form):
%(field_list)s
''' % { 'app_name': app_name, 'object_name': opts.object_name, 'field_list': '\n'.join(field_list) }
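# Example invocation (app and model names are hypothetical):
#   python manage.py describe_form blog.Entry
# prints a forms.Form subclass with one form field per editable model field.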
|
vmarkovtsev/django
|
refs/heads/master
|
tests/indexes/tests.py
|
321
|
from unittest import skipUnless
from django.db import connection
from django.test import TestCase
from .models import Article, ArticleTranslation, IndexTogetherSingleList
class SchemaIndexesTests(TestCase):
"""
Test index handling by the db.backends.schema infrastructure.
"""
def test_index_name_hash(self):
"""
Index names should be deterministic.
"""
with connection.schema_editor() as editor:
index_name = editor._create_index_name(
model=Article,
column_names=("c1", "c2", "c3"),
suffix="123",
)
self.assertEqual(index_name, "indexes_article_c1_7ce4cc86123")
def test_index_together(self):
editor = connection.schema_editor()
index_sql = editor._model_indexes_sql(Article)
self.assertEqual(len(index_sql), 1)
# Ensure the index name is properly quoted
self.assertIn(
connection.ops.quote_name(
editor._create_index_name(Article, ['headline', 'pub_date'], suffix='_idx')
),
index_sql[0]
)
def test_index_together_single_list(self):
# Test for using index_together with a single list (#22172)
index_sql = connection.schema_editor()._model_indexes_sql(IndexTogetherSingleList)
self.assertEqual(len(index_sql), 1)
@skipUnless(connection.vendor == 'postgresql',
"This is a postgresql-specific issue")
def test_postgresql_text_indexes(self):
"""Test creation of PostgreSQL-specific text indexes (#12234)"""
from .models import IndexedArticle
index_sql = connection.schema_editor()._model_indexes_sql(IndexedArticle)
self.assertEqual(len(index_sql), 5)
self.assertIn('("headline" varchar_pattern_ops)', index_sql[2])
self.assertIn('("body" text_pattern_ops)', index_sql[3])
# unique=True and db_index=True should only create the varchar-specific
# index (#19441).
self.assertIn('("slug" varchar_pattern_ops)', index_sql[4])
@skipUnless(connection.vendor == 'postgresql',
"This is a postgresql-specific issue")
def test_postgresql_virtual_relation_indexes(self):
"""Test indexes are not created for related objects"""
index_sql = connection.schema_editor()._model_indexes_sql(Article)
self.assertEqual(len(index_sql), 1)
@skipUnless(connection.vendor == 'mysql', "This is a mysql-specific issue")
def test_no_index_for_foreignkey(self):
"""
MySQL on InnoDB already creates indexes automatically for foreign keys.
(#14180).
"""
storage = connection.introspection.get_storage_engine(
connection.cursor(), ArticleTranslation._meta.db_table
)
if storage != "InnoDB":
self.skipTest("This test only applies to the InnoDB storage engine")
index_sql = connection.schema_editor()._model_indexes_sql(ArticleTranslation)
self.assertEqual(index_sql, [])
|
windyuuy/opera
|
refs/heads/master
|
chromium/src/testing/PRESUBMIT.py
|
134
|
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Top-level presubmit script for testing.
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts for
details on the presubmit API built into gcl.
"""
def CommonChecks(input_api, output_api):
output = []
blacklist = [r'gmock.*', r'gtest.*']
output.extend(input_api.canned_checks.RunPylint(
input_api, output_api, black_list=blacklist))
return output
def CheckChangeOnUpload(input_api, output_api):
return CommonChecks(input_api, output_api)
def CheckChangeOnCommit(input_api, output_api):
return CommonChecks(input_api, output_api)
|
sonofeft/DigiPlot
|
refs/heads/master
|
digiplot/plot_area.py
|
1
|
#!/usr/bin/env python
# -*- coding: ascii -*-
from __future__ import absolute_import
from __future__ import print_function
import math
import sys
from PIL import Image, ImageTk, ImageFont, ImageDraw
try:
from PIL import ImageGrab
HAS_IMAGEGRAB = True
except:
HAS_IMAGEGRAB = False
if sys.version_info < (3,):
from cStringIO import StringIO
else:
from io import BytesIO as StringIO
import os
from digiplot.sample_img import TEST_IMAGE
here = os.path.abspath(os.path.dirname(__file__))
font_path = os.path.join( here, 'fonts', 'FreeSerifBoldItalic.ttf' )
freefont10 = ImageFont.truetype ( font_path, 10 )
freefont16 = ImageFont.truetype ( font_path, 16 )
freefont24 = ImageFont.truetype ( font_path, 24 )
freefont36 = ImageFont.truetype ( font_path, 36 )
freefont48 = ImageFont.truetype ( font_path, 48 )
freefont72 = ImageFont.truetype ( font_path, 72 )
def clamp(n, minn, maxn):
if n < minn:
return minn
elif n > maxn:
return maxn
else:
return n
class PlotArea(object):
"""
Logic for interpreting a plot image on a display canvas.
The image can be zoomed and panned within the canvas area.
The image is ALWAYS displayed at UL corner (0,0), but can be at many zoom/pan levels.
fi,fj are the fractional i,j locations within the original image. (value = 0.0 to 1.0)
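Coordinate conventions (values below are illustrative):
fi = 0.0 is the left edge of the original image and fi = 1.0 the right edge;
fj = 0.0 is the top edge and fj = 1.0 the bottom edge, so the plot origin
(the lower-left corner) defaults to (fi, fj) = (0.0, 1.0). These fractions
do not change with the current canvas zoom or pan.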
"""
def __init__(self, w_canv=1000, h_canv=1000):
self.w_canv = w_canv
self.h_canv = h_canv
test_img_data = TEST_IMAGE
self.img = Image.open(StringIO(test_img_data))
self.w_img, self.h_img = self.img.size
self.calc_nominal_zoom()
self.fi_origin = 0.0
self.fj_origin = 1.0 # lower left
self.fimax = 1.0
self.fjmax = 0.0
self.fi_offset = 0.0 # when upper left of canvas is not upper left of image
self.fj_offset = 0.0
self.x_origin = 0.0 # units on the image plot
self.y_origin = 0.0 # units on the image plot
self.xmax = 10.0 # units on the image plot
self.ymax = 10.0 # units on the image plot
self.log_x = False
self.log_y = False
self.log10_x_origin = 1.0 # units on the image plot
self.log10_y_origin = 1.0 # units on the image plot
self.log10_xmax = 2.0 # units on the image plot
self.log10_ymax = 2.0 # units on the image plot
def set_log_x(self):
self.log_x = True
if self.x_origin <= 0.0:
self.x_origin = 0.1
self.log10_x_origin = math.log10( self.x_origin )
def set_linear_x(self):
self.log_x = False
def set_log_y(self):
self.log_y = True
if self.y_origin <= 0.0:
self.y_origin = 0.1
self.log10_y_origin = math.log10( self.y_origin )
def set_linear_y(self):
self.log_y = False
def set_fraction_offset(self, fi=0.0, fj=0.0):
# calc max values to limit setting
i_img_shown = min(self.w_img, self.w_canv / self.img_zoom)
j_img_shown = min(self.h_img, self.h_canv / self.img_zoom)
fi_off_max = clamp( 1.0 - float(i_img_shown)/float(self.w_img), 0., 1.)
fj_off_max = clamp(1.0 - float(j_img_shown)/float(self.h_img), 0., 1.)
#print('fi_off_max=%g, fj_off_max=%g, fi=%g, fj=%g'%(fi_off_max, fj_off_max, fi, fj))
self.fi_offset = clamp(fi, 0., fi_off_max) # when upper left of canvas is not upper left of image
self.fj_offset = clamp(fj, 0., fj_off_max)
def set_canvas_wh(self, w_canv, h_canv):
self.w_canv = w_canv
self.h_canv = h_canv
self.calc_nominal_zoom()
def set_zoom(self, new_zoom):
self.img_zoom = max(new_zoom, self.fit_img_zoom)
def zoom_in(self, zoom_factor=0.1):
self.set_zoom( self.img_zoom * (1.0 + zoom_factor) )
def zoom_out(self, zoom_factor=0.1):
self.set_zoom( self.img_zoom / (1.0 + zoom_factor))
def zoom_to_quadrant(self, qname='UL'):
x_zoom = float(self.w_canv) / float(self.w_img)
y_zoom = float(self.h_canv) / float(self.h_img)
self.img_zoom = max(x_zoom, y_zoom) * 1.5
qname = qname.upper()
if qname=='UL':
self.set_fraction_offset(fi=0.0, fj=0.0)
elif qname=='UR':
self.set_fraction_offset(fi=1.0, fj=0.0)
elif qname=='LR':
self.set_fraction_offset(fi=1.0, fj=1.0)
elif qname=='LL':
self.set_fraction_offset(fi=0.0, fj=1.0)
else:
self.fit_img_on_canvas()
def zoom_into_ij(self,i,j, zoom_factor=0.1):
fi = self.get_img_fi_from_canvas_i( i )
fj = self.get_img_fj_from_canvas_j( j )
#fi_max = self.get_img_fi_from_canvas_i( self.w_canv )
#fj_max = self.get_img_fj_from_canvas_j( self.h_canv )
#print('fi_max=%g, fj_max=%g'%(fi_max, fj_max))
# Want new fi, fj same as old values but with new zoom factor
self.set_zoom( self.img_zoom * (1.0 + zoom_factor) )
fi_new = self.get_img_fi_from_canvas_i( i )
fj_new = self.get_img_fj_from_canvas_j( j )
fi_final = self.fi_offset + fi - fi_new
fj_final = self.fj_offset + fj - fj_new
self.set_fraction_offset( fi=fi_final, fj=fj_final )
def zoom_out_from_ij(self,i,j, zoom_factor=0.1):
fi = self.get_img_fi_from_canvas_i( i )
fj = self.get_img_fj_from_canvas_j( j )
# Want new fi, fj same as old values but with new zoom factor
self.set_zoom( self.img_zoom / (1.0 + zoom_factor))
fi_new = self.get_img_fi_from_canvas_i( i )
fj_new = self.get_img_fj_from_canvas_j( j )
#self.fi_offset += fi - fi_new
#self.fj_offset += fj - fj_new
self.set_fraction_offset( fi=self.fi_offset + fi - fi_new, fj=self.fj_offset + fj - fj_new )
def fit_img_on_canvas(self):
self.calc_nominal_zoom()
self.fi_offset = 0.0 # when upper left of canvas is not upper left of image
self.fj_offset = 0.0
def calc_nominal_zoom(self):
x_zoom = float(self.w_canv) / float(self.w_img)
y_zoom = float(self.h_canv) / float(self.h_img)
self.img_zoom = min(x_zoom, y_zoom)
self.fit_img_zoom = self.img_zoom
def open_img_file(self, img_path):
try:
img = Image.open(img_path)
w_img, h_img = img.size
print('Opened image file:',img_path,' size=',img.size)
self.set_img( img )
return True
except:
print('==========> Error opening image file:',img_path)
return False
def set_img_from_clipboard(self):
if not HAS_IMAGEGRAB:
return False
img = ImageGrab.grabclipboard()
if isinstance(img, Image.Image):
self.set_img( img )
return True
else:
print('WARNING... Pasting Clipboard Image Failed.')
return False
def set_img(self, img):
self.img = img
self.w_img, self.h_img = img.size
self.calc_nominal_zoom()
self.fi_origin = 0.0
self.fj_origin = 1.0 # lower left
self.fimax = 1.0
self.fjmax = 0.0
self.fi_offset = 0.0 # when upper left of canvas is not upper left of image
self.fj_offset = 0.0
self.x_origin = 0.0 # units on the image plot
self.y_origin = 0.0 # units on the image plot
self.xmax = 10.0 # units on the image plot
self.ymax = 10.0 # units on the image plot
self.log_x = False
self.log_y = False
self.log10_x_origin = 1.0 # units on the image plot
self.log10_y_origin = 1.0 # units on the image plot
self.log10_xmax = 2.0 # units on the image plot
self.log10_ymax = 2.0 # units on the image plot
def get_zoomed_offset_img(self, greyscale=False, text='', show_linlog_text=False):
fi_min = max(0.0, self.get_img_fi_from_canvas_i( 0 ))
fi_max = min(1.0, self.get_img_fi_from_canvas_i( self.w_canv ))
fj_min = max(0.0, self.get_img_fj_from_canvas_j( 0 ))
fj_max = min(1.0, self.get_img_fj_from_canvas_j( self.h_canv ))
imin = int( self.w_img * fi_min )
imax = int( self.w_img * fi_max )
jmin = int( self.h_img * fj_min )
jmax = int( self.h_img * fj_max )
img_slice = self.img.crop( (imin,jmin, imax,jmax) )
wz = int((imax-imin+1)*self.img_zoom)
hz = int((jmax-jmin+1)*self.img_zoom)
img_slice_resized = img_slice.resize( (wz, hz), Image.ANTIALIAS)
if (wz>self.w_canv) or (hz>self.h_canv):
bbox = (0,0, min(wz,self.w_canv), min(hz,self.h_canv))
img_slice_resized = img_slice_resized.crop( bbox )
#print('... bbox resized to:', bbox)
if greyscale:
img_slice_resized = img_slice_resized.convert('LA')
# place text onto plot
if text or show_linlog_text:
# Make RGBA image to put text on
img_slice_resized = img_slice_resized.convert('RGBA')
w,h = img_slice_resized.size
d = max(w,h)
img_square = Image.new('RGBA', (d,d))
draw = ImageDraw.Draw(img_square)
def get_font_for_size( s, w_lim ):
myfont = freefont10
for test_font in [freefont16, freefont24, freefont36, freefont48, freefont72]:
wtext, htext = test_font.getsize(s)
if wtext >= w_lim:
return myfont
myfont = test_font
return myfont
# yaxis text
if show_linlog_text:
if self.log_y:
ylab = 'log scale'
color = (255,0,0,120) # red
else:
ylab = 'linear scale'
color = (0,255,0,120) # green
myfont = get_font_for_size( 'logear scale', min(w,h)/2 )
di, dj = myfont.getsize(ylab)
itxt = (h-di)/2
draw.text( (itxt,d-dj),ylab, font=myfont, fill=color )
# rotate image
img_temp = img_square.rotate(270)
img_new = img_temp.crop( (0,0, w,h) )
draw = ImageDraw.Draw(img_new)
# xaxis text
if show_linlog_text:
if self.log_x:
xlab = 'log scale'
color = (255,0,0,120) # red
else:
xlab = 'linear scale'
color = (0,255,0,120) # green
di, dj = myfont.getsize(xlab)
itxt = (w-di)/2
draw.text( (itxt,h-dj),xlab, font=myfont, fill=color )
# center text (if any)
if text:
myfont = get_font_for_size( text, w )
di, dj = myfont.getsize(text)
itxt = (w-di)/2
jtxt = (h-dj)/2
draw.text( (itxt, jtxt), text, font=myfont, fill=(255,0,255,120) ) # magenta
#img_slice_resized = Image.blend(img_slice_resized, img_new, 0.5)
img_slice_resized = Image.alpha_composite(img_slice_resized, img_new)
return img_slice_resized
def get_tk_photoimage(self, greyscale=False, text='', show_linlog_text=False):
img_slice_resized = self.get_zoomed_offset_img(greyscale=greyscale, text=text,
show_linlog_text=show_linlog_text)
return ImageTk.PhotoImage( img_slice_resized )
#def get_img_fi_from_canvas_i(self, i):
# i_zoom = self.w_img * self.fi_offset * self.img_zoom + i
# i_zoom_max = self.w_img * self.img_zoom
# return float(i_zoom) / float(i_zoom_max)
def get_img_i_from_img_fi(self, fi):
return self.w_img * fi
def get_img_j_from_img_fj(self, fj):
return self.h_img * fj
def get_canvas_i_from_img_fi(self, fi):
i_off_screen = self.w_img * self.fi_offset
i_total = self.w_img * fi
return (i_total - i_off_screen) * self.img_zoom
def get_canvas_j_from_img_fj(self, fj):
j_off_screen = self.h_img * self.fj_offset
j_total = self.h_img * fj
return (j_total - j_off_screen) * self.img_zoom
def get_img_fi_from_canvas_i(self, i):
"""Given i on canvas, get fraction of img width that i corresponds to."""
i_off_screen = self.w_img * self.fi_offset
i_zoom = float(i) / self.img_zoom
#i_zoom_max = self.w_img * self.img_zoom
return float(i_off_screen+i_zoom) / float(self.w_img)
def get_img_fj_from_canvas_j(self, j):
"""Given j on canvas, get fraction of img height that j corresponds to."""
j_off_screen = self.h_img * self.fj_offset
j_zoom = float(j) / self.img_zoom
#j_zoom_max = self.h_img * self.img_zoom
return float(j_off_screen+j_zoom) / float(self.h_img)
def adjust_offset(self, di, dj):
dfi = float(di) / self.img_zoom / float(self.w_img)
dfj = float(dj) / self.img_zoom / float(self.h_img)
#self.fi_offset = clamp( dfi+self.fi_offset, 0., 1.)
#self.fj_offset = clamp( dfj+self.fj_offset, 0., 1.)
self.set_fraction_offset( fi=self.fi_offset + dfi, fj=self.fj_offset + dfj )
def define_origin_ij(self, i, j):
"""
Place the origin of the 2D plot at image coordinates i,j.
i and j are in pixel coordinates of the visible/zoomed portion of the canvas.
"""
self.fi_origin = self.get_img_fi_from_canvas_i( i )
self.fj_origin = self.get_img_fj_from_canvas_j( j )
def set_origin_xy(self, x, y):
self.set_x_origin(x)
self.set_y_origin(y)
def set_x_origin(self, x):
self.x_origin = x
if self.log_x and self.x_origin<=0.0:
self.x_origin = 0.1
if self.log_x:
self.log10_x_origin = math.log10( self.x_origin )
def set_y_origin(self, y):
self.y_origin = y
if self.log_y and self.y_origin<=0.0:
self.y_origin = 0.1
if self.log_y:
self.log10_y_origin = math.log10( self.y_origin )
def set_x_max(self, x):
self.xmax = x
if self.log_x and self.xmax<=0.0:
self.xmax = 10.0
if self.log_x:
self.log10_xmax = math.log10( self.xmax )
def set_y_max(self, y):
self.ymax = y
if self.log_y and self.ymax<=0.0:
self.ymax = 10.0
if self.log_y:
self.log10_ymax = math.log10( self.ymax )
def set_ix_origin(self, i, x):
self.fi_origin = self.get_img_fi_from_canvas_i( i )
self.set_x_origin(x)
def set_jy_origin(self, j, y):
self.fj_origin = self.get_img_fj_from_canvas_j( j )
self.set_y_origin(y)
def set_imax_xmax(self, imax, xmax):
self.fimax = self.get_img_fi_from_canvas_i( imax )
self.set_x_max( xmax )
def set_jmax_ymax(self, jmax, ymax):
self.fjmax = self.get_img_fj_from_canvas_j( jmax )
self.set_y_max( ymax )
def get_xy_at_fifj(self, fi, fj):
di = fi - self.fi_origin
dj = self.fj_origin - fj # note LL vs UL
if self.log_x:
dx = self.log10_xmax - self.log10_x_origin
else:
dx = self.xmax - self.x_origin
if self.log_y:
dy = self.log10_ymax - self.log10_y_origin
else:
dy = self.ymax - self.y_origin
try:
if self.log_x:
x = 10.0**( self.log10_x_origin + dx * di / (self.fimax - self.fi_origin) )
else:
x = self.x_origin + dx * di / (self.fimax - self.fi_origin)
if self.log_y:
y = 10.0**( self.log10_y_origin + dy * dj / (self.fj_origin - self.fjmax) ) # note LL vs UL
else:
y = self.y_origin + dy * dj / (self.fj_origin - self.fjmax) # note LL vs UL
except:
return None, None
return x,y
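# In the linear case the mapping above reduces to
#   x = x_origin + (xmax - x_origin) * (fi - fi_origin) / (fimax - fi_origin)
# and symmetrically for y, measured upward from the lower-left origin;
# with log axes the same interpolation is done in log10 space.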
def get_fifj_at_ij(self, i,j):
fi = self.get_img_fi_from_canvas_i( i )
fj = self.get_img_fj_from_canvas_j( j )
return fi, fj
def get_xy_at_ij(self, i,j):
fi = self.get_img_fi_from_canvas_i( i )
fj = self.get_img_fj_from_canvas_j( j )
return self.get_xy_at_fifj(fi, fj)
def get_canvas_i(self, x_float):
if self.log_x:
x10 = math.log10( x_float )
fx = (x10-self.log10_x_origin) / (self.log10_xmax-self.log10_x_origin)
else:
fx = (x_float-self.x_origin) / (self.xmax-self.x_origin)
f_plot = self.fimax - self.fi_origin # fraction of canvas holding plot
i_plot = fx * f_plot * self.w_img * self.img_zoom # i value into plot from origin
i_offset = self.fi_offset * self.w_img * self.img_zoom
i_origin = self.fi_origin * self.w_img * self.img_zoom
i = int( i_plot - i_offset + i_origin )
if i>=0 and i<=self.w_canv:
return i
else:
return -1 # if not on canvas
def get_canvas_j(self, y_float):
if self.log_y:
y10 = math.log10( y_float )
fy = (y10-self.log10_y_origin) / (self.log10_ymax-self.log10_y_origin)
else:
fy = (y_float-self.y_origin) / (self.ymax-self.y_origin)
f_plot = self.fj_origin - self.fjmax # fraction of canvas holding plot
j_plot = fy * f_plot * self.h_img * self.img_zoom # i value into plot from origin
j_offset = self.fj_offset * self.h_img * self.img_zoom
j_origin = self.fj_origin * self.h_img * self.img_zoom
j = int( j_origin - j_offset - j_plot )
if j>=0 and j<=self.h_canv:
return j
else:
return -1 # if not on canvas
def get_ij_at_xy(self, x,y):
i = self.get_canvas_i( x )
j = self.get_canvas_j( y )
if i>=0 and j>=0:
return i,j
else:
return -1, -1 # if not on canvas
def x_is_visible(self, x):
return self.get_canvas_i(x) >= 0
def y_is_visible(self, y):
return self.get_canvas_j(y) >= 0
if __name__ == '__main__':
PA = PlotArea(w_canv=600, h_canv=600)
#PA.open_img_file( 'Cu_cte.png' )
#PA.set_fraction_offset(fi=0.2, fj=0.3)
#PA.zoom_out(zoom_factor=0.5)
img_slice_resized = PA.get_zoomed_offset_img(greyscale=True, show_linlog_text=True)
print('img_slice_resized.size=',img_slice_resized.size)
img_slice_resized.save('test_img.png')
PA.set_ix_origin(76, 0.0)
PA.set_jy_origin(432, 0.0)
PA.set_imax_xmax(540, 20.0)
PA.set_jmax_ymax(48, 120.0)
if 0:
for x,i_exp in [(0.,76), (5.,190), (10.,306), (15.,424), (20.,540)]:
print( 'x=%g, i=%i, i_exp=%i'%(x, PA.get_canvas_i(x),i_exp))
print('='*55)
for y,j_exp in [(0.,432), (20.,369), (40.,304), (80.,177), (120.,49)]:
print( 'y=%g, j=%i, j_exp=%i'%(y, PA.get_canvas_j(y),j_exp))
print()
print( 'x,y at 306,304 =',PA.get_xy_at_ij(306,304) )
else:
print('='*20,' Offset')
PA.set_fraction_offset(fi=0.1, fj=0.1)
for x,i_exp in [(0.,16), (5.,131), (10.,248), (15.,365), (20.,482)]:
print( 'x=%g, i=%i, i_exp=%i'%(x, PA.get_canvas_i(x),i_exp))
print('='*55)
for y,j_exp in [(0.,383), (20.,322), (40.,257), (80.,130), (120.,0)]:
print( 'y=%g, j=%i, j_exp=%i'%(y, PA.get_canvas_j(y),j_exp))
print()
print( 'x,y at 481,0 =',PA.get_xy_at_ij(481,0) )
img_slice_resized = PA.get_zoomed_offset_img(greyscale=True, show_linlog_text=True)
img_slice_resized.save('test_offset_img.png')
|
colossalbit/cssypy
|
refs/heads/master
|
cssypy/functions/functions.py
|
1
|
from .base import register_builtin
from .. import errors, datatypes
def _rgb_arg(x):
if isinstance(x, datatypes.Percentage):
return 255. * x.p / 100.
elif isinstance(x, datatypes.Number):
return x.n
else:
raise errors.CSSValueError()
@register_builtin
def rgb(r, g, b):
rgb = tuple(_rgb_arg(x) for x in (r,g,b))
return datatypes.Color(rgb=rgb, format='rgb')
@register_builtin
def hsl(h, s, l):
if not isinstance(h, datatypes.Number):
raise errors.CSSValueError()
h = (h.n % 360. + 360.) % 360.
if not isinstance(s, datatypes.Percentage) or not isinstance(l, datatypes.Percentage):
raise errors.CSSValueError()
s = s.p / 100.
l = l.p / 100.
return datatypes.Color(hsl=(h,s,l), format='hsl')
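# Notes on accepted values (inferred from the converters above): rgb() takes
# Number components in 0-255 or Percentage components in 0-100%, matching the
# CSS forms rgb(255, 0, 0) and rgb(100%, 0%, 0%); hsl() requires a Number hue
# (wrapped into 0-360) plus Percentage saturation and lightness, matching
# hsl(120, 50%, 50%).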
|
mwrlabs/veripy
|
refs/heads/master
|
contrib/rfc2473/tn_6in6/packet_processing.py
|
1
|
from scapy.all import *
from veripy import util
from veripy.assertions import *
from veripy.models import ComplianceTestCase
class EncapsulatingHopLimitDecrementedTestCase(ComplianceTestCase):
"""
Encapsulating Hop Limit Decremented (6in6)
Verifies that an intermediate node decrements the hop limit of the
encapsulating packet, not the hop limit of the encapsulated IPv6 packet.
@private
Source: RFC 2473 Section 3.2
"""
def run(self):
self.logger.info("Sending ICMPv6 echo request from TN4 to TN1, via 6in6 tunnel.")
self.node(4).send(
IPv6(src=str(self.node(4).global_ip()), dst=str(self.node(1).global_ip()), hlim=30)/
IPv6(src="8000::1", dst="8001::1", hlim=60)/
ICMPv6EchoRequest(seq=self.next_seq()))
self.logger.info("Checking for a tunnelled packet...")
r1 = self.router(1).iface(1).received(seq=self.seq(), type=ICMPv6EchoRequest)
assertEqual(1, len(r1), "expected a packet to be tunnelled to TR1")
assertEqual(IPv6, r1[0][0].__class__, "expected an encapsulating IPv6 layer")
assertEqual(IPv6, r1[0][1].__class__, "expected an encapsulated IPv6 layer")
assertNotEqual(IPv6, r1[0][2].__class__, "did not expect a second layer of encapsulation")
assertEqual(29, r1[0][0].hlim, "expected the Hop Limit of the encapsulating packet to be decremented")
assertEqual(60, r1[0][1].hlim, "did not expect the Hop Limit of the encapsulated packet to be decremented")
class HopLimitExceededWithinTunnelTestCase(ComplianceTestCase):
"""
Hop Limit Exceeded Within Tunnel
Verifies a hop limit exceeded message is handled correctly when it occurs
within a tunnel.
@private
Source: RFC 2473 Section 8.1
"""
def run(self):
self.logger.info("Sending ICMPv6 echo request from TN4 to TN2, via 6in6 tunnel.")
self.node(4).send(
IPv6(src=str(self.node(4).global_ip()), dst=str(self.node(2).global_ip()))/
ICMPv6EchoRequest(seq=self.next_seq()))
self.logger.info("Checking for the ICMPv6 Echo Request forwarded to TR1...")
r1 = self.router(1).received(iface=1, seq=self.seq(), type=ICMPv6EchoRequest)
assertEqual(1, len(r1), "expected the Echo Request to be tunnelled to TR1")
self.logger.info("Sending Hop Limit Exceeded message from TR1 to RUT...")
self.router(1).send(
IPv6(src=str(self.router(1).iface(1).global_ip()), dst=str(self.target(1).global_ip()))/
ICMPv6TimeExceeded(code=0)/
r1[0], iface=1)
self.logger.info("Checking that the RUT has sent an ICMPv6 Unreachable Node error to TN4...")
r2 = self.node(4).received(type=ICMPv6DestUnreach)
assertEqual(1, len(r2), "expected the RUT to send an ICMPv6 Unreachable Node error to TN4")
assertEqual(0x03, r2[0][ICMPv6DestUnreach].code, "expected the Unreachable Node to have a Code field = 3 (Address Unreachable)")
assertHasLayer(ICMPv6EchoRequest, r2[0], "expected the Hop Limit Exceeded message to contain the original Echo Request")
assertEqual(self.seq(), r2[0][ICMPv6EchoRequest].seq, "expected the Hop Limit Exceeded message to contain the original Echo Request")
assertEqual(ICMPv6EchoRequest, r2[0][ICMPv6DestUnreach][2].__class__, "did not expect the error message to include the tunnel frame")
class UnreachableNodeWithinTunnelTestCase(ComplianceTestCase):
"""
Unreachable Node Within Tunnel
Verifies an unreachable node message is handled correctly when it occurs
within a tunnel.
@private
Source: RFC 2473 Section 8.1
"""
def run(self):
self.logger.info("Sending ICMPv6 echo request from TN4 to TN2, via 6in6 tunnel.")
self.node(4).send(
IPv6(src=str(self.node(4).global_ip()), dst=str(self.node(2).global_ip()))/
ICMPv6EchoRequest(seq=self.next_seq()))
self.logger.info("Checking for the ICMPv6 Echo Request forwarded to TR1...")
r1 = self.router(1).received(iface=1, seq=self.seq(), type=ICMPv6EchoRequest)
assertEqual(1, len(r1), "expected the Echo Request to be tunnelled to TR1")
self.logger.info("Sending Unreachable Node message from TR1 to RUT...")
self.router(1).send(
IPv6(src=str(self.router(1).iface(1).global_ip()), dst=str(self.target(1).global_ip()))/
ICMPv6DestUnreach()/
r1[0], iface=1)
self.logger.info("Checking that the RUT has sent an ICMPv6 Unreachable Node error to TN4...")
r2 = self.node(4).received(type=ICMPv6DestUnreach)
assertEqual(1, len(r2), "expected the RUT to send an ICMPv6 Unreachable Node error to TN4")
assertEqual(0x03, r2[0][ICMPv6DestUnreach].code, "expected the Unreachable Node to have a Code field = 3 (Address Unreachable)")
        assertHasLayer(ICMPv6EchoRequest, r2[0], "expected the Unreachable Node message to contain the original Echo Request")
        assertEqual(self.seq(), r2[0][ICMPv6EchoRequest].seq, "expected the Unreachable Node message to contain the original Echo Request")
assertEqual(ICMPv6EchoRequest, r2[0][ICMPv6DestUnreach][2].__class__, "did not expect the error message to include the tunnel frame")
class PacketTooBigWithinTunnelTestCase(ComplianceTestCase):
"""
Packet Too Big Within Tunnel
Verifies a packet too big message is handled correctly when it occurs
within a tunnel.
@private
Source: RFC 2473 Section 8.1
"""
def run(self):
self.logger.info("Sending ICMPv6 echo request from TN4 to TN2, via 6in6 tunnel.")
self.node(4).send(
util.pad(IPv6(src=str(self.node(4).global_ip()), dst=str(self.node(2).global_ip()))/
ICMPv6EchoRequest(seq=self.next_seq()), 1360, True))
self.logger.info("Checking for the ICMPv6 Echo Request forwarded to TR1...")
r1 = self.router(1).received(iface=1, seq=self.seq(), type=ICMPv6EchoRequest)
assertEqual(1, len(r1), "expected the Echo Request to be tunnelled to TR1")
self.logger.info("Sending packet too big message from TR1 to RUT.")
self.node(1).send((
IPv6(src=str(self.router(1).iface(1).global_ip()), dst=str(self.target(1).global_ip()))/
ICMPv6PacketTooBig(mtu=1280)/
r1[0])[0:1280])
self.logger.info("Checking that RUT has forwarded a Packet Too Big message to TN4...")
r2 = self.node(4).received(type=ICMPv6PacketTooBig)
assertEqual(1, len(r2), "expected the RUT to forward a Packet Too Big message to TN4")
        assertEqual(0x00, r2[0][ICMPv6PacketTooBig].code, "expected the Packet Too Big message to have a Code field = 0")
        assertHasLayer(ICMPv6EchoRequest, r2[0], "expected the Packet Too Big message to contain the original Echo Request")
        assertEqual(self.seq(), r2[0][ICMPv6EchoRequest].seq, "expected the Packet Too Big message to contain the original Echo Request")
assertEqual(ICMPv6EchoRequest, r2[0][ICMPv6PacketTooBig][2].__class__, "did not expect the error message to include the tunnel frame")
|
rtrigoso/ghost-somepolymath
|
refs/heads/master
|
node_modules/pygmentize-bundled/vendor/pygments/pygments/formatters/terminal.py
|
363
|
# -*- coding: utf-8 -*-
"""
pygments.formatters.terminal
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Formatter for terminal output with ANSI sequences.
:copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import sys
from pygments.formatter import Formatter
from pygments.token import Keyword, Name, Comment, String, Error, \
Number, Operator, Generic, Token, Whitespace
from pygments.console import ansiformat
from pygments.util import get_choice_opt
__all__ = ['TerminalFormatter']
#: Map token types to a tuple of color values for light and dark
#: backgrounds.
TERMINAL_COLORS = {
Token: ('', ''),
Whitespace: ('lightgray', 'darkgray'),
Comment: ('lightgray', 'darkgray'),
Comment.Preproc: ('teal', 'turquoise'),
Keyword: ('darkblue', 'blue'),
Keyword.Type: ('teal', 'turquoise'),
Operator.Word: ('purple', 'fuchsia'),
Name.Builtin: ('teal', 'turquoise'),
Name.Function: ('darkgreen', 'green'),
Name.Namespace: ('_teal_', '_turquoise_'),
Name.Class: ('_darkgreen_', '_green_'),
Name.Exception: ('teal', 'turquoise'),
Name.Decorator: ('darkgray', 'lightgray'),
Name.Variable: ('darkred', 'red'),
Name.Constant: ('darkred', 'red'),
Name.Attribute: ('teal', 'turquoise'),
Name.Tag: ('blue', 'blue'),
String: ('brown', 'brown'),
Number: ('darkblue', 'blue'),
Generic.Deleted: ('red', 'red'),
Generic.Inserted: ('darkgreen', 'green'),
Generic.Heading: ('**', '**'),
Generic.Subheading: ('*purple*', '*fuchsia*'),
Generic.Error: ('red', 'red'),
Error: ('_red_', '_red_'),
}
class TerminalFormatter(Formatter):
r"""
Format tokens with ANSI color sequences, for output in a text console.
Color sequences are terminated at newlines, so that paging the output
works correctly.
The `get_style_defs()` method doesn't do anything special since there is
no support for common styles.
Options accepted:
`bg`
Set to ``"light"`` or ``"dark"`` depending on the terminal's background
(default: ``"light"``).
`colorscheme`
A dictionary mapping token types to (lightbg, darkbg) color names or
``None`` (default: ``None`` = use builtin colorscheme).
"""
name = 'Terminal'
aliases = ['terminal', 'console']
filenames = []
def __init__(self, **options):
Formatter.__init__(self, **options)
self.darkbg = get_choice_opt(options, 'bg',
['light', 'dark'], 'light') == 'dark'
self.colorscheme = options.get('colorscheme', None) or TERMINAL_COLORS
def format(self, tokensource, outfile):
# hack: if the output is a terminal and has an encoding set,
# use that to avoid unicode encode problems
if not self.encoding and hasattr(outfile, "encoding") and \
hasattr(outfile, "isatty") and outfile.isatty() and \
sys.version_info < (3,):
self.encoding = outfile.encoding
return Formatter.format(self, tokensource, outfile)
def format_unencoded(self, tokensource, outfile):
for ttype, value in tokensource:
color = self.colorscheme.get(ttype)
while color is None:
ttype = ttype[:-1]
color = self.colorscheme.get(ttype)
if color:
color = color[self.darkbg]
spl = value.split('\n')
for line in spl[:-1]:
if line:
outfile.write(ansiformat(color, line))
outfile.write('\n')
if spl[-1]:
outfile.write(ansiformat(color, spl[-1]))
else:
outfile.write(value)
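# --- Hedged usage sketch (not part of this module; added for illustration) ---
# The formatter is normally driven through pygments.highlight(); `bg` and
# `colorscheme` are the options documented on the class above.
if __name__ == '__main__':
    from pygments import highlight
    from pygments.lexers import PythonLexer
    sample = 'print("hello")\n'
    # Emits ANSI-coloured output tuned for a dark terminal background.
    sys.stdout.write(highlight(sample, PythonLexer(), TerminalFormatter(bg='dark')))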
|
UniversalMasterEgg8679/ansible
|
refs/heads/devel
|
lib/ansible/modules/windows/win_group.py
|
56
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2014, Chris Hoffman <choffman@chathamfinancial.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# this is a windows documentation stub. actual code lives in the .ps1
# file of the same name
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['stableinterface'],
'supported_by': 'core'}
DOCUMENTATION = r'''
---
module: win_group
version_added: "1.7"
short_description: Add and remove local groups
description:
- Add and remove local groups
options:
name:
description:
- Name of the group
required: true
default: null
aliases: []
description:
description:
- Description of the group
required: false
default: null
aliases: []
state:
description:
- Create or remove the group
required: false
choices:
- present
- absent
default: present
aliases: []
author: "Chris Hoffman (@chrishoffman)"
'''
EXAMPLES = r'''
- name: Create a new group
win_group:
name: deploy
description: Deploy Group
state: present
- name: Remove a group
win_group:
name: deploy
state: absent
'''
|
DryFlyRyan/BrewsBrothersChillerFrontEnd
|
refs/heads/master
|
node_modules/node-gyp/gyp/pylib/gyp/generator/gypsh.py
|
2779
|
# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""gypsh output module
gypsh is a GYP shell. It's not really a generator per se. All it does is
fire up an interactive Python session with a few local variables set to the
variables passed to the generator. Like gypd, it's intended as a debugging
aid, to facilitate the exploration of .gyp structures after being processed
by the input module.
The expected usage is "gyp -f gypsh -D OS=desired_os".
"""
import code
import sys
# All of this stuff about generator variables was lovingly ripped from gypd.py.
# That module has a much better description of what's going on and why.
_generator_identity_variables = [
'EXECUTABLE_PREFIX',
'EXECUTABLE_SUFFIX',
'INTERMEDIATE_DIR',
'PRODUCT_DIR',
'RULE_INPUT_ROOT',
'RULE_INPUT_DIRNAME',
'RULE_INPUT_EXT',
'RULE_INPUT_NAME',
'RULE_INPUT_PATH',
'SHARED_INTERMEDIATE_DIR',
]
generator_default_variables = {
}
for v in _generator_identity_variables:
generator_default_variables[v] = '<(%s)' % v
def GenerateOutput(target_list, target_dicts, data, params):
locals = {
'target_list': target_list,
'target_dicts': target_dicts,
'data': data,
}
# Use a banner that looks like the stock Python one and like what
# code.interact uses by default, but tack on something to indicate what
# locals are available, and identify gypsh.
banner='Python %s on %s\nlocals.keys() = %s\ngypsh' % \
(sys.version, sys.platform, repr(sorted(locals.keys())))
code.interact(banner, local=locals)
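# --- Hedged usage note (illustration only) ---
# Invoked as `gyp -f gypsh -D OS=desired_os`, the generator drops into an
# interactive session where the locals passed above can be inspected, e.g.:
#
#     >>> target_list[0]
#     >>> target_dicts[target_list[0]].keys()
#     >>> data.keys()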
|
areteix/powerline
|
refs/heads/develop
|
tools/generate_gradients.py
|
32
|
#!/usr/bin/env python
# vim:fileencoding=utf-8:noet
'''Gradients generator
'''
from __future__ import (unicode_literals, division, absolute_import, print_function)
import sys
import json
import argparse
from itertools import groupby
from colormath.color_objects import sRGBColor, LabColor
from colormath.color_conversions import convert_color
from colormath.color_diff import delta_e_cie2000
from powerline.colorscheme import cterm_to_hex
def num2(s):
try:
return (True, [int(v) for v in s.partition(' ')[::2]])
    except (TypeError, ValueError):
return (False, [float(v) for v in s.partition(' ')[::2]])
def rgbint_to_lab(rgbint):
rgb = sRGBColor(
(rgbint >> 16) & 0xFF, (rgbint >> 8) & 0xFF, rgbint & 0xFF,
is_upscaled=True
)
return convert_color(rgb, LabColor)
cterm_to_lab = tuple((rgbint_to_lab(v) for v in cterm_to_hex))
def color(s):
if len(s) <= 3:
return cterm_to_lab[int(s)]
else:
return rgbint_to_lab(int(s, 16))
def nums(s):
return [int(i) for i in s.split()]
def linear_gradient(start_value, stop_value, start_offset, stop_offset, offset):
return start_value + ((offset - start_offset) * (stop_value - start_value) / (stop_offset - start_offset))
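# Worked example (illustration only): interpolating 25 of the way through the
# segment [start_offset=0, stop_offset=50] between the values 0 and 100 gives
#     linear_gradient(0, 100, 0, 50, 25) == 50.0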
def lab_gradient(slab, elab, soff, eoff, off):
svals = slab.get_value_tuple()
evals = elab.get_value_tuple()
return LabColor(*[
linear_gradient(start_value, end_value, soff, eoff, off)
for start_value, end_value in zip(svals, evals)
])
def generate_gradient_function(DATA):
def gradient_function(y):
initial_offset = 0
for offset, start, end in DATA:
if y <= offset:
return lab_gradient(start, end, initial_offset, offset, y)
initial_offset = offset
return gradient_function
def get_upscaled_values(rgb):
return [min(max(0, i), 255) for i in rgb.get_upscaled_value_tuple()]
def get_rgb(lab):
rgb = convert_color(lab, sRGBColor)
rgb = sRGBColor(*get_upscaled_values(rgb), is_upscaled=True)
return rgb.get_rgb_hex()[1:]
def find_color(ulab, colors, ctrans):
cur_distance = float('inf')
cur_color = None
i = 0
for clab in colors:
dist = delta_e_cie2000(ulab, clab)
if dist < cur_distance:
cur_distance = dist
cur_color = (ctrans(i), clab)
i += 1
return cur_color
def print_color(color):
if type(color) is int:
colstr = '5;' + str(color)
else:
rgb = convert_color(color, sRGBColor)
colstr = '2;' + ';'.join((str(i) for i in get_upscaled_values(rgb)))
sys.stdout.write('\033[48;' + colstr + 'm ')
def print_colors(colors, num):
for i in range(num):
color = colors[int(round(i * (len(colors) - 1) / num))]
print_color(color)
sys.stdout.write('\033[0m\n')
def dec_scale_generator(num):
j = 0
r = ''
while num:
r += '\033[{0}m'.format(j % 2)
for i in range(10):
r += str(i)
num -= 1
if not num:
break
j += 1
r += '\033[0m\n'
return r
def compute_steps(gradient, weights):
maxweight = len(gradient) - 1
if weights:
weight_sum = sum(weights)
norm_weights = [100.0 * weight / weight_sum for weight in weights]
steps = [0]
for weight in norm_weights:
steps.append(steps[-1] + weight)
steps.pop(0)
steps.pop(0)
else:
step = m / maxweight
steps = [i * step for i in range(1, maxweight + 1)]
return steps
palettes = {
'16': (cterm_to_lab[:16], lambda c: c),
'256': (cterm_to_lab, lambda c: c),
None: (cterm_to_lab[16:], lambda c: c + 16),
}
def show_scale(rng, num_output):
if not rng and num_output >= 32 and (num_output - 1) // 10 >= 4 and (num_output - 1) % 10 == 0:
sys.stdout.write('0')
sys.stdout.write(''.join(('%*u' % (num_output // 10, i) for i in range(10, 101, 10))))
sys.stdout.write('\n')
else:
if rng:
vmin, vmax = rng[1]
isint = rng[0]
else:
isint = True
vmin = 0
vmax = 100
s = ''
lasts = ' ' + str(vmax)
while len(s) + len(lasts) < num_output:
curpc = len(s) + 1 if s else 0
curval = vmin + curpc * (vmax - vmin) / num_output
if isint:
curval = int(round(curval))
s += str(curval) + ' '
sys.stdout.write(s[:-1] + lasts + '\n')
sys.stdout.write(dec_scale_generator(num_output) + '\n')
if __name__ == '__main__':
p = argparse.ArgumentParser(description=__doc__)
p.add_argument('gradient', nargs='*', metavar='COLOR', type=color, help='List of colors (either indexes from 8-bit palette or 24-bit RGB in hexadecimal notation)')
p.add_argument('-n', '--num_items', metavar='INT', type=int, help='Number of items in resulting list', default=101)
p.add_argument('-N', '--num_output', metavar='INT', type=int, help='Number of characters in sample', default=101)
p.add_argument('-r', '--range', metavar='V1 V2', type=num2, help='Use this range when outputting scale')
p.add_argument('-s', '--show', action='store_true', help='If present output gradient sample')
p.add_argument('-p', '--palette', choices=('16', '256'), help='Use this palette. Defaults to 240-color palette (256 colors without first 16)')
p.add_argument('-w', '--weights', metavar='INT INT ...', type=nums, help='Adjust weights of colors. Number of weights must be equal to number of colors')
p.add_argument('-C', '--omit-terminal', action='store_true', help='If present do not compute values for terminal')
args = p.parse_args()
m = args.num_items
steps = compute_steps(args.gradient, args.weights)
data = [
(weight, args.gradient[i - 1], args.gradient[i])
for weight, i in zip(steps, range(1, len(args.gradient)))
]
gr_func = generate_gradient_function(data)
gradient = [gr_func(y) for y in range(0, m)]
r = [get_rgb(lab) for lab in gradient]
if not args.omit_terminal:
r2 = [find_color(lab, *palettes[args.palette])[0] for lab in gradient]
r3 = [i[0] for i in groupby(r2)]
if not args.omit_terminal:
print(json.dumps(r3) + ',')
print(json.dumps(r2) + ',')
print(json.dumps(r))
if args.show:
print_colors(args.gradient, args.num_output)
if not args.omit_terminal:
print_colors(r3, args.num_output)
print_colors(r2, args.num_output)
print_colors(gradient, args.num_output)
show_scale(args.range, args.num_output)
|
Onirik79/aaritmud
|
refs/heads/master
|
data/proto_mobs/mfdonald/mfdonald_mob_unicorno-azzurro.py
|
1
|
# -*- coding: utf-8 -*-
#= IMPORT ======================================================================
import random
from src.defer import defer_random_time
from src.log import log
from src.utility import is_prefix, multiple_arguments, random_marks
from src.commands.command_say import command_say
#= FUNCTIONS ===================================================================
def before_listen_rpg_channel(listener, speaker, target, phrase, ask, exclaim, behavioured):
    # With a probability of 10%
if random.randint(1, 10) != 1:
return
    # Continue only if the pink unicorn is about to speak
if speaker.prototype.code != "mfdonald_mob_unicorno-rosa":
return
    # Split the spoken phrase into multiple words (or groups of words in quotes)
words = multiple_arguments(phrase)
    # Check whether at least one of the spoken words starts in the desired
    # way (note that the script also triggers if the unicorn says words
    # like 'ringhia'; to avoid that, the is_same function, also from the
    # utility module, should be used instead)
if not is_prefix(("drin", "ring"), words):
return
    # Randomly add a few exclamation marks to what is being said
to_say = "Ring!%s Ring!%s" % (random_marks(0, 3), random_marks(0, 3))
command_say(listener, to_say)
    # Having pre-empted the pink unicorn, block what it was about to say
return True
#- End Function -
def after_listen_rpg_channel(listener, speaker, target, phrase, ask, exclaim, behavioured):
words = multiple_arguments(phrase)
if not is_prefix(("drin", "ring"), words):
return
if speaker.prototype.code != "mfdonald_mob_unicorno-rosa":
return
    # Randomly add a prefix, a few trailing 'o's and some trailing exclamation
    # and question marks, then answer the pink unicorn
prefix = ""
if random.randint(1, 10) == 1:
prefix = "Ha%s " % ("." * random.randint(2, 4))
to_say = "a %s %sHall%s?%s" % (speaker.get_numbered_keyword(looker=listener), prefix, "o" * random.randint(1, 3), random_marks(1, 1))
    # Wait a few seconds.. after all, it has to reach the phone!
    # Be careful not to set too high a number of seconds, otherwise the pink
    # unicorn could say ring ring more often than the blue unicorn can answer,
    # thus filling the RAM with callbacks :P
    # In this case, though, we absolutely don't have that problem
defer_random_time(1, 3, command_say, listener, to_say)
#- End Function -
|
michath/ConMonkey
|
refs/heads/master
|
media/webrtc/trunk/tools/gyp/test/make/gyptest-noload.py
|
362
|
#!/usr/bin/env python
# Copyright (c) 2010 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Tests the use of the NO_LOAD flag which makes loading sub .mk files
optional.
"""
# Python 2.5 needs this for the with statement.
from __future__ import with_statement
import os
import TestGyp
test = TestGyp.TestGyp(formats=['make'])
test.run_gyp('all.gyp', chdir='noload')
test.relocate('noload', 'relocate/noload')
test.build('build/all.gyp', test.ALL, chdir='relocate/noload')
test.run_built_executable('exe', chdir='relocate/noload',
stdout='Hello from shared.c.\n')
# Just sanity test that NO_LOAD=lib doesn't break anything.
test.build('build/all.gyp', test.ALL, chdir='relocate/noload',
arguments=['NO_LOAD=lib'])
test.run_built_executable('exe', chdir='relocate/noload',
stdout='Hello from shared.c.\n')
test.build('build/all.gyp', test.ALL, chdir='relocate/noload',
arguments=['NO_LOAD=z'])
test.run_built_executable('exe', chdir='relocate/noload',
stdout='Hello from shared.c.\n')
# Make sure we can rebuild without reloading the sub .mk file.
with open('relocate/noload/main.c', 'a') as src_file:
src_file.write("\n")
test.build('build/all.gyp', test.ALL, chdir='relocate/noload',
arguments=['NO_LOAD=lib'])
test.run_built_executable('exe', chdir='relocate/noload',
stdout='Hello from shared.c.\n')
# Change shared.c, but verify that it doesn't get rebuild if we don't load it.
with open('relocate/noload/lib/shared.c', 'w') as shared_file:
shared_file.write(
'#include "shared.h"\n'
'const char kSharedStr[] = "modified";\n'
)
test.build('build/all.gyp', test.ALL, chdir='relocate/noload',
arguments=['NO_LOAD=lib'])
test.run_built_executable('exe', chdir='relocate/noload',
stdout='Hello from shared.c.\n')
test.pass_test()
|
mdavid/zulip
|
refs/heads/master
|
zproject/urls.py
|
109
|
from django.conf import settings
from django.conf.urls import patterns, url, include
from django.views.generic import TemplateView, RedirectView
import os.path
import zerver.forms
# NB: There are several other pieces of code which route requests by URL:
#
# - runtornado.py has its own URL list for Tornado views. See the
# invocation of web.Application in that file.
#
# - The Nginx config knows which URLs to route to Django or Tornado.
#
# - Likewise for the local dev server in tools/run-dev.py.
urlpatterns = patterns('',
url(r'^$', 'zerver.views.home'),
# We have a desktop-specific landing page in case we change our / to not log in in the future. We don't
# want to require a new desktop app build for everyone in that case
url(r'^desktop_home/$', 'zerver.views.desktop_home'),
url(r'^accounts/login/sso/$', 'zerver.views.remote_user_sso', name='login-sso'),
url(r'^accounts/login/jwt/$', 'zerver.views.remote_user_jwt', name='login-jwt'),
url(r'^accounts/login/google/$', 'zerver.views.start_google_oauth2'),
url(r'^accounts/login/google/done/$', 'zerver.views.finish_google_oauth2'),
url(r'^accounts/login/local/$', 'zerver.views.dev_direct_login'),
# We have two entries for accounts/login to allow reverses on the Django
# view we're wrapping to continue to function.
url(r'^accounts/login/', 'zerver.views.login_page', {'template_name': 'zerver/login.html'}),
url(r'^accounts/login/', 'django.contrib.auth.views.login', {'template_name': 'zerver/login.html'}),
url(r'^accounts/logout/', 'zerver.views.logout_then_login'),
url(r'^accounts/webathena_kerberos_login/', 'zerver.views.webathena_kerberos_login'),
url(r'^accounts/password/reset/$', 'django.contrib.auth.views.password_reset',
{'post_reset_redirect' : '/accounts/password/reset/done/',
'template_name': 'zerver/reset.html',
'email_template_name': 'registration/password_reset_email.txt',
}),
url(r'^accounts/password/reset/done/$', 'django.contrib.auth.views.password_reset_done',
{'template_name': 'zerver/reset_emailed.html'}),
url(r'^accounts/password/reset/(?P<uidb64>[0-9A-Za-z]+)/(?P<token>.+)/$',
'django.contrib.auth.views.password_reset_confirm',
{'post_reset_redirect' : '/accounts/password/done/',
'template_name': 'zerver/reset_confirm.html',
'set_password_form' : zerver.forms.LoggingSetPasswordForm}),
url(r'^accounts/password/done/$', 'django.contrib.auth.views.password_reset_complete',
{'template_name': 'zerver/reset_done.html'}),
# Avatar
url(r'^avatar/(?P<email>[\S]+)?', 'zerver.views.avatar'),
# Registration views, require a confirmation ID.
url(r'^accounts/home/', 'zerver.views.accounts_home'),
url(r'^accounts/send_confirm/(?P<email>[\S]+)?',
TemplateView.as_view(template_name='zerver/accounts_send_confirm.html'), name='send_confirm'),
url(r'^accounts/register/', 'zerver.views.accounts_register'),
url(r'^accounts/do_confirm/(?P<confirmation_key>[\w]+)', 'confirmation.views.confirm'),
url(r'^invite/$', 'zerver.views.initial_invite_page', name='initial-invite-users'),
# Unsubscription endpoint. Used for various types of e-mails (day 1 & 2,
# missed PMs, etc.)
url(r'^accounts/unsubscribe/(?P<type>[\w]+)/(?P<token>[\w]+)',
'zerver.views.email_unsubscribe'),
# Portico-styled page used to provide email confirmation of terms acceptance.
url(r'^accounts/accept_terms/$', 'zerver.views.accounts_accept_terms'),
# Terms of service and privacy policy
url(r'^terms/$', TemplateView.as_view(template_name='zerver/terms.html')),
url(r'^terms-enterprise/$', TemplateView.as_view(template_name='zerver/terms-enterprise.html')),
url(r'^privacy/$', TemplateView.as_view(template_name='zerver/privacy.html')),
# Login/registration
url(r'^register/$', 'zerver.views.accounts_home', name='register'),
url(r'^login/$', 'zerver.views.login_page', {'template_name': 'zerver/login.html'}),
# A registration page that passes through the domain, for totally open realms.
url(r'^register/(?P<domain>\S+)/$', 'zerver.views.accounts_home_with_domain'),
# API and integrations documentation
url(r'^api/$', TemplateView.as_view(template_name='zerver/api.html')),
url(r'^api/endpoints/$', 'zerver.views.api_endpoint_docs'),
url(r'^integrations/$', TemplateView.as_view(template_name='zerver/integrations.html')),
url(r'^apps/$', TemplateView.as_view(template_name='zerver/apps.html')),
url(r'^robots\.txt$', RedirectView.as_view(url='/static/robots.txt')),
# Landing page, features pages, signup form, etc.
url(r'^hello/$', TemplateView.as_view(template_name='zerver/hello.html'),
name='landing-page'),
url(r'^new-user/$', RedirectView.as_view(url='/hello')),
url(r'^features/$', TemplateView.as_view(template_name='zerver/features.html')),
)
# These are used for voyager development. On a real voyager instance,
# these files would be served by nginx.
if settings.DEVELOPMENT and settings.LOCAL_UPLOADS_DIR is not None:
urlpatterns += patterns('',
url(r'^user_avatars/(?P<path>.*)$', 'django.views.static.serve',
{'document_root': os.path.join(settings.LOCAL_UPLOADS_DIR, "avatars")}),
url(r'^user_uploads/(?P<path>.*)$', 'django.views.static.serve',
{'document_root': os.path.join(settings.LOCAL_UPLOADS_DIR, "files")}),
)
urlpatterns += patterns('zerver.views',
# These are json format views used by the web client. They require a logged in browser.
url(r'^json/update_pointer$', 'json_update_pointer'),
url(r'^json/get_old_messages$', 'messages.json_get_old_messages'),
url(r'^json/get_public_streams$', 'json_get_public_streams'),
url(r'^json/rename_stream$', 'json_rename_stream'),
url(r'^json/make_stream_public$', 'json_make_stream_public'),
url(r'^json/make_stream_private$', 'json_make_stream_private'),
url(r'^json/send_message$', 'messages.json_send_message'),
url(r'^json/invite_users$', 'json_invite_users'),
url(r'^json/bulk_invite_users$', 'json_bulk_invite_users'),
url(r'^json/settings/change$', 'json_change_settings'),
url(r'^json/notify_settings/change$', 'json_change_notify_settings'),
url(r'^json/ui_settings/change$', 'json_change_ui_settings'),
url(r'^json/subscriptions/remove$', 'json_remove_subscriptions'),
url(r'^json/subscriptions/add$', 'json_add_subscriptions'),
url(r'^json/subscriptions/exists$', 'json_stream_exists'),
url(r'^json/subscriptions/property$', 'json_subscription_property'),
url(r'^json/get_subscribers$', 'json_get_subscribers'),
url(r'^json/fetch_api_key$', 'json_fetch_api_key'),
url(r'^json/update_active_status$', 'json_update_active_status'),
url(r'^json/get_active_statuses$', 'json_get_active_statuses'),
url(r'^json/tutorial_send_message$', 'json_tutorial_send_message'),
url(r'^json/tutorial_status$', 'json_tutorial_status'),
url(r'^json/change_enter_sends$', 'json_change_enter_sends'),
url(r'^json/get_profile$', 'json_get_profile'),
url(r'^json/report_error$', 'json_report_error'),
url(r'^json/report_send_time$', 'json_report_send_time'),
url(r'^json/report_narrow_time$', 'json_report_narrow_time'),
url(r'^json/report_unnarrow_time$', 'json_report_unnarrow_time'),
url(r'^json/update_message_flags$', 'messages.json_update_flags'),
url(r'^json/register$', 'json_events_register'),
url(r'^json/upload_file$', 'json_upload_file'),
url(r'^json/messages_in_narrow$', 'messages.json_messages_in_narrow'),
url(r'^json/update_message$', 'messages.json_update_message'),
url(r'^json/fetch_raw_message$', 'messages.json_fetch_raw_message'),
url(r'^json/refer_friend$', 'json_refer_friend'),
url(r'^json/set_alert_words$', 'json_set_alert_words'),
url(r'^json/set_muted_topics$', 'json_set_muted_topics'),
url(r'^json/set_avatar$', 'json_set_avatar'),
url(r'^json/time_setting$', 'json_time_setting'),
url(r'^json/left_side_userlist$', 'json_left_side_userlist'),
# This json format view is used by the LEGACY pre-REST API. It
# requires an API key.
url(r'^api/v1/send_message$', 'messages.api_send_message'),
    # This json format view used by the mobile apps accepts a
    # username/password pair and returns an API key.
url(r'^api/v1/fetch_api_key$', 'api_fetch_api_key'),
# These are integration-specific web hook callbacks
url(r'^api/v1/external/beanstalk$' , 'webhooks.api_beanstalk_webhook'),
url(r'^api/v1/external/github$', 'webhooks.api_github_landing'),
url(r'^api/v1/external/jira$', 'webhooks.api_jira_webhook'),
url(r'^api/v1/external/pivotal$', 'webhooks.api_pivotal_webhook'),
url(r'^api/v1/external/newrelic$', 'webhooks.api_newrelic_webhook'),
url(r'^api/v1/external/bitbucket$', 'webhooks.api_bitbucket_webhook'),
url(r'^api/v1/external/desk$', 'webhooks.api_deskdotcom_webhook'),
url(r'^api/v1/external/stash$', 'webhooks.api_stash_webhook'),
url(r'^api/v1/external/freshdesk$', 'webhooks.api_freshdesk_webhook'),
url(r'^api/v1/external/zendesk$', 'webhooks.api_zendesk_webhook'),
url(r'^api/v1/external/pagerduty$', 'webhooks.api_pagerduty_webhook'),
url(r'^user_uploads/(?P<realm_id>(\d*|unk))/(?P<filename>.*)', 'get_uploaded_file'),
)
# JSON format views used by the redesigned API, accept basic auth username:password.
v1_api_and_json_patterns = patterns('zerver.views',
url(r'^export$', 'rest_dispatch',
{'GET': 'export'}),
url(r'^streams$', 'rest_dispatch',
{'GET': 'get_streams_backend'}),
# GET returns "stream info" (undefined currently?), HEAD returns whether stream exists (200 or 404)
url(r'^streams/(?P<stream_name>.*)/members$', 'rest_dispatch',
{'GET': 'get_subscribers_backend'}),
url(r'^streams/(?P<stream_name>.*)$', 'rest_dispatch',
{'HEAD': 'stream_exists_backend',
'GET': 'stream_exists_backend',
'PATCH': 'update_stream_backend',
'DELETE': 'deactivate_stream_backend'}),
url(r'^users$', 'rest_dispatch',
{'GET': 'get_members_backend',
'POST': 'create_user_backend'}),
url(r'^users/me$', 'rest_dispatch',
{'GET': 'get_profile_backend'}),
url(r'^users/me/enter-sends$', 'rest_dispatch',
{'POST': 'json_change_enter_sends'}),
url(r'^users/me/pointer$', 'rest_dispatch',
{'GET': 'get_pointer_backend',
'PUT': 'update_pointer_backend'}),
# GET lists your streams, POST bulk adds, PATCH bulk modifies/removes
url(r'^users/me/subscriptions$', 'rest_dispatch',
{'GET': 'list_subscriptions_backend',
'POST': 'add_subscriptions_backend',
'PATCH': 'update_subscriptions_backend'}),
url(r'^users/me/alert_words$', 'rest_dispatch',
{'GET': 'list_alert_words',
'PUT': 'set_alert_words',
'PATCH': 'add_alert_words',
'DELETE': 'remove_alert_words'}),
url(r'^default_streams$', 'rest_dispatch',
{'PATCH': 'add_default_stream',
'DELETE': 'remove_default_stream'}),
url(r'^realm$', 'rest_dispatch',
{'PATCH': 'update_realm'}),
url(r'^users/me/api_key/regenerate$', 'rest_dispatch',
{'POST': 'regenerate_api_key'}),
url(r'^users/me/presence$', 'rest_dispatch',
{'POST': 'update_active_status_backend'}),
# Endpoint used by iOS devices to register their
# unique APNS device token
url(r'^users/me/apns_device_token$', 'rest_dispatch',
{'POST' : 'add_apns_device_token',
'DELETE': 'remove_apns_device_token'}),
url(r'^users/me/android_gcm_reg_id$', 'rest_dispatch',
{'POST': 'add_android_reg_id',
'DELETE': 'remove_android_reg_id'}),
url(r'^users/(?P<email>.*)/reactivate$', 'rest_dispatch',
{'POST': 'reactivate_user_backend'}),
url(r'^users/(?P<email>.*)$', 'rest_dispatch',
{'PATCH': 'update_user_backend',
'DELETE': 'deactivate_user_backend'}),
url(r'^bots$', 'rest_dispatch',
{'GET': 'get_bots_backend',
'POST': 'add_bot_backend'}),
url(r'^bots/(?P<email>.*)/api_key/regenerate$', 'rest_dispatch',
{'POST': 'regenerate_bot_api_key'}),
url(r'^bots/(?P<email>.*)$', 'rest_dispatch',
{'PATCH': 'patch_bot_backend',
'DELETE': 'deactivate_bot_backend'}),
url(r'^register$', 'rest_dispatch',
{'POST': 'api_events_register'}),
# Returns a 204, used by desktop app to verify connectivity status
url(r'generate_204$', 'generate_204'),
) + patterns('zerver.views.messages',
# GET returns messages, possibly filtered, POST sends a message
url(r'^messages$', 'rest_dispatch',
{'GET': 'get_old_messages_backend',
'PATCH': 'update_message_backend',
'POST': 'send_message_backend'}),
url(r'^messages/render$', 'rest_dispatch',
{'GET': 'render_message_backend'}),
url(r'^messages/flags$', 'rest_dispatch',
{'POST': 'update_message_flags'}),
) + patterns('zerver.tornadoviews',
url(r'^events$', 'rest_dispatch',
{'GET': 'get_events_backend',
'DELETE': 'cleanup_event_queue'}),
)
if not settings.VOYAGER:
v1_api_and_json_patterns += patterns('',
# Still scoped to api/v1/, but under a different project
url(r'^deployments/', include('zilencer.urls.api')),
)
urlpatterns += patterns('',
url(r'^', include('zilencer.urls.pages')),
)
urlpatterns += patterns('',
url(r'^', include('analytics.urls')),
)
urlpatterns += patterns('',
url(r'^', include('corporate.urls')),
)
urlpatterns += patterns('zerver.tornadoviews',
# Tornado views
url(r'^json/get_events$', 'json_get_events'),
# Used internally for communication between Django and Tornado processes
url(r'^notify_tornado$', 'notify'),
)
# Include the dual-use patterns twice
urlpatterns += patterns('',
url(r'^api/v1/', include(v1_api_and_json_patterns)),
url(r'^json/', include(v1_api_and_json_patterns)),
)
if settings.DEVELOPMENT:
use_prod_static = getattr(settings, 'PIPELINE', False)
static_root = os.path.join(settings.DEPLOY_ROOT,
'prod-static/serve' if use_prod_static else 'static')
urlpatterns += patterns('',
url(r'^static/(?P<path>.*)$', 'django.views.static.serve',
{'document_root': static_root}))
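# --- Hedged illustration (not part of this module; added for clarity) ---
# Because v1_api_and_json_patterns is included under both prefixes above, a
# single entry such as
#
#     url(r'^users$', 'rest_dispatch', {'GET': 'get_members_backend',
#                                       'POST': 'create_user_backend'})
#
# serves GET/POST /api/v1/users (basic-auth API clients) and GET/POST
# /json/users (the logged-in web client) with the same view functions.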
|
naslanidis/ansible
|
refs/heads/devel
|
lib/ansible/modules/network/nxos/nxos_snmp_host.py
|
8
|
#!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
module: nxos_snmp_host
version_added: "2.2"
short_description: Manages SNMP host configuration.
description:
- Manages SNMP host configuration parameters.
extends_documentation_fragment: nxos
author:
- Jason Edelman (@jedelman8)
- Gabriele Gerbino (@GGabriele)
notes:
- C(state=absent) removes the host configuration if it is configured.
options:
snmp_host:
description:
            - IP address or hostname of target host.
required: true
version:
description:
- SNMP version.
required: false
default: v2c
choices: ['v2c', 'v3']
community:
description:
- Community string or v3 username.
required: false
default: null
udp:
description:
- UDP port number (0-65535).
required: false
default: null
type:
description:
            - Type of message to send to host.
required: false
        default: trap
choices: ['trap', 'inform']
vrf:
description:
            - VRF to use to source traffic to the SNMP host.
required: false
default: null
vrf_filter:
description:
- Name of VRF to filter.
required: false
default: null
src_intf:
description:
- Source interface.
required: false
default: null
state:
description:
- Manage the state of the resource.
        required: false
default: present
choices: ['present','absent']
'''
EXAMPLES = '''
# ensure snmp host is configured
- nxos_snmp_host:
snmp_host: 3.3.3.3
community: TESTING
state: present
host: "{{ inventory_hostname }}"
username: "{{ un }}"
password: "{{ pwd }}"
'''
RETURN = '''
proposed:
description: k/v pairs of parameters passed into module
returned: always
type: dict
sample: {"community": "TESTING", "snmp_host": "3.3.3.3",
"snmp_type": "trap", "version": "v2c", "vrf_filter": "one_more_vrf"}
existing:
description: k/v pairs of existing snmp host
type: dict
sample: {"community": "TESTING", "snmp_type": "trap",
"udp": "162", "v3": "noauth", "version": "v2c",
"vrf": "test_vrf", "vrf_filter": ["test_vrf",
"another_test_vrf"]}
end_state:
    description: k/v pairs of snmp host after module execution
returned: always
type: dict or null
sample: {"community": "TESTING", "snmp_type": "trap",
"udp": "162", "v3": "noauth", "version": "v2c",
"vrf": "test_vrf", "vrf_filter": ["test_vrf",
"another_test_vrf", "one_more_vrf"]}
updates:
description: commands sent to the device
returned: always
type: list
sample: ["snmp-server host 3.3.3.3 filter-vrf another_test_vrf"]
changed:
description: check to see if a change was made on the device
returned: always
type: boolean
sample: true
'''
import json
# COMMON CODE FOR MIGRATION
import re
from ansible.module_utils.basic import get_exception
from ansible.module_utils.netcfg import NetworkConfig, ConfigLine
from ansible.module_utils.shell import ShellError
try:
from ansible.module_utils.nxos import get_module
except ImportError:
from ansible.module_utils.nxos import NetworkModule
def to_list(val):
if isinstance(val, (list, tuple)):
return list(val)
elif val is not None:
return [val]
else:
return list()
class CustomNetworkConfig(NetworkConfig):
def expand_section(self, configobj, S=None):
if S is None:
S = list()
S.append(configobj)
for child in configobj.children:
if child in S:
continue
self.expand_section(child, S)
return S
def get_object(self, path):
for item in self.items:
if item.text == path[-1]:
parents = [p.text for p in item.parents]
if parents == path[:-1]:
return item
def to_block(self, section):
return '\n'.join([item.raw for item in section])
def get_section(self, path):
try:
section = self.get_section_objects(path)
return self.to_block(section)
except ValueError:
return list()
def get_section_objects(self, path):
if not isinstance(path, list):
path = [path]
obj = self.get_object(path)
if not obj:
raise ValueError('path does not exist in config')
return self.expand_section(obj)
def add(self, lines, parents=None):
"""Adds one or lines of configuration
"""
ancestors = list()
offset = 0
obj = None
## global config command
if not parents:
for line in to_list(lines):
item = ConfigLine(line)
item.raw = line
if item not in self.items:
self.items.append(item)
else:
for index, p in enumerate(parents):
try:
i = index + 1
obj = self.get_section_objects(parents[:i])[0]
ancestors.append(obj)
except ValueError:
# add parent to config
offset = index * self.indent
obj = ConfigLine(p)
obj.raw = p.rjust(len(p) + offset)
if ancestors:
obj.parents = list(ancestors)
ancestors[-1].children.append(obj)
self.items.append(obj)
ancestors.append(obj)
# add child objects
for line in to_list(lines):
# check if child already exists
for child in ancestors[-1].children:
if child.text == line:
break
else:
offset = len(parents) * self.indent
item = ConfigLine(line)
item.raw = line.rjust(len(line) + offset)
item.parents = ancestors
ancestors[-1].children.append(item)
self.items.append(item)
def get_network_module(**kwargs):
try:
return get_module(**kwargs)
except NameError:
return NetworkModule(**kwargs)
def get_config(module, include_defaults=False):
config = module.params['config']
if not config:
try:
config = module.get_config()
except AttributeError:
defaults = module.params['include_defaults']
config = module.config.get_config(include_defaults=defaults)
return CustomNetworkConfig(indent=2, contents=config)
def load_config(module, candidate):
config = get_config(module)
commands = candidate.difference(config)
commands = [str(c).strip() for c in commands]
save_config = module.params['save']
result = dict(changed=False)
if commands:
if not module.check_mode:
try:
module.configure(commands)
except AttributeError:
module.config(commands)
if save_config:
try:
module.config.save_config()
except AttributeError:
module.execute(['copy running-config startup-config'])
result['changed'] = True
result['updates'] = commands
return result
# END OF COMMON CODE
def execute_config_command(commands, module):
try:
module.configure(commands)
except ShellError:
clie = get_exception()
module.fail_json(msg='Error sending CLI commands',
error=str(clie), commands=commands)
except AttributeError:
try:
commands.insert(0, 'configure')
module.cli.add_commands(commands, output='config')
module.cli.run_commands()
except ShellError:
clie = get_exception()
module.fail_json(msg='Error sending CLI commands',
error=str(clie), commands=commands)
def get_cli_body_ssh(command, response, module):
"""Get response for when transport=cli. This is kind of a hack and mainly
needed because these modules were originally written for NX-API. And
not every command supports "| json" when using cli/ssh. As such, we assume
if | json returns an XML string, it is a valid command, but that the
resource doesn't exist yet. Instead, the output will be a raw string
when issuing commands containing 'show run'.
"""
if 'xml' in response[0]:
body = []
elif 'show run' in command:
body = response
else:
try:
body = [json.loads(response[0])]
except ValueError:
module.fail_json(msg='Command does not support JSON output',
command=command)
return body
def execute_show(cmds, module, command_type=None):
command_type_map = {
'cli_show': 'json',
'cli_show_ascii': 'text'
}
try:
if command_type:
response = module.execute(cmds, command_type=command_type)
else:
response = module.execute(cmds)
except ShellError:
clie = get_exception()
module.fail_json(msg='Error sending {0}'.format(cmds),
error=str(clie))
except AttributeError:
try:
if command_type:
command_type = command_type_map.get(command_type)
module.cli.add_commands(cmds, output=command_type)
response = module.cli.run_commands()
else:
module.cli.add_commands(cmds, raw=True)
response = module.cli.run_commands()
except ShellError:
clie = get_exception()
module.fail_json(msg='Error sending {0}'.format(cmds),
error=str(clie))
return response
def execute_show_command(command, module, command_type='cli_show'):
if module.params['transport'] == 'cli':
if 'show run' not in command:
command += ' | json'
cmds = [command]
response = execute_show(cmds, module)
body = get_cli_body_ssh(command, response, module)
elif module.params['transport'] == 'nxapi':
cmds = [command]
body = execute_show(cmds, module, command_type=command_type)
return body
def apply_key_map(key_map, table):
new_dict = {}
for key, value in table.items():
new_key = key_map.get(key)
if new_key:
value = table.get(key)
if value:
new_dict[new_key] = str(value)
else:
new_dict[new_key] = value
return new_dict
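# Worked example (illustration only): keys absent from key_map are dropped and
# truthy values are coerced to strings, so
#     apply_key_map({'port': 'udp'}, {'port': 162, 'host': '3.3.3.3'})
# returns {'udp': '162'}.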
def get_snmp_host(host, module):
command = 'show snmp host'
body = execute_show_command(command, module)
host_map = {
'port': 'udp',
'version': 'version',
'level': 'v3',
'type': 'snmp_type',
'secname': 'community'
}
resource = {}
if body:
try:
resource_table = body[0]['TABLE_host']['ROW_host']
if isinstance(resource_table, dict):
resource_table = [resource_table]
for each in resource_table:
key = str(each['host'])
src = each.get('src_intf', None)
host_resource = apply_key_map(host_map, each)
if src:
host_resource['src_intf'] = src.split(':')[1].strip()
vrf_filt = each.get('TABLE_vrf_filters', None)
if vrf_filt:
vrf_filter = vrf_filt['ROW_vrf_filters']['vrf_filter'].split(':')[1].split(',')
filters = [vrf.strip() for vrf in vrf_filter]
host_resource['vrf_filter'] = filters
vrf = each.get('vrf', None)
if vrf:
host_resource['vrf'] = vrf.split(':')[1].strip()
resource[key] = host_resource
except (KeyError, AttributeError, TypeError):
return resource
find = resource.get(host, None)
if find:
fix_find = {}
for (key, value) in find.items():
if isinstance(value, str):
fix_find[key] = value.strip()
else:
fix_find[key] = value
return fix_find
else:
return {}
else:
return {}
def remove_snmp_host(host, existing):
commands = []
if existing['version'] == 'v3':
existing['version'] = '3'
command = 'no snmp-server host {0} {snmp_type} version \
{version} {v3} {community}'.format(host, **existing)
elif existing['version'] == 'v2c':
existing['version'] = '2c'
command = 'no snmp-server host {0} {snmp_type} version \
{version} {community}'.format(host, **existing)
if command:
commands.append(command)
return commands
def config_snmp_host(delta, proposed, existing, module):
commands = []
command_builder = []
host = proposed['snmp_host']
cmd = 'snmp-server host {0}'.format(proposed['snmp_host'])
snmp_type = delta.get('snmp_type', None)
version = delta.get('version', None)
ver = delta.get('v3', None)
community = delta.get('community', None)
command_builder.append(cmd)
if any([snmp_type, version, ver, community]):
type_string = snmp_type or existing.get('type')
if type_string:
command_builder.append(type_string)
version = version or existing.get('version')
if version:
if version == 'v2c':
vn = '2c'
elif version == 'v3':
vn = '3'
version_string = 'version {0}'.format(vn)
command_builder.append(version_string)
if ver:
ver_string = ver or existing.get('v3')
command_builder.append(ver_string)
if community:
community_string = community or existing.get('community')
command_builder.append(community_string)
cmd = ' '.join(command_builder)
commands.append(cmd)
CMDS = {
'vrf_filter': 'snmp-server host {0} filter-vrf {vrf_filter}',
'vrf': 'snmp-server host {0} use-vrf {vrf}',
'udp': 'snmp-server host {0} udp-port {udp}',
'src_intf': 'snmp-server host {0} source-interface {src_intf}'
}
for key, value in delta.items():
if key in ['vrf_filter', 'vrf', 'udp', 'src_intf']:
command = CMDS.get(key, None)
if command:
cmd = command.format(host, **delta)
commands.append(cmd)
cmd = None
return commands
def flatten_list(command_lists):
flat_command_list = []
for command in command_lists:
if isinstance(command, list):
flat_command_list.extend(command)
else:
flat_command_list.append(command)
return flat_command_list
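# Worked example (illustration only):
#     flatten_list([['cmd-a', 'cmd-b'], 'cmd-c'])  ->  ['cmd-a', 'cmd-b', 'cmd-c']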
def main():
argument_spec = dict(
snmp_host=dict(required=True, type='str'),
community=dict(type='str'),
udp=dict(type='str'),
version=dict(choices=['v2c', 'v3'], default='v2c'),
src_intf=dict(type='str'),
v3=dict(choices=['noauth', 'auth', 'priv']),
vrf_filter=dict(type='str'),
vrf=dict(type='str'),
snmp_type=dict(choices=['trap', 'inform'], default='trap'),
state=dict(choices=['absent', 'present'], default='present'),
)
module = get_network_module(argument_spec=argument_spec,
supports_check_mode=True)
snmp_host = module.params['snmp_host']
community = module.params['community']
udp = module.params['udp']
version = module.params['version']
src_intf = module.params['src_intf']
v3 = module.params['v3']
vrf_filter = module.params['vrf_filter']
vrf = module.params['vrf']
snmp_type = module.params['snmp_type']
state = module.params['state']
if snmp_type == 'inform' and version != 'v3':
module.fail_json(msg='inform requires snmp v3')
if version == 'v2c' and v3:
module.fail_json(msg='param: "v3" should not be used when '
'using version v2c')
if not any([vrf_filter, vrf, udp, src_intf]):
if not all([snmp_type, version, community]):
module.fail_json(msg='when not configuring options like '
                                 'vrf_filter, vrf, udp, and src_intf, '
'the following params are required: '
'type, version, community')
if version == 'v3' and v3 is None:
module.fail_json(msg='when using version=v3, the param v3 '
'(options: auth, noauth, priv) is also required')
existing = get_snmp_host(snmp_host, module)
# existing returns the list of vrfs configured for a given host
# checking to see if the proposed is in the list
store = existing.get('vrf_filter', None)
if existing and store:
if vrf_filter not in existing['vrf_filter']:
existing['vrf_filter'] = None
else:
existing['vrf_filter'] = vrf_filter
args = dict(
community=community,
snmp_host=snmp_host,
udp=udp,
version=version,
src_intf=src_intf,
vrf_filter=vrf_filter,
v3=v3,
vrf=vrf,
snmp_type=snmp_type
)
proposed = dict((k, v) for k, v in args.items() if v is not None)
delta = dict(set(proposed.items()).difference(existing.items()))
changed = False
commands = []
end_state = existing
if state == 'absent':
if existing:
command = remove_snmp_host(snmp_host, existing)
commands.append(command)
elif state == 'present':
if delta:
command = config_snmp_host(delta, proposed, existing, module)
commands.append(command)
cmds = flatten_list(commands)
if cmds:
if module.check_mode:
module.exit_json(changed=True, commands=cmds)
else:
changed = True
execute_config_command(cmds, module)
end_state = get_snmp_host(snmp_host, module)
if 'configure' in cmds:
cmds.pop(0)
if store:
existing['vrf_filter'] = store
results = {}
results['proposed'] = proposed
results['existing'] = existing
results['end_state'] = end_state
results['updates'] = cmds
results['changed'] = changed
module.exit_json(**results)
if __name__ == "__main__":
main()
|
AlCutter/googletest-fbsd
|
refs/heads/master
|
test/gtest_break_on_failure_unittest.py
|
2140
|
#!/usr/bin/env python
#
# Copyright 2006, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Unit test for Google Test's break-on-failure mode.
A user can ask Google Test to seg-fault when an assertion fails, using
either the GTEST_BREAK_ON_FAILURE environment variable or the
--gtest_break_on_failure flag. This script tests such functionality
by invoking gtest_break_on_failure_unittest_ (a program written with
Google Test) with different environments and command line flags.
"""
__author__ = 'wan@google.com (Zhanyong Wan)'
import gtest_test_utils
import os
import sys
# Constants.
IS_WINDOWS = os.name == 'nt'
# The environment variable for enabling/disabling the break-on-failure mode.
BREAK_ON_FAILURE_ENV_VAR = 'GTEST_BREAK_ON_FAILURE'
# The command line flag for enabling/disabling the break-on-failure mode.
BREAK_ON_FAILURE_FLAG = 'gtest_break_on_failure'
# The environment variable for enabling/disabling the throw-on-failure mode.
THROW_ON_FAILURE_ENV_VAR = 'GTEST_THROW_ON_FAILURE'
# The environment variable for enabling/disabling the catch-exceptions mode.
CATCH_EXCEPTIONS_ENV_VAR = 'GTEST_CATCH_EXCEPTIONS'
# Path to the gtest_break_on_failure_unittest_ program.
EXE_PATH = gtest_test_utils.GetTestExecutablePath(
'gtest_break_on_failure_unittest_')
environ = gtest_test_utils.environ
SetEnvVar = gtest_test_utils.SetEnvVar
# Tests in this file run a Google-Test-based test program and expect it
# to terminate prematurely. Therefore they are incompatible with
# the premature-exit-file protocol by design. Unset the
# premature-exit filepath to prevent Google Test from creating
# the file.
SetEnvVar(gtest_test_utils.PREMATURE_EXIT_FILE_ENV_VAR, None)
def Run(command):
"""Runs a command; returns 1 if it was killed by a signal, or 0 otherwise."""
p = gtest_test_utils.Subprocess(command, env=environ)
if p.terminated_by_signal:
return 1
else:
return 0
# The tests.
class GTestBreakOnFailureUnitTest(gtest_test_utils.TestCase):
"""Tests using the GTEST_BREAK_ON_FAILURE environment variable or
the --gtest_break_on_failure flag to turn assertion failures into
segmentation faults.
"""
def RunAndVerify(self, env_var_value, flag_value, expect_seg_fault):
"""Runs gtest_break_on_failure_unittest_ and verifies that it does
(or does not) have a seg-fault.
Args:
env_var_value: value of the GTEST_BREAK_ON_FAILURE environment
variable; None if the variable should be unset.
flag_value: value of the --gtest_break_on_failure flag;
None if the flag should not be present.
expect_seg_fault: 1 if the program is expected to generate a seg-fault;
0 otherwise.
"""
SetEnvVar(BREAK_ON_FAILURE_ENV_VAR, env_var_value)
if env_var_value is None:
env_var_value_msg = ' is not set'
else:
env_var_value_msg = '=' + env_var_value
if flag_value is None:
flag = ''
elif flag_value == '0':
flag = '--%s=0' % BREAK_ON_FAILURE_FLAG
else:
flag = '--%s' % BREAK_ON_FAILURE_FLAG
command = [EXE_PATH]
if flag:
command.append(flag)
if expect_seg_fault:
should_or_not = 'should'
else:
should_or_not = 'should not'
has_seg_fault = Run(command)
SetEnvVar(BREAK_ON_FAILURE_ENV_VAR, None)
msg = ('when %s%s, an assertion failure in "%s" %s cause a seg-fault.' %
(BREAK_ON_FAILURE_ENV_VAR, env_var_value_msg, ' '.join(command),
should_or_not))
self.assert_(has_seg_fault == expect_seg_fault, msg)
def testDefaultBehavior(self):
"""Tests the behavior of the default mode."""
self.RunAndVerify(env_var_value=None,
flag_value=None,
expect_seg_fault=0)
def testEnvVar(self):
"""Tests using the GTEST_BREAK_ON_FAILURE environment variable."""
self.RunAndVerify(env_var_value='0',
flag_value=None,
expect_seg_fault=0)
self.RunAndVerify(env_var_value='1',
flag_value=None,
expect_seg_fault=1)
def testFlag(self):
"""Tests using the --gtest_break_on_failure flag."""
self.RunAndVerify(env_var_value=None,
flag_value='0',
expect_seg_fault=0)
self.RunAndVerify(env_var_value=None,
flag_value='1',
expect_seg_fault=1)
def testFlagOverridesEnvVar(self):
"""Tests that the flag overrides the environment variable."""
self.RunAndVerify(env_var_value='0',
flag_value='0',
expect_seg_fault=0)
self.RunAndVerify(env_var_value='0',
flag_value='1',
expect_seg_fault=1)
self.RunAndVerify(env_var_value='1',
flag_value='0',
expect_seg_fault=0)
self.RunAndVerify(env_var_value='1',
flag_value='1',
expect_seg_fault=1)
def testBreakOnFailureOverridesThrowOnFailure(self):
"""Tests that gtest_break_on_failure overrides gtest_throw_on_failure."""
SetEnvVar(THROW_ON_FAILURE_ENV_VAR, '1')
try:
self.RunAndVerify(env_var_value=None,
flag_value='1',
expect_seg_fault=1)
finally:
SetEnvVar(THROW_ON_FAILURE_ENV_VAR, None)
if IS_WINDOWS:
def testCatchExceptionsDoesNotInterfere(self):
"""Tests that gtest_catch_exceptions doesn't interfere."""
SetEnvVar(CATCH_EXCEPTIONS_ENV_VAR, '1')
try:
self.RunAndVerify(env_var_value='1',
flag_value='1',
expect_seg_fault=1)
finally:
SetEnvVar(CATCH_EXCEPTIONS_ENV_VAR, None)
if __name__ == '__main__':
gtest_test_utils.Main()
|
grimmjow8/ansible
|
refs/heads/devel
|
test/integration/targets/module_utils/module_utils/qux1/quux.py
|
298
|
data = 'qux1'
|
hyperized/ansible
|
refs/heads/devel
|
test/units/module_utils/hwc/test_dict_comparison.py
|
23
|
# -*- coding: utf-8 -*-
# 2018.07.26 --- use DictComparison instead of GcpRequest
#
# (c) 2016, Tom Melendez <tom@supertom.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from units.compat import unittest
from ansible.module_utils.hwc_utils import are_different_dicts
class HwcDictComparisonTestCase(unittest.TestCase):
def test_simple_no_difference(self):
value1 = {
'foo': 'bar',
'test': 'original'
}
self.assertFalse(are_different_dicts(value1, value1))
def test_simple_different(self):
value1 = {
'foo': 'bar',
'test': 'original'
}
value2 = {
'foo': 'bar',
'test': 'different'
}
value3 = {
'test': 'original'
}
self.assertTrue(are_different_dicts(value1, value2))
self.assertTrue(are_different_dicts(value1, value3))
self.assertTrue(are_different_dicts(value2, value3))
def test_nested_dictionaries_no_difference(self):
value1 = {
'foo': {
'quiet': {
'tree': 'test'
},
'bar': 'baz'
},
'test': 'original'
}
self.assertFalse(are_different_dicts(value1, value1))
def test_nested_dictionaries_with_difference(self):
value1 = {
'foo': {
'quiet': {
'tree': 'test'
},
'bar': 'baz'
},
'test': 'original'
}
value2 = {
'foo': {
'quiet': {
'tree': 'baz'
},
'bar': 'hello'
},
'test': 'original'
}
value3 = {
'foo': {
'quiet': {
'tree': 'test'
},
'bar': 'baz'
}
}
self.assertTrue(are_different_dicts(value1, value2))
self.assertTrue(are_different_dicts(value1, value3))
self.assertTrue(are_different_dicts(value2, value3))
def test_arrays_strings_no_difference(self):
value1 = {
'foo': [
'baz',
'bar'
]
}
self.assertFalse(are_different_dicts(value1, value1))
def test_arrays_strings_with_difference(self):
value1 = {
'foo': [
'baz',
'bar',
]
}
value2 = {
'foo': [
'baz',
'hello'
]
}
value3 = {
'foo': [
'bar',
]
}
self.assertTrue(are_different_dicts(value1, value2))
self.assertTrue(are_different_dicts(value1, value3))
self.assertTrue(are_different_dicts(value2, value3))
def test_arrays_dicts_with_no_difference(self):
value1 = {
'foo': [
{
'test': 'value',
'foo': 'bar'
},
{
'different': 'dict'
}
]
}
self.assertFalse(are_different_dicts(value1, value1))
def test_arrays_dicts_with_difference(self):
value1 = {
'foo': [
{
'test': 'value',
'foo': 'bar'
},
{
'different': 'dict'
}
]
}
value2 = {
'foo': [
{
'test': 'value2',
'foo': 'bar2'
},
]
}
value3 = {
'foo': [
{
'test': 'value',
'foo': 'bar'
}
]
}
self.assertTrue(are_different_dicts(value1, value2))
self.assertTrue(are_different_dicts(value1, value3))
self.assertTrue(are_different_dicts(value2, value3))
|
chineyting/lab4-Info3180
|
refs/heads/master
|
server/lib/werkzeug/local.py
|
310
|
# -*- coding: utf-8 -*-
"""
werkzeug.local
~~~~~~~~~~~~~~
This module implements context-local objects.
:copyright: (c) 2013 by the Werkzeug Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from functools import update_wrapper
from werkzeug.wsgi import ClosingIterator
from werkzeug._compat import PY2, implements_bool
# Since each thread has its own greenlet, we can use those as identifiers for
# the context.  If greenlets are not available, we fall back to the current
# thread ident (imported from `thread` on Python 2 or `_thread` on Python 3).
try:
from greenlet import getcurrent as get_ident
except ImportError:
try:
from thread import get_ident
except ImportError:
from _thread import get_ident
def release_local(local):
"""Releases the contents of the local for the current context.
This makes it possible to use locals without a manager.
Example::
>>> loc = Local()
>>> loc.foo = 42
>>> release_local(loc)
>>> hasattr(loc, 'foo')
False
With this function one can release :class:`Local` objects as well
    as :class:`LocalStack` objects.  However, it is not possible to
    release data held by proxies that way; one always has to retain
    a reference to the underlying local object in order to be able
    to release it.
.. versionadded:: 0.6.1
"""
local.__release_local__()
class Local(object):
__slots__ = ('__storage__', '__ident_func__')
def __init__(self):
object.__setattr__(self, '__storage__', {})
object.__setattr__(self, '__ident_func__', get_ident)
def __iter__(self):
return iter(self.__storage__.items())
def __call__(self, proxy):
"""Create a proxy for a name."""
return LocalProxy(self, proxy)
def __release_local__(self):
self.__storage__.pop(self.__ident_func__(), None)
def __getattr__(self, name):
try:
return self.__storage__[self.__ident_func__()][name]
except KeyError:
raise AttributeError(name)
def __setattr__(self, name, value):
ident = self.__ident_func__()
storage = self.__storage__
try:
storage[ident][name] = value
except KeyError:
storage[ident] = {name: value}
def __delattr__(self, name):
try:
del self.__storage__[self.__ident_func__()][name]
except KeyError:
raise AttributeError(name)
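# A minimal usage sketch for Local (an assumed example, not part of the original
# module); every attribute is stored separately per thread or greenlet:
#
#     loc = Local()
#     loc.user = 'alice'        # visible only in the current context
#     print(loc.user)           # -> 'alice'
#     release_local(loc)        # drops this context's storage
#     hasattr(loc, 'user')      # -> False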
class LocalStack(object):
"""This class works similar to a :class:`Local` but keeps a stack
of objects instead. This is best explained with an example::
>>> ls = LocalStack()
>>> ls.push(42)
>>> ls.top
42
>>> ls.push(23)
>>> ls.top
23
>>> ls.pop()
23
>>> ls.top
42
They can be force released by using a :class:`LocalManager` or with
    the :func:`release_local` function, but the correct way is to pop the
    item from the stack after using it.  When the stack is empty it will
no longer be bound to the current context (and as such released).
By calling the stack without arguments it returns a proxy that resolves to
the topmost item on the stack.
.. versionadded:: 0.6.1
"""
def __init__(self):
self._local = Local()
def __release_local__(self):
self._local.__release_local__()
def _get__ident_func__(self):
return self._local.__ident_func__
def _set__ident_func__(self, value):
object.__setattr__(self._local, '__ident_func__', value)
__ident_func__ = property(_get__ident_func__, _set__ident_func__)
del _get__ident_func__, _set__ident_func__
def __call__(self):
def _lookup():
rv = self.top
if rv is None:
raise RuntimeError('object unbound')
return rv
return LocalProxy(_lookup)
def push(self, obj):
"""Pushes a new item to the stack"""
rv = getattr(self._local, 'stack', None)
if rv is None:
self._local.stack = rv = []
rv.append(obj)
return rv
def pop(self):
"""Removes the topmost item from the stack, will return the
old value or `None` if the stack was already empty.
"""
stack = getattr(self._local, 'stack', None)
if stack is None:
return None
elif len(stack) == 1:
release_local(self._local)
return stack[-1]
else:
return stack.pop()
@property
def top(self):
"""The topmost item on the stack. If the stack is empty,
`None` is returned.
"""
try:
return self._local.stack[-1]
except (AttributeError, IndexError):
return None
class LocalManager(object):
"""Local objects cannot manage themselves. For that you need a local
manager. You can pass a local manager multiple locals or add them later
    by appending them to `manager.locals`.  Every time the manager cleans up,
    it will clean up all the data left in the locals for this context.
The `ident_func` parameter can be added to override the default ident
function for the wrapped locals.
.. versionchanged:: 0.6.1
Instead of a manager the :func:`release_local` function can be used
as well.
.. versionchanged:: 0.7
`ident_func` was added.
"""
def __init__(self, locals=None, ident_func=None):
if locals is None:
self.locals = []
elif isinstance(locals, Local):
self.locals = [locals]
else:
self.locals = list(locals)
if ident_func is not None:
self.ident_func = ident_func
for local in self.locals:
object.__setattr__(local, '__ident_func__', ident_func)
else:
self.ident_func = get_ident
def get_ident(self):
"""Return the context identifier the local objects use internally for
        this context.  You cannot override this method to change the behavior,
        but you can use it to link other context-local objects (such as
        SQLAlchemy's scoped sessions) to the Werkzeug locals.
.. versionchanged:: 0.7
           You can pass a different ident function to the local manager that
will then be propagated to all the locals passed to the
constructor.
"""
return self.ident_func()
def cleanup(self):
"""Manually clean up the data in the locals for this context. Call
this at the end of the request or use `make_middleware()`.
"""
for local in self.locals:
release_local(local)
def make_middleware(self, app):
"""Wrap a WSGI application so that cleaning up happens after
request end.
"""
def application(environ, start_response):
return ClosingIterator(app(environ, start_response), self.cleanup)
return application
def middleware(self, func):
"""Like `make_middleware` but for decorating functions.
Example usage::
@manager.middleware
def application(environ, start_response):
...
The difference to `make_middleware` is that the function passed
will have all the arguments copied from the inner application
(name, docstring, module).
"""
return update_wrapper(self.make_middleware(func), func)
def __repr__(self):
return '<%s storages: %d>' % (
self.__class__.__name__,
len(self.locals)
)
@implements_bool
class LocalProxy(object):
"""Acts as a proxy for a werkzeug local. Forwards all operations to
a proxied object. The only operations not supported for forwarding
are right handed operands and any kind of assignment.
Example usage::
from werkzeug.local import Local
l = Local()
# these are proxies
request = l('request')
user = l('user')
from werkzeug.local import LocalStack
_response_local = LocalStack()
# this is a proxy
response = _response_local()
Whenever something is bound to l.user / l.request the proxy objects
will forward all operations. If no object is bound a :exc:`RuntimeError`
will be raised.
To create proxies to :class:`Local` or :class:`LocalStack` objects,
call the object as shown above. If you want to have a proxy to an
object looked up by a function, you can (as of Werkzeug 0.6.1) pass
a function to the :class:`LocalProxy` constructor::
session = LocalProxy(lambda: get_current_request().session)
.. versionchanged:: 0.6.1
       The class can be instantiated with a callable as well now.
"""
__slots__ = ('__local', '__dict__', '__name__')
def __init__(self, local, name=None):
object.__setattr__(self, '_LocalProxy__local', local)
object.__setattr__(self, '__name__', name)
def _get_current_object(self):
"""Return the current object. This is useful if you want the real
object behind the proxy at a time for performance reasons or because
you want to pass the object into a different context.
"""
if not hasattr(self.__local, '__release_local__'):
return self.__local()
try:
return getattr(self.__local, self.__name__)
except AttributeError:
raise RuntimeError('no object bound to %s' % self.__name__)
@property
def __dict__(self):
try:
return self._get_current_object().__dict__
except RuntimeError:
raise AttributeError('__dict__')
def __repr__(self):
try:
obj = self._get_current_object()
except RuntimeError:
return '<%s unbound>' % self.__class__.__name__
return repr(obj)
def __bool__(self):
try:
return bool(self._get_current_object())
except RuntimeError:
return False
def __unicode__(self):
try:
return unicode(self._get_current_object())
except RuntimeError:
return repr(self)
def __dir__(self):
try:
return dir(self._get_current_object())
except RuntimeError:
return []
def __getattr__(self, name):
if name == '__members__':
return dir(self._get_current_object())
return getattr(self._get_current_object(), name)
def __setitem__(self, key, value):
self._get_current_object()[key] = value
def __delitem__(self, key):
del self._get_current_object()[key]
if PY2:
__getslice__ = lambda x, i, j: x._get_current_object()[i:j]
def __setslice__(self, i, j, seq):
self._get_current_object()[i:j] = seq
def __delslice__(self, i, j):
del self._get_current_object()[i:j]
__setattr__ = lambda x, n, v: setattr(x._get_current_object(), n, v)
__delattr__ = lambda x, n: delattr(x._get_current_object(), n)
__str__ = lambda x: str(x._get_current_object())
__lt__ = lambda x, o: x._get_current_object() < o
__le__ = lambda x, o: x._get_current_object() <= o
__eq__ = lambda x, o: x._get_current_object() == o
__ne__ = lambda x, o: x._get_current_object() != o
__gt__ = lambda x, o: x._get_current_object() > o
__ge__ = lambda x, o: x._get_current_object() >= o
__cmp__ = lambda x, o: cmp(x._get_current_object(), o)
__hash__ = lambda x: hash(x._get_current_object())
__call__ = lambda x, *a, **kw: x._get_current_object()(*a, **kw)
__len__ = lambda x: len(x._get_current_object())
__getitem__ = lambda x, i: x._get_current_object()[i]
__iter__ = lambda x: iter(x._get_current_object())
__contains__ = lambda x, i: i in x._get_current_object()
__add__ = lambda x, o: x._get_current_object() + o
__sub__ = lambda x, o: x._get_current_object() - o
__mul__ = lambda x, o: x._get_current_object() * o
__floordiv__ = lambda x, o: x._get_current_object() // o
__mod__ = lambda x, o: x._get_current_object() % o
__divmod__ = lambda x, o: x._get_current_object().__divmod__(o)
__pow__ = lambda x, o: x._get_current_object() ** o
__lshift__ = lambda x, o: x._get_current_object() << o
__rshift__ = lambda x, o: x._get_current_object() >> o
__and__ = lambda x, o: x._get_current_object() & o
__xor__ = lambda x, o: x._get_current_object() ^ o
__or__ = lambda x, o: x._get_current_object() | o
__div__ = lambda x, o: x._get_current_object().__div__(o)
__truediv__ = lambda x, o: x._get_current_object().__truediv__(o)
__neg__ = lambda x: -(x._get_current_object())
__pos__ = lambda x: +(x._get_current_object())
__abs__ = lambda x: abs(x._get_current_object())
__invert__ = lambda x: ~(x._get_current_object())
__complex__ = lambda x: complex(x._get_current_object())
__int__ = lambda x: int(x._get_current_object())
__long__ = lambda x: long(x._get_current_object())
__float__ = lambda x: float(x._get_current_object())
__oct__ = lambda x: oct(x._get_current_object())
__hex__ = lambda x: hex(x._get_current_object())
__index__ = lambda x: x._get_current_object().__index__()
__coerce__ = lambda x, o: x._get_current_object().__coerce__(x, o)
__enter__ = lambda x: x._get_current_object().__enter__()
__exit__ = lambda x, *a, **kw: x._get_current_object().__exit__(*a, **kw)
__radd__ = lambda x, o: o + x._get_current_object()
__rsub__ = lambda x, o: o - x._get_current_object()
__rmul__ = lambda x, o: o * x._get_current_object()
__rdiv__ = lambda x, o: o / x._get_current_object()
if PY2:
__rtruediv__ = lambda x, o: x._get_current_object().__rtruediv__(o)
else:
__rtruediv__ = __rdiv__
__rfloordiv__ = lambda x, o: o // x._get_current_object()
__rmod__ = lambda x, o: o % x._get_current_object()
__rdivmod__ = lambda x, o: x._get_current_object().__rdivmod__(o)
|
brett-patterson/pyface
|
refs/heads/master
|
pyface/wx/scrolled_message_dialog.py
|
5
|
#------------------------------------------------------------------------------
# Copyright (c) 2005, Enthought, Inc.
# All rights reserved.
#
# This software is provided without warranty under the terms of the BSD
# license included in enthought/LICENSE.txt and may be redistributed only
# under the conditions described in the aforementioned license. The license
# is also available online at http://www.enthought.com/licenses/BSD.txt
# Thanks for using Enthought open source!
#
# Author: Enthought, Inc.
# Description: <Enthought util package component>
#------------------------------------------------------------------------------
import wx
from wx.lib.layoutf import Layoutf
class ScrolledMessageDialog(wx.Dialog):
def __init__(self, parent, msg, caption, pos = wx.DefaultPosition, size = (500,300)):
wx.Dialog.__init__(self, parent, -1, caption, pos, size)
x, y = pos
if x == -1 and y == -1:
self.CenterOnScreen(wx.BOTH)
text = wx.TextCtrl(self, -1, msg, wx.DefaultPosition, wx.DefaultSize,
wx.TE_READONLY |
wx.TE_MULTILINE |
wx.HSCROLL |
wx.TE_RICH2
)
font = wx.Font(8, wx.MODERN, wx.NORMAL, wx.NORMAL)
text.SetStyle(0, len(msg), wx.TextAttr(font=font))
ok = wx.Button(self, wx.ID_OK, "OK")
text.SetConstraints(Layoutf('t=t5#1;b=t5#2;l=l5#1;r=r5#1', (self,ok)))
ok.SetConstraints(Layoutf('b=b5#1;x%w50#1;w!80;h!25', (self,)))
self.SetAutoLayout(1)
self.Layout()
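# A minimal usage sketch (an assumed example; ``parent`` is a hypothetical
# existing wx window and the message text is illustrative):
#
#     dialog = ScrolledMessageDialog(parent, 'Traceback (most recent call last): ...',
#                                    'Error')
#     dialog.ShowModal()
#     dialog.Destroy()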
|