| code (stringlengths 2-1.05M) | repo_name (stringlengths 5-104) | path (stringlengths 4-251) | language (stringclasses 1 value) | license (stringclasses 15 values) | size (int32 2-1.05M) |
|---|---|---|---|---|---|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2013, Daniel Jaouen <dcj24@cornell.edu>
# (c) 2016, Indrajit Raychaudhuri <irc+code@indrajit.com>
#
# Based on homebrew (Andrew Dunham <andrew@du.nham.ca>)
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: homebrew_tap
author:
- "Indrajit Raychaudhuri (@indrajitr)"
- "Daniel Jaouen (@danieljaouen)"
short_description: Tap a Homebrew repository.
description:
- Tap external Homebrew repositories.
version_added: "1.6"
options:
name:
description:
- The GitHub user/organization repository to tap.
required: true
aliases: ['tap']
url:
description:
- The optional git URL of the repository to tap. The URL is not
assumed to be on GitHub, and the protocol doesn't have to be HTTP.
Any location and protocol that git can handle is fine.
- When this option is provided, I(name) must be a single tap rather
than a list of multiple taps.
required: false
version_added: "2.2"
state:
description:
- state of the repository.
choices: [ 'present', 'absent' ]
required: false
default: 'present'
requirements: [ homebrew ]
'''
EXAMPLES = '''
- homebrew_tap:
name: homebrew/dupes
- homebrew_tap:
name: homebrew/dupes
state: absent
- homebrew_tap:
name: homebrew/dupes,homebrew/science
state: present
- homebrew_tap:
name: telemachus/brew
url: 'https://bitbucket.org/telemachus/brew'
'''
import re
def a_valid_tap(tap):
'''Returns True if the tap is valid.'''
regex = re.compile(r'^([\w-]+)/(homebrew-)?([\w-]+)$')
return regex.match(tap)
def already_tapped(module, brew_path, tap):
'''Returns True if already tapped.'''
rc, out, err = module.run_command([
brew_path,
'tap',
])
taps = [tap_.strip().lower() for tap_ in out.split('\n') if tap_]
tap_name = re.sub('homebrew-', '', tap.lower())
return tap_name in taps
def add_tap(module, brew_path, tap, url=None):
'''Adds a single tap.'''
failed, changed, msg = False, False, ''
if not a_valid_tap(tap):
failed = True
msg = 'not a valid tap: %s' % tap
elif not already_tapped(module, brew_path, tap):
if module.check_mode:
module.exit_json(changed=True)
rc, out, err = module.run_command([
brew_path,
'tap',
tap,
url,
])
if already_tapped(module, brew_path, tap):
changed = True
msg = 'successfully tapped: %s' % tap
else:
failed = True
msg = 'failed to tap: %s' % tap
else:
msg = 'already tapped: %s' % tap
return (failed, changed, msg)
def add_taps(module, brew_path, taps):
'''Adds one or more taps.'''
failed, unchanged, added, msg = False, 0, 0, ''
for tap in taps:
(failed, changed, msg) = add_tap(module, brew_path, tap)
if failed:
break
if changed:
added += 1
else:
unchanged += 1
if failed:
msg = 'added: %d, unchanged: %d, error: ' + msg
msg = msg % (added, unchanged)
elif added:
changed = True
msg = 'added: %d, unchanged: %d' % (added, unchanged)
else:
msg = 'added: %d, unchanged: %d' % (added, unchanged)
return (failed, changed, msg)
def remove_tap(module, brew_path, tap):
'''Removes a single tap.'''
failed, changed, msg = False, False, ''
if not a_valid_tap(tap):
failed = True
msg = 'not a valid tap: %s' % tap
elif already_tapped(module, brew_path, tap):
if module.check_mode:
module.exit_json(changed=True)
rc, out, err = module.run_command([
brew_path,
'untap',
tap,
])
if not already_tapped(module, brew_path, tap):
changed = True
msg = 'successfully untapped: %s' % tap
else:
failed = True
msg = 'failed to untap: %s' % tap
else:
msg = 'already untapped: %s' % tap
return (failed, changed, msg)
def remove_taps(module, brew_path, taps):
'''Removes one or more taps.'''
failed, unchanged, removed, msg = False, 0, 0, ''
for tap in taps:
(failed, changed, msg) = remove_tap(module, brew_path, tap)
if failed:
break
if changed:
removed += 1
else:
unchanged += 1
if failed:
msg = 'removed: %d, unchanged: %d, error: ' + msg
msg = msg % (removed, unchanged)
elif removed:
changed = True
msg = 'removed: %d, unchanged: %d' % (removed, unchanged)
else:
msg = 'removed: %d, unchanged: %d' % (removed, unchanged)
return (failed, changed, msg)
def main():
module = AnsibleModule(
argument_spec=dict(
name=dict(aliases=['tap'], type='list', required=True),
url=dict(default=None, required=False),
state=dict(default='present', choices=['present', 'absent']),
),
supports_check_mode=True,
)
brew_path = module.get_bin_path(
'brew',
required=True,
opt_dirs=['/usr/local/bin']
)
taps = module.params['name']
url = module.params['url']
if module.params['state'] == 'present':
if url is None:
# No tap URL provided explicitly, continue with bulk addition
# of all the taps.
failed, changed, msg = add_taps(module, brew_path, taps)
else:
# When a tap URL is provided explicitly, we allow adding a
# *single* tap only. Validate and proceed to add the single tap.
if len(taps) > 1:
msg = "A list of multiple taps may not be provided with the 'url' option."
module.fail_json(msg=msg)
else:
failed, changed, msg = add_tap(module, brew_path, taps[0], url)
if failed:
module.fail_json(msg=msg)
else:
module.exit_json(changed=changed, msg=msg)
elif module.params['state'] == 'absent':
failed, changed, msg = remove_taps(module, brew_path, taps)
if failed:
module.fail_json(msg=msg)
else:
module.exit_json(changed=changed, msg=msg)
# this is magic, see lib/ansible/module_common.py
from ansible.module_utils.basic import *
if __name__ == '__main__':
main()
| andreaso/ansible | lib/ansible/modules/packaging/os/homebrew_tap.py | Python | gpl-3.0 | 7,344 |
"""
This file tests some of the YAML files in the maxout paper
"""
import os
import pylearn2
from pylearn2.datasets import control
from pylearn2.datasets.mnist import MNIST
from pylearn2.termination_criteria import EpochCounter
from pylearn2.testing.skip import skip_if_no_gpu
from pylearn2.utils.serial import load_train_file
def test_mnist():
"""
Test the mnist.yaml file from the maxout
paper on random input
"""
skip_if_no_gpu()
train = load_train_file(os.path.join(pylearn2.__path__[0],
"scripts/papers/maxout/mnist.yaml"))
# Load fake MNIST data
init_value = control.load_data
control.load_data = [False]
train.dataset = MNIST(which_set='train',
axes=['c', 0, 1, 'b'], start=0, stop=100)
train.algorithm._set_monitoring_dataset(train.dataset)
control.load_data = init_value
# Train shortly and prevent saving
train.algorithm.termination_criterion = EpochCounter(max_epochs=1)
train.extensions.pop(0)
train.save_freq = 0
train.main_loop()
def test_mnist_pi():
"""
Test the mnist_pi.yaml file from the maxout
paper on random input
"""
train = load_train_file(
os.path.join(pylearn2.__path__[0],
"scripts/papers/maxout/mnist_pi.yaml")
)
# Load fake MNIST data
init_value = control.load_data
control.load_data = [False]
train.dataset = MNIST(which_set='train', start=0, stop=100)
train.algorithm._set_monitoring_dataset(train.dataset)
control.load_data = init_value
# Train shortly and prevent saving
train.algorithm.termination_criterion = EpochCounter(max_epochs=1)
train.extensions.pop(0)
train.save_freq = 0
train.main_loop()
| fyffyt/pylearn2 | pylearn2/scripts/papers/maxout/tests/test_mnist.py | Python | bsd-3-clause | 1,773 |
#!/usr/bin/python
# Copyright 2017 Google Inc.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: gcp_healthcheck
version_added: "2.4"
short_description: Create, Update or Destroy a Healthcheck.
description:
- Create, Update or Destroy a Healthcheck. Currently only HTTP and
HTTPS Healthchecks are supported. Healthchecks are used to monitor
individual instances, managed instance groups and/or backend
services. Healthchecks are reusable.
- Visit
U(https://cloud.google.com/compute/docs/load-balancing/health-checks)
for an overview of Healthchecks on GCP.
- See
U(https://cloud.google.com/compute/docs/reference/latest/httpHealthChecks) for
API details on HTTP Healthchecks.
- See
U(https://cloud.google.com/compute/docs/reference/latest/httpsHealthChecks)
for more details on the HTTPS Healthcheck API.
requirements:
- "python >= 2.6"
- "google-api-python-client >= 1.6.2"
- "google-auth >= 0.9.0"
- "google-auth-httplib2 >= 0.0.2"
notes:
- Only supports HTTP and HTTPS Healthchecks currently.
author:
- "Tom Melendez (@supertom) <tom@supertom.com>"
options:
check_interval:
description:
- How often (in seconds) to send a health check.
required: false
default: 5
healthcheck_name:
description:
- Name of the Healthcheck.
required: true
healthcheck_type:
description:
- Type of Healthcheck.
required: true
choices: ["HTTP", "HTTPS"]
host_header:
description:
- The value of the host header in the health check request. If left
empty, the public IP on behalf of which this health
check is performed will be used.
required: true
default: ""
port:
description:
- The TCP port number for the health check request. The default value is
443 for HTTPS and 80 for HTTP.
required: false
request_path:
description:
- The request path of the HTTPS health check request.
required: false
default: "/"
state:
description: State of the Healthcheck.
required: true
choices: ["present", "absent"]
timeout:
description:
- How long (in seconds) to wait for a response before claiming
failure. It is invalid for timeout
to have a greater value than check_interval.
required: false
default: 5
unhealthy_threshold:
description:
- A so-far healthy instance will be marked unhealthy after this
many consecutive failures.
required: false
default: 2
healthy_threshold:
description:
- A so-far unhealthy instance will be marked healthy after this
many consecutive successes.
required: false
default: 2
service_account_email:
description:
- service account email
required: false
default: null
service_account_permissions:
version_added: "2.0"
description:
- service account permissions (see
U(https://cloud.google.com/sdk/gcloud/reference/compute/instances/create),
--scopes section for detailed information)
required: false
default: null
choices: [
"bigquery", "cloud-platform", "compute-ro", "compute-rw",
"useraccounts-ro", "useraccounts-rw", "datastore", "logging-write",
"monitoring", "sql-admin", "storage-full", "storage-ro",
"storage-rw", "taskqueue", "userinfo-email"
]
credentials_file:
description:
- Path to the JSON file associated with the service account email
default: null
required: false
project_id:
description:
- Your GCP project ID
required: false
default: null
'''
EXAMPLES = '''
- name: Create Minimum HealthCheck
gcp_healthcheck:
service_account_email: "{{ service_account_email }}"
credentials_file: "{{ credentials_file }}"
project_id: "{{ project_id }}"
healthcheck_name: my-healthcheck
healthcheck_type: HTTP
state: present
- name: Create HTTP HealthCheck
gcp_healthcheck:
service_account_email: "{{ service_account_email }}"
credentials_file: "{{ credentials_file }}"
project_id: "{{ project_id }}"
healthcheck_name: my-healthcheck
healthcheck_type: HTTP
host_header: my-host
request_path: /hc
check_interval: 10
timeout: 5
unhealthy_threshold: 2
healthy_threshold: 1
state: present
- name: Create HTTPS HealthCheck
gcp_healthcheck:
service_account_email: "{{ service_account_email }}"
credentials_file: "{{ credentials_file }}"
project_id: "{{ project_id }}"
healthcheck_name: "{{ https_healthcheck }}"
healthcheck_type: HTTPS
host_header: my-host
request_path: /hc
check_interval: 5
timeout: 5
unhealthy_threshold: 2
healthy_threshold: 1
state: present
'''
RETURN = '''
state:
description: state of the Healthcheck
returned: Always.
type: str
sample: present
healthcheck_name:
description: Name of the Healthcheck
returned: Always
type: str
sample: my-url-map
healthcheck_type:
description: Type of the Healthcheck
returned: Always
type: str
sample: HTTP
healthcheck:
description: GCP Healthcheck dictionary
returned: Always. Refer to GCP documentation for detailed field descriptions.
type: dict
sample: { "name": "my-hc", "port": 443, "requestPath": "/foo" }
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.gcp import get_google_api_client, GCPUtils
USER_AGENT_PRODUCT = 'ansible-healthcheck'
USER_AGENT_VERSION = '0.0.1'
def _validate_healthcheck_params(params):
"""
Validate healthcheck params.
Simple validation has already been done by AnsibleModule.
:param params: Ansible dictionary containing configuration.
:type params: ``dict``
:return: True or raises ValueError
:rtype: ``bool`` or `class:ValueError`
"""
if params['timeout'] > params['check_interval']:
raise ValueError("timeout (%s) is greater than check_interval (%s)" % (
params['timeout'], params['check_interval']))
return (True, '')
def _build_healthcheck_dict(params):
"""
Reformat services in Ansible Params for GCP.
:param params: Params from AnsibleModule object
:type params: ``dict``
:param project_id: The GCP project ID.
:type project_id: ``str``
:return: dictionary suitable for submission to GCP
HealthCheck (HTTP/HTTPS) API.
:rtype: ``dict``
"""
gcp_dict = GCPUtils.params_to_gcp_dict(params, 'healthcheck_name')
if 'timeout' in gcp_dict:
gcp_dict['timeoutSec'] = gcp_dict['timeout']
del gcp_dict['timeout']
if 'checkInterval' in gcp_dict:
gcp_dict['checkIntervalSec'] = gcp_dict['checkInterval']
del gcp_dict['checkInterval']
if 'hostHeader' in gcp_dict:
gcp_dict['host'] = gcp_dict['hostHeader']
del gcp_dict['hostHeader']
if 'healthcheckType' in gcp_dict:
del gcp_dict['healthcheckType']
return gcp_dict
def _get_req_resource(client, resource_type):
if resource_type == 'HTTPS':
return (client.httpsHealthChecks(), 'httpsHealthCheck')
else:
return (client.httpHealthChecks(), 'httpHealthCheck')
def get_healthcheck(client, name, project_id=None, resource_type='HTTP'):
"""
Get a Healthcheck from GCP.
:param client: An initialized GCE Compute Discovery resource.
:type client: :class: `googleapiclient.discovery.Resource`
:param name: Name of the Healthcheck.
:type name: ``str``
:param project_id: The GCP project ID.
:type project_id: ``str``
:return: A dict resp from the respective GCP 'get' request.
:rtype: ``dict``
"""
try:
resource, entity_name = _get_req_resource(client, resource_type)
args = {'project': project_id, entity_name: name}
req = resource.get(**args)
return GCPUtils.execute_api_client_req(req, raise_404=False)
except:
raise
def create_healthcheck(client, params, project_id, resource_type='HTTP'):
"""
Create a new Healthcheck.
:param client: An initialized GCE Compute Discovery resource.
:type client: :class: `googleapiclient.discovery.Resource`
:param params: Dictionary of arguments from AnsibleModule.
:type params: ``dict``
:return: Tuple with changed status and response dict
:rtype: ``tuple`` in the format of (bool, dict)
"""
gcp_dict = _build_healthcheck_dict(params)
try:
resource, _ = _get_req_resource(client, resource_type)
args = {'project': project_id, 'body': gcp_dict}
req = resource.insert(**args)
return_data = GCPUtils.execute_api_client_req(req, client, raw=False)
if not return_data:
return_data = get_healthcheck(client,
name=params['healthcheck_name'],
project_id=project_id)
return (True, return_data)
except:
raise
def delete_healthcheck(client, name, project_id, resource_type='HTTP'):
"""
Delete a Healthcheck.
:param client: An initialized GCE Compute Discovery resource.
:type client: :class: `googleapiclient.discovery.Resource`
:param name: Name of the Healthcheck.
:type name: ``str``
:param project_id: The GCP project ID.
:type project_id: ``str``
:return: Tuple with changed status and response dict
:rtype: ``tuple`` in the format of (bool, dict)
"""
try:
resource, entity_name = _get_req_resource(client, resource_type)
args = {'project': project_id, entity_name: name}
req = resource.delete(**args)
return_data = GCPUtils.execute_api_client_req(req, client)
return (True, return_data)
except:
raise
def update_healthcheck(client, healthcheck, params, name, project_id,
resource_type='HTTP'):
"""
Update a Healthcheck.
If the healthcheck has not changed, the update will not occur.
:param client: An initialized GCE Compute Discovery resource.
:type client: :class: `googleapiclient.discovery.Resource`
:param healthcheck: The existing Healthcheck dictionary fetched from GCP.
:type healthcheck: ``dict``
:param params: Dictionary of arguments from AnsibleModule.
:type params: ``dict``
:param name: Name of the Healthcheck.
:type name: ``str``
:param project_id: The GCP project ID.
:type project_id: ``str``
:return: Tuple with changed status and response dict
:rtype: ``tuple`` in the format of (bool, dict)
"""
gcp_dict = _build_healthcheck_dict(params)
ans = GCPUtils.are_params_equal(healthcheck, gcp_dict)
if ans:
return (False, 'no update necessary')
try:
resource, entity_name = _get_req_resource(client, resource_type)
args = {'project': project_id, entity_name: name, 'body': gcp_dict}
req = resource.update(**args)
return_data = GCPUtils.execute_api_client_req(
req, client=client, raw=False)
return (True, return_data)
except:
raise
def main():
module = AnsibleModule(argument_spec=dict(
healthcheck_name=dict(required=True),
healthcheck_type=dict(required=True,
choices=['HTTP', 'HTTPS']),
request_path=dict(required=False, default='/'),
check_interval=dict(required=False, type='int', default=5),
healthy_threshold=dict(required=False, type='int', default=2),
unhealthy_threshold=dict(required=False, type='int', default=2),
host_header=dict(required=False, type='str', default=''),
timeout=dict(required=False, type='int', default=5),
port=dict(required=False, type='int'),
state=dict(choices=['absent', 'present'], default='present'),
service_account_email=dict(),
service_account_permissions=dict(type='list'),
credentials_file=dict(),
project_id=dict(), ), )
client, conn_params = get_google_api_client(module, 'compute', user_agent_product=USER_AGENT_PRODUCT,
user_agent_version=USER_AGENT_VERSION)
params = {}
params['healthcheck_name'] = module.params.get('healthcheck_name')
params['healthcheck_type'] = module.params.get('healthcheck_type')
params['request_path'] = module.params.get('request_path')
params['check_interval'] = module.params.get('check_interval')
params['healthy_threshold'] = module.params.get('healthy_threshold')
params['unhealthy_threshold'] = module.params.get('unhealthy_threshold')
params['host_header'] = module.params.get('host_header')
params['timeout'] = module.params.get('timeout')
params['port'] = module.params.get('port', None)
params['state'] = module.params.get('state')
if not params['port']:
params['port'] = 80
if params['healthcheck_type'] == 'HTTPS':
params['port'] = 443
try:
_validate_healthcheck_params(params)
except Exception as e:
module.fail_json(msg=e.message, changed=False)
changed = False
json_output = {'state': params['state']}
healthcheck = get_healthcheck(client,
name=params['healthcheck_name'],
project_id=conn_params['project_id'],
resource_type=params['healthcheck_type'])
if not healthcheck:
if params['state'] == 'absent':
# Doesn't exist in GCE, and state==absent.
changed = False
module.fail_json(
msg="Cannot delete unknown healthcheck: %s" %
(params['healthcheck_name']))
else:
# Create
changed, json_output['healthcheck'] = create_healthcheck(client,
params=params,
project_id=conn_params['project_id'],
resource_type=params['healthcheck_type'])
elif params['state'] == 'absent':
# Delete
changed, json_output['healthcheck'] = delete_healthcheck(client,
name=params['healthcheck_name'],
project_id=conn_params['project_id'],
resource_type=params['healthcheck_type'])
else:
changed, json_output['healthcheck'] = update_healthcheck(client,
healthcheck=healthcheck,
params=params,
name=params['healthcheck_name'],
project_id=conn_params['project_id'],
resource_type=params['healthcheck_type'])
json_output['changed'] = changed
json_output.update(params)
module.exit_json(**json_output)
if __name__ == '__main__':
main()
| e-gob/plataforma-kioscos-autoatencion | scripts/ansible-play/.venv/lib/python2.7/site-packages/ansible/modules/cloud/google/gcp_healthcheck.py | Python | bsd-3-clause | 15,554 |
import re
import subprocess
def remove_long_path():
path = 'mtrand.c'
pat = re.compile(r'"[^"]*mtrand\.pyx"')
code = open(path).read()
code = pat.sub(r'"mtrand.pyx"', code)
open(path, 'w').write(code)
def main():
subprocess.check_call(['cython', 'mtrand.pyx'])
remove_long_path()
if __name__ == '__main__':
main()
| numpy/numpy-refactor | numpy/random/mtrand/generate_mtrand_c.py | Python | bsd-3-clause | 352 |
# ===--- SwiftIntTypes.py ----------------------------*- coding: utf-8 -*-===//
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2017 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See https://swift.org/LICENSE.txt for license information
# See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
# Bit counts for all int types
_all_integer_type_bitwidths = [8, 16, 32, 64]
# Number of bits in the biggest int type
int_max_bits = max(_all_integer_type_bitwidths)
def int_max(bits, signed):
bits = bits - 1 if signed else bits
bits = max(bits, 0)
return (1 << bits) - 1
def int_min(bits, signed):
return (-1 * int_max(bits, signed) - 1) if signed else 0
class SwiftIntegerType(object):
def __init__(self, is_word, bits, is_signed):
self.is_word = is_word
self.bits = bits
self.is_signed = is_signed
if is_word:
self.possible_bitwidths = [32, 64]
else:
self.possible_bitwidths = [bits]
self.min = int_min(bits, is_signed)
self.max = int_max(bits, is_signed)
# Derived properties
self.stdlib_name = \
('' if is_signed else 'U') + \
'Int' + \
('' if is_word else str(bits))
self.builtin_name = 'Int' + str(bits)
def get_opposite_signedness(self):
return SwiftIntegerType(self.is_word, self.bits, not self.is_signed)
def __eq__(self, other):
return self.is_word == other.is_word and \
self.bits == other.bits and \
self.is_signed == other.is_signed
def __ne__(self, other):
return not self.__eq__(other)
def all_integer_types(word_bits):
for bitwidth in _all_integer_type_bitwidths:
for is_signed in [False, True]:
yield SwiftIntegerType(
is_word=False, bits=bitwidth,
is_signed=is_signed)
for is_signed in [False, True]:
yield SwiftIntegerType(
is_word=True, bits=word_bits,
is_signed=is_signed)
# 'truncatingBitPattern' initializer is defined if the conversion is truncating
# on any platform that Swift supports.
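# For example, a conversion from Int64 to Int is truncating, because Int may
# be only 32 bits wide on some supported platforms, while a conversion from
# Int32 to Int64 never is.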
def should_define_truncating_bit_pattern_init(src_ty, dst_ty):
# Don't define a truncating conversion between a type and itself.
if src_ty == dst_ty:
return False
# Conversion to opposite signedness is never truncating.
if src_ty == dst_ty.get_opposite_signedness():
return False
for src_ty_bits in src_ty.possible_bitwidths:
for dst_ty_bits in dst_ty.possible_bitwidths:
if src_ty_bits > dst_ty_bits:
return True
return False
def all_integer_type_names():
return [self_ty.stdlib_name for self_ty in all_integer_types(0)]
def all_real_number_type_names():
# FIXME: 'Float80' removed until a test failure on i386 & x86_64 is figured out.
return ['Float', 'Double']
def all_numeric_type_names():
return all_integer_type_names() + all_real_number_type_names()
def numeric_type_names_macintosh_only():
return ['Float80']
# Swift_Programming_Language/Expressions.html
def all_integer_binary_operator_names():
return ['%', '<<', '>>', '&*', '&', '&+', '&-', '|', '^']
def all_integer_or_real_binary_operator_names():
return ['*', '/', '+', '-', '..<', '...']
def all_integer_assignment_operator_names():
return ['%=', '<<=', '>>=', '&=', '^=', '|=']
def all_integer_or_real_assignment_operator_names():
return ['=', '*=', '/=', '+=', '-=']
| aschwaighofer/swift | utils/SwiftIntTypes.py | Python | apache-2.0 | 3,655 |
from __future__ import print_function, division
from .cartan_type import Standard_Cartan
from sympy.core.compatibility import range
from sympy.matrices import eye
class TypeB(Standard_Cartan):
def __new__(cls, n):
if n < 2:
raise ValueError("n can not be less than 2")
return Standard_Cartan.__new__(cls, "B", n)
def dimension(self):
"""Dimension of the vector space V underlying the Lie algebra
Examples
========
>>> from sympy.liealgebras.cartan_type import CartanType
>>> c = CartanType("B3")
>>> c.dimension()
3
"""
return self.n
def basic_root(self, i, j):
"""
This is a method just to generate roots
with a 1 in the ith position and a -1
in the jth position.
"""
root = [0]*self.n
root[i] = 1
root[j] = -1
return root
def simple_root(self, i):
"""
Every Lie algebra has a unique root system.
Given a root system Q, there is a subset of the
roots such that an element of Q is called a
simple root if it cannot be written as the sum
of two elements in Q. If we let D denote the
set of simple roots, then it is clear that every
element of Q can be written as a linear combination
of elements of D with all coefficients non-negative.
In B_n the first n-1 simple roots are the same as the
roots in A_(n-1) (a 1 in the ith position, a -1 in
the (i+1)th position, and zeroes elsewhere). The n-th
simple root is the root with a 1 in the nth position
and zeroes elsewhere.
This method returns the ith simple root for the B series.
Examples
========
>>> from sympy.liealgebras.cartan_type import CartanType
>>> c = CartanType("B3")
>>> c.simple_root(2)
[0, 1, -1]
"""
n = self.n
if i < n:
return self.basic_root(i-1, i)
else:
root = [0]*self.n
root[n-1] = 1
return root
def positive_roots(self):
"""
This method generates all the positive roots of
B_n. These are half of all of the roots of B_n;
by multiplying all the positive roots by -1 we
get the negative roots.
Examples
========
>>> from sympy.liealgebras.cartan_type import CartanType
>>> c = CartanType("B3")
>>> c.positive_roots()
{1: [1, -1, 0], 2: [1, 1, 0], 3: [1, 0, -1], 4: [1, 0, 1], 5: [0, 1, -1],
6: [0, 1, 1], 7: [1, 0, 0], 8: [0, 1, 0], 9: [0, 0, 1]}
"""
n = self.n
posroots = {}
k = 0
for i in range(0, n-1):
for j in range(i+1, n):
k += 1
posroots[k] = self.basic_root(i, j)
k += 1
root = self.basic_root(i, j)
root[j] = 1
posroots[k] = root
for i in range(0, n):
k += 1
root = [0]*n
root[i] = 1
posroots[k] = root
return posroots
def roots(self):
"""
Returns the total number of roots for B_n"
"""
n = self.n
return 2*(n**2)
def cartan_matrix(self):
"""
Returns the Cartan matrix for B_n.
The Cartan matrix for a Lie algebra is
generated by assigning an ordering to the simple
roots, (alpha[1], ...., alpha[l]). Then the ijth
entry of the Cartan matrix is (<alpha[i],alpha[j]>).
Examples
========
>>> from sympy.liealgebras.cartan_type import CartanType
>>> c = CartanType('B4')
>>> c.cartan_matrix()
Matrix([
[ 2, -1, 0, 0],
[-1, 2, -1, 0],
[ 0, -1, 2, -2],
[ 0, 0, -1, 2]])
"""
n = self.n
m = 2* eye(n)
i = 1
while i < n-1:
m[i, i+1] = -1
m[i, i-1] = -1
i += 1
m[0, 1] = -1
m[n-2, n-1] = -2
m[n-1, n-2] = -1
return m
def basis(self):
"""
Returns the number of independent generators of B_n
"""
n = self.n
return n*(2*n + 1)  # dimension of so(2n+1)
def lie_algebra(self):
"""
Returns the Lie algebra associated with B_n
"""
n = self.n
return "so(" + str(2*n) + ")"
def dynkin_diagram(self):
n = self.n
diag = "---".join("0" for i in range(1, n)) + "=>=0\n"
diag += " ".join(str(i) for i in range(1, n+1))
return diag
| Davidjohnwilson/sympy | sympy/liealgebras/type_b.py | Python | bsd-3-clause | 4,651 |
def func():
import module
module
# <ref>
| ahb0327/intellij-community | python/testData/addImport/localImportInlineFunctionBody.after.py | Python | apache-2.0 | 58 |
# Copyright 2013 Hardcoded Software (http://www.hardcoded.net)
# This software is licensed under the "BSD" License as described in the "LICENSE" file,
# which should be included with this package. The terms are also available at
# http://www.hardcoded.net/licenses/bsd_license
from __future__ import unicode_literals
from ctypes import windll, Structure, byref, c_uint
from ctypes.wintypes import HWND, UINT, LPCWSTR, BOOL
import os.path as op
from .compat import text_type
shell32 = windll.shell32
SHFileOperationW = shell32.SHFileOperationW
class SHFILEOPSTRUCTW(Structure):
_fields_ = [
("hwnd", HWND),
("wFunc", UINT),
("pFrom", LPCWSTR),
("pTo", LPCWSTR),
("fFlags", c_uint),
("fAnyOperationsAborted", BOOL),
("hNameMappings", c_uint),
("lpszProgressTitle", LPCWSTR),
]
FO_MOVE = 1
FO_COPY = 2
FO_DELETE = 3
FO_RENAME = 4
FOF_MULTIDESTFILES = 1
FOF_SILENT = 4
FOF_NOCONFIRMATION = 16
FOF_ALLOWUNDO = 64
FOF_NOERRORUI = 1024
def send2trash(path):
if not isinstance(path, text_type):
path = text_type(path, 'mbcs')
if not op.isabs(path):
path = op.abspath(path)
fileop = SHFILEOPSTRUCTW()
fileop.hwnd = 0
fileop.wFunc = FO_DELETE
fileop.pFrom = LPCWSTR(path + '\0')
fileop.pTo = None
fileop.fFlags = FOF_ALLOWUNDO | FOF_NOCONFIRMATION | FOF_NOERRORUI | FOF_SILENT
fileop.fAnyOperationsAborted = 0
fileop.hNameMappings = 0
fileop.lpszProgressTitle = None
result = SHFileOperationW(byref(fileop))
if result:
msg = "Couldn't perform operation. Error code: %d" % result
raise OSError(msg)
| boxed/CMi | web_frontend/send2trash/plat_win.py | Python | mit | 1,660 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import datetime
import itertools
import tempfile
from django.core.files.storage import FileSystemStorage
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
callable_default_counter = itertools.count()
callable_default = lambda: next(callable_default_counter)
temp_storage = FileSystemStorage(location=tempfile.mkdtemp())
class BoundaryModel(models.Model):
positive_integer = models.PositiveIntegerField(null=True, blank=True)
class Defaults(models.Model):
name = models.CharField(max_length=255, default='class default value')
def_date = models.DateField(default=datetime.date(1980, 1, 1))
value = models.IntegerField(default=42)
callable_default = models.IntegerField(default=callable_default)
class ChoiceModel(models.Model):
"""For ModelChoiceField and ModelMultipleChoiceField tests."""
CHOICES = [
('', 'No Preference'),
('f', 'Foo'),
('b', 'Bar'),
]
INTEGER_CHOICES = [
(None, 'No Preference'),
(1, 'Foo'),
(2, 'Bar'),
]
STRING_CHOICES_WITH_NONE = [
(None, 'No Preference'),
('f', 'Foo'),
('b', 'Bar'),
]
name = models.CharField(max_length=10)
choice = models.CharField(max_length=2, blank=True, choices=CHOICES)
choice_string_w_none = models.CharField(
max_length=2, blank=True, null=True, choices=STRING_CHOICES_WITH_NONE)
choice_integer = models.IntegerField(choices=INTEGER_CHOICES, blank=True,
null=True)
@python_2_unicode_compatible
class ChoiceOptionModel(models.Model):
"""Destination for ChoiceFieldModel's ForeignKey.
Can't reuse ChoiceModel because error_message tests require that it have no instances."""
name = models.CharField(max_length=10)
class Meta:
ordering = ('name',)
def __str__(self):
return 'ChoiceOption %d' % self.pk
class ChoiceFieldModel(models.Model):
"""Model with ForeignKey to another model, for testing ModelForm
generation with ModelChoiceField."""
choice = models.ForeignKey(ChoiceOptionModel, blank=False,
default=lambda: ChoiceOptionModel.objects.get(name='default'))
choice_int = models.ForeignKey(ChoiceOptionModel, blank=False, related_name='choice_int',
default=lambda: 1)
multi_choice = models.ManyToManyField(ChoiceOptionModel, blank=False, related_name='multi_choice',
default=lambda: ChoiceOptionModel.objects.filter(name='default'))
multi_choice_int = models.ManyToManyField(ChoiceOptionModel, blank=False, related_name='multi_choice_int',
default=lambda: [1])
class OptionalMultiChoiceModel(models.Model):
multi_choice = models.ManyToManyField(ChoiceOptionModel, blank=False, related_name='not_relevant',
default=lambda: ChoiceOptionModel.objects.filter(name='default'))
multi_choice_optional = models.ManyToManyField(ChoiceOptionModel, blank=True,
related_name='not_relevant2')
class FileModel(models.Model):
file = models.FileField(storage=temp_storage, upload_to='tests')
@python_2_unicode_compatible
class Group(models.Model):
name = models.CharField(max_length=10)
def __str__(self):
return '%s' % self.name
class Cheese(models.Model):
name = models.CharField(max_length=100)
class Article(models.Model):
content = models.TextField()
| hackerbot/DjangoDev | tests/forms_tests/models.py | Python | bsd-3-clause | 3,657 |
from django.conf import settings
from django.core.handlers.base import get_path_info
from django.core.handlers.wsgi import WSGIHandler
from django.utils.six.moves.urllib.parse import urlparse
from django.utils.six.moves.urllib.request import url2pathname
from django.contrib.staticfiles import utils
from django.contrib.staticfiles.views import serve
class StaticFilesHandler(WSGIHandler):
"""
WSGI middleware that intercepts calls to the static files directory, as
defined by the STATIC_URL setting, and serves those files.
"""
def __init__(self, application, base_dir=None):
self.application = application
if base_dir:
self.base_dir = base_dir
else:
self.base_dir = self.get_base_dir()
self.base_url = urlparse(self.get_base_url())
super(StaticFilesHandler, self).__init__()
def get_base_dir(self):
return settings.STATIC_ROOT
def get_base_url(self):
utils.check_settings()
return settings.STATIC_URL
def _should_handle(self, path):
"""
Checks if the path should be handled. Ignores the path if:
* the host is provided as part of the base_url
* the request's path isn't under the media path (or equal)
"""
return path.startswith(self.base_url[2]) and not self.base_url[1]
def file_path(self, url):
"""
Returns the relative path to the media file on disk for the given URL.
"""
relative_url = url[len(self.base_url[2]):]
return url2pathname(relative_url)
def serve(self, request):
"""
Actually serves the request path.
"""
return serve(request, self.file_path(request.path), insecure=True)
def get_response(self, request):
from django.http import Http404
if self._should_handle(request.path):
try:
return self.serve(request)
except Http404 as e:
if settings.DEBUG:
from django.views import debug
return debug.technical_404_response(request, e)
return super(StaticFilesHandler, self).get_response(request)
def __call__(self, environ, start_response):
if not self._should_handle(get_path_info(environ)):
return self.application(environ, start_response)
return super(StaticFilesHandler, self).__call__(environ, start_response)
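# A hypothetical usage sketch (illustrative only, not part of this module):
# wrapping a project's WSGI application so that requests under STATIC_URL
# are served from STATIC_ROOT during development.
#
#     from django.core.wsgi import get_wsgi_application
#     application = StaticFilesHandler(get_wsgi_application())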
| edisonlz/fruit | web_project/base/site-packages/django/contrib/staticfiles/handlers.py | Python | apache-2.0 | 2,440 |
# Copyright (c) 2011, Apple Inc. All rights reserved.
# Copyright (c) 2009, 2011, 2012 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# WebKit's Python module for committer and reviewer validation.
import fnmatch
import json
from webkitpy.common.editdistance import edit_distance
from webkitpy.common.memoized import memoized
from webkitpy.common.system.filesystem import FileSystem
# The list of contributors have been moved to contributors.json
class Contributor(object):
def __init__(self, name, email_or_emails, irc_nickname_or_nicknames=None):
assert(name)
assert(email_or_emails)
self.full_name = name
if isinstance(email_or_emails, str):
self.emails = [email_or_emails]
else:
self.emails = email_or_emails
self.emails = map(lambda email: email.lower(), self.emails) # Emails are case-insensitive.
if isinstance(irc_nickname_or_nicknames, str):
self.irc_nicknames = [irc_nickname_or_nicknames]
else:
self.irc_nicknames = irc_nickname_or_nicknames
self.can_commit = False
self.can_review = False
def bugzilla_email(self):
# FIXME: We're assuming the first email is a valid bugzilla email,
# which might not be right.
return self.emails[0]
def __str__(self):
return unicode(self).encode('utf-8')
def __unicode__(self):
return '"%s" <%s>' % (self.full_name, self.emails[0])
def contains_string(self, search_string):
string = search_string.lower()
if string in self.full_name.lower():
return True
if self.irc_nicknames:
for nickname in self.irc_nicknames:
if string in nickname.lower():
return True
for email in self.emails:
if string in email:
return True
return False
def matches_glob(self, glob_string):
if fnmatch.fnmatch(self.full_name, glob_string):
return True
if self.irc_nicknames:
for nickname in self.irc_nicknames:
if fnmatch.fnmatch(nickname, glob_string):
return True
for email in self.emails:
if fnmatch.fnmatch(email, glob_string):
return True
return False
class Committer(Contributor):
def __init__(self, name, email_or_emails, irc_nickname=None):
Contributor.__init__(self, name, email_or_emails, irc_nickname)
self.can_commit = True
class Reviewer(Committer):
def __init__(self, name, email_or_emails, irc_nickname=None):
Committer.__init__(self, name, email_or_emails, irc_nickname)
self.can_review = True
class CommitterList(object):
# Committers and reviewers are passed in to allow easy testing
def __init__(self,
committers=[],
reviewers=[],
contributors=[]):
# FIXME: These arguments only exist for testing. Clean it up.
if not (committers or reviewers or contributors):
loaded_data = self.load_json()
contributors = loaded_data['Contributors']
committers = loaded_data['Committers']
reviewers = loaded_data['Reviewers']
self._contributors = contributors + committers + reviewers
self._committers = committers + reviewers
self._reviewers = reviewers
self._contributors_by_name = {}
self._accounts_by_email = {}
self._accounts_by_login = {}
@staticmethod
@memoized
def load_json():
filesystem = FileSystem()
json_path = filesystem.join(filesystem.dirname(filesystem.path_to_module('webkitpy.common.config')), 'contributors.json')
contributors = json.loads(filesystem.read_text_file(json_path))
return {
'Contributors': [Contributor(name, data.get('emails'), data.get('nicks')) for name, data in contributors['Contributors'].iteritems()],
'Committers': [Committer(name, data.get('emails'), data.get('nicks')) for name, data in contributors['Committers'].iteritems()],
'Reviewers': [Reviewer(name, data.get('emails'), data.get('nicks')) for name, data in contributors['Reviewers'].iteritems()],
}
def contributors(self):
return self._contributors
def committers(self):
return self._committers
def reviewers(self):
return self._reviewers
def _name_to_contributor_map(self):
if not len(self._contributors_by_name):
for contributor in self._contributors:
assert(contributor.full_name)
assert(contributor.full_name.lower() not in self._contributors_by_name) # We should never have duplicate names.
self._contributors_by_name[contributor.full_name.lower()] = contributor
return self._contributors_by_name
def _email_to_account_map(self):
if not len(self._accounts_by_email):
for account in self._contributors:
for email in account.emails:
assert(email not in self._accounts_by_email) # We should never have duplicate emails.
self._accounts_by_email[email] = account
return self._accounts_by_email
def _login_to_account_map(self):
if not len(self._accounts_by_login):
for account in self._contributors:
if account.emails:
login = account.bugzilla_email()
assert(login not in self._accounts_by_login) # We should never have duplicate emails.
self._accounts_by_login[login] = account
return self._accounts_by_login
def _committer_only(self, record):
if record and not record.can_commit:
return None
return record
def _reviewer_only(self, record):
if record and not record.can_review:
return None
return record
def committer_by_name(self, name):
return self._committer_only(self.contributor_by_name(name))
def contributor_by_irc_nickname(self, irc_nickname):
for contributor in self.contributors():
# FIXME: This should do case-insensitive comparison or assert that all IRC nicknames are in lowercase
if contributor.irc_nicknames and irc_nickname in contributor.irc_nicknames:
return contributor
return None
def contributors_by_search_string(self, string):
glob_matches = filter(lambda contributor: contributor.matches_glob(string), self.contributors())
return glob_matches or filter(lambda contributor: contributor.contains_string(string), self.contributors())
def contributors_by_email_username(self, string):
string = string + '@'
result = []
for contributor in self.contributors():
for email in contributor.emails:
if email.startswith(string):
result.append(contributor)
break
return result
def _contributor_name_shorthands(self, contributor):
if ' ' not in contributor.full_name:
return []
split_fullname = contributor.full_name.split()
first_name = split_fullname[0]
last_name = split_fullname[-1]
return first_name, last_name, first_name + last_name[0], first_name + ' ' + last_name[0]
def _tokenize_contributor_name(self, contributor):
full_name_in_lowercase = contributor.full_name.lower()
tokens = [full_name_in_lowercase] + full_name_in_lowercase.split()
if contributor.irc_nicknames:
return tokens + [nickname.lower() for nickname in contributor.irc_nicknames if len(nickname) > 5]
return tokens
def contributors_by_fuzzy_match(self, string):
string_in_lowercase = string.lower()
# 1. Exact match for fullname, email and irc_nicknames
account = self.contributor_by_name(string_in_lowercase) or self.contributor_by_email(string_in_lowercase) or self.contributor_by_irc_nickname(string_in_lowercase)
if account:
return [account], 0
# 2. Exact match for email username (before @)
accounts = self.contributors_by_email_username(string_in_lowercase)
if accounts and len(accounts) == 1:
return accounts, 0
# 3. Exact match for first name, last name, and first name + initial combinations such as "Dan B" and "Tim H"
accounts = [contributor for contributor in self.contributors() if string in self._contributor_name_shorthands(contributor)]
if accounts and len(accounts) == 1:
return accounts, 0
# 4. Finally, fuzzy-match using edit-distance
string = string_in_lowercase
contributorWithMinDistance = []
minDistance = len(string) / 2 - 1
for contributor in self.contributors():
tokens = self._tokenize_contributor_name(contributor)
editdistances = [edit_distance(token, string) for token in tokens if abs(len(token) - len(string)) <= minDistance]
if not editdistances:
continue
distance = min(editdistances)
if distance == minDistance:
contributorWithMinDistance.append(contributor)
elif distance < minDistance:
contributorWithMinDistance = [contributor]
minDistance = distance
if not len(contributorWithMinDistance):
return [], len(string)
return contributorWithMinDistance, minDistance
def contributor_by_email(self, email):
return self._email_to_account_map().get(email.lower()) if email else None
def contributor_by_name(self, name):
return self._name_to_contributor_map().get(name.lower()) if name else None
def committer_by_email(self, email):
return self._committer_only(self.contributor_by_email(email))
def reviewer_by_email(self, email):
return self._reviewer_only(self.contributor_by_email(email))
| klim-iv/phantomjs-qt5 | src/webkit/Tools/Scripts/webkitpy/common/config/committers.py | Python | bsd-3-clause | 11,526 |
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
class mrp_workcenter_load(osv.osv_memory):
_name = 'mrp.workcenter.load'
_description = 'Work Center Load'
_columns = {
'time_unit': fields.selection([('day', 'Day by day'),('week', 'Per week'),('month', 'Per month')],'Type of period', required=True),
'measure_unit': fields.selection([('hours', 'Amount in hours'),('cycles', 'Amount in cycles')],'Amount measuring unit', required=True),
}
def print_report(self, cr, uid, ids, context=None):
""" To print the report of Work Center Load
@param self: The object pointer.
@param cr: A database cursor
@param uid: ID of the user currently logged in
@param context: A standard dictionary
@return : Report
"""
if context is None:
context = {}
datas = {'ids' : context.get('active_ids',[])}
res = self.read(cr, uid, ids, ['time_unit','measure_unit'])
res = res and res[0] or {}
datas['form'] = res
return {
'type' : 'ir.actions.report.xml',
'report_name':'mrp.workcenter.load',
'datas' : datas,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| diogocs1/comps | web/addons/mrp/wizard/mrp_workcenter_load.py | Python | apache-2.0 | 2,222 |
# -*- coding: utf-8 -*-
"""
pygments.styles.vs
~~~~~~~~~~~~~~~~~~
Simple style with MS Visual Studio colors.
:copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.style import Style
from pygments.token import Keyword, Name, Comment, String, Error, \
Operator, Generic
class VisualStudioStyle(Style):
background_color = "#ffffff"
default_style = ""
styles = {
Comment: "#008000",
Comment.Preproc: "#0000ff",
Keyword: "#0000ff",
Operator.Word: "#0000ff",
Keyword.Type: "#2b91af",
Name.Class: "#2b91af",
String: "#a31515",
Generic.Heading: "bold",
Generic.Subheading: "bold",
Generic.Emph: "italic",
Generic.Strong: "bold",
Generic.Prompt: "bold",
Error: "border:#FF0000"
}
| emineKoc/WiseWit | wisewit_front_end/node_modules/pygmentize-bundled/vendor/pygments/pygments/styles/vs.py | Python | gpl-3.0 | 1,073 |
import socket
import subprocess
import sys
import time
import h11
import pytest
import requests
@pytest.fixture
def turq_instance():
return TurqInstance()
class TurqInstance:
"""Spins up and controls a live instance of Turq for testing."""
def __init__(self):
self.host = 'localhost'
# Test instance listens on port 13095 instead of the default 13085,
# to make it easier to run tests while also testing Turq manually.
# Of course, ideally it should be a random free port instead.
self.mock_port = 13095
self.editor_port = 13096
self.password = ''
self.extra_args = []
self.wait = True
self._process = None
self.console_output = None
def __enter__(self):
args = [sys.executable, '-m', 'turq.main',
'--bind', self.host, '--mock-port', str(self.mock_port),
'--editor-port', str(self.editor_port)]
if self.password is not None:
args += ['--editor-password', self.password]
args += self.extra_args
self._process = subprocess.Popen(args, stdin=subprocess.DEVNULL,
stdout=subprocess.DEVNULL,
stderr=subprocess.PIPE)
if self.wait:
self._wait_for_server()
return self
def __exit__(self, exc_type, exc_value, traceback):
self._process.terminate()
self._process.wait()
self.console_output = self._process.stderr.read().decode()
return False
def _wait_for_server(self, timeout=3):
# Wait until the mock server starts accepting connections,
# but no more than `timeout` seconds.
t0 = time.monotonic()
while time.monotonic() - t0 < timeout:
time.sleep(0.1)
try:
self.connect().close()
self.connect_editor().close()
return
except OSError:
pass
raise RuntimeError('Turq failed to start')
def connect(self):
return socket.create_connection((self.host, self.mock_port), timeout=5)
def connect_editor(self):
return socket.create_connection((self.host, self.editor_port),
timeout=5)
def send(self, *events):
hconn = h11.Connection(our_role=h11.CLIENT)
with self.connect() as sock:
for event in events:
sock.sendall(hconn.send(event))
sock.shutdown(socket.SHUT_WR)
while hconn.their_state is not h11.CLOSED:
event = hconn.next_event()
if event is h11.NEED_DATA:
hconn.receive_data(sock.recv(4096))
elif not isinstance(event, h11.ConnectionClosed):
yield event
def request(self, method, url, **kwargs):
full_url = 'http://%s:%d%s' % (self.host, self.mock_port, url)
return requests.request(method, full_url, **kwargs)
def request_editor(self, method, url, **kwargs):
full_url = 'http://%s:%d%s' % (self.host, self.editor_port, url)
return requests.request(method, full_url, **kwargs)
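# A hypothetical usage sketch (test name and assertion are illustrative only):
# a test takes the fixture, enters it as a context manager to start Turq,
# and talks to the mock server over HTTP through the request() helper.
#
#     def test_mock_server_accepts_requests(turq_instance):
#         with turq_instance:
#             response = turq_instance.request('GET', '/')
#             assert response.status_code is not None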
| vfaronov/turq | tests/conftest.py | Python | isc | 3,212 |
#
# This is the library part of Anita, the Automated NetBSD Installation
# and Test Application.
#
import os
import pexpect
import re
import string
import subprocess
import sys
import time
import urllib
import urlparse
__version__='1.43'
# Your preferred NetBSD FTP mirror site.
# This is used only by the obsolete code for getting releases
# by number, not by the recommended method of getting them by URL.
# See http://www.netbsd.org/mirrors/#ftp for the complete list.
netbsd_mirror_url = "ftp://ftp.netbsd.org/pub/NetBSD/"
#netbsd_mirror_url = "ftp://ftp.fi.NetBSD.org/pub/NetBSD/"
arch_qemu_map = {
'i386': 'qemu-system-i386',
'amd64': 'qemu-system-x86_64',
'sparc': 'qemu-system-sparc',
'evbarm-earmv7hf': 'qemu-system-arm',
'sparc64': 'qemu-system-sparc64',
# The following ones don't actually work
'macppc': 'qemu-system-ppc',
}
arch_gxemul_list = ['pmax', 'hpcmips', 'landisk']
arch_simh_list = ['vax']
arch_uae_list = ['amiga']
# External commands we rely on
if os.uname()[0] == 'NetBSD':
makefs = ["makefs", "-t", "cd9660", "-o", "rockridge"]
elif os.uname()[0] == 'FreeBSD':
makefs = ["mkisofs", "-r", "-o"]
elif os.uname()[0] == 'Darwin':
makefs = ["hdiutil", "makehybrid", "-iso", "-o"]
else:
# Linux distributions differ. Ubuntu has genisoimage
# and mkisofs (as an alias of genisoimage); CentOS has
# mkisofs only. Debian 7 has genisoimage only.
if os.path.isfile('/usr/bin/genisoimage'):
makefs = ["genisoimage", "-r", "-o"]
else:
makefs = ["mkisofs", "-r", "-o"]
fnull = open(os.devnull, 'w')
# Return true if the given program (+args) can be successfully run
def try_program(argv):
try:
result = subprocess.call(argv, stdout = fnull, stderr = fnull)
return result == 0
except OSError:
return False
# Create a directory if missing
def mkdir_p(dir):
if not os.path.isdir(dir):
os.makedirs(dir)
# Run a shell command safely and with error checking
def spawn(command, args):
print command, ' '.join(args[1:])
ret = os.spawnvp(os.P_WAIT, command, args)
if ret != 0:
raise RuntimeError("could not run " + command)
# Subclass pexpect.spawn to add logging of expect() calls
class pexpect_spawn_log(pexpect.spawn):
def __init__(self, logf, *args, **kwargs):
self.structured_log_f = logf
return super(pexpect_spawn_log, self).__init__(*args, **kwargs)
def expect(self, pattern, *args, **kwargs):
print >>self.structured_log_f, "expect(" + repr(pattern) + ")"
r = pexpect.spawn.expect(self, pattern, *args, **kwargs)
print >>self.structured_log_f, "match(" + repr(self.match.group(0)) + ")"
return r
# Subclass urllib.FancyURLopener so that we can catch
# HTTP 404 errors
class MyURLopener(urllib.FancyURLopener):
def http_error_default(self, url, fp, errcode, errmsg, headers):
raise IOError, 'HTTP error code %d' % errcode
def my_urlretrieve(url, filename):
r = MyURLopener().retrieve(url, filename)
if sys.version_info >= (2, 7, 12):
# Work around https://bugs.python.org/issue27973
urllib.urlcleanup()
return r
# Download a file, cleaning up the partial file if the transfer
# fails or is aborted before completion.
def download_file(file, url, optional = False):
try:
print "Downloading", url + "...",
sys.stdout.flush()
my_urlretrieve(url, file)
print "OK"
sys.stdout.flush()
except IOError, e:
if optional:
print "missing but optional, so that's OK"
else:
print e
sys.stdout.flush()
if os.path.exists(file):
os.unlink(file)
raise
# Create a file of the given size, containing NULs, without holes.
def make_dense_image(fn, size):
f = open(fn, "w")
blocksize = 64 * 1024
while size > 0:
chunk = min(size, blocksize)
f.write("\000" * chunk)
size = size - chunk
f.close()
# Parse a size with optional k/M/G/T suffix and return an integer
def parse_size(size):
m = re.match(r'(\d+)([kMGT])?$', size)
if not m:
raise RuntimeError("%s: invalid size" % size)
size, suffix = m.groups()
mult = dict(k=1024, M=1024**2, G=1024**3, T=1024**4).get(suffix, 1)
return int(size) * mult
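# For example, parse_size("512k") returns 524288, parse_size("2G") returns
# 2147483648, and a bare "100" returns 100.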
# Download "url" to the local file "file". If the file already
# exists locally, do nothing. If "optional" is true, ignore download
# failures and cache the absence of a missing file by creating a marker
# file with the extension ".MISSING".
def download_if_missing_2(url, file, optional = False):
if os.path.exists(file):
return
if os.path.exists(file + ".MISSING"):
return
dir = os.path.dirname(file)
mkdir_p(dir)
try:
download_file(file, url, optional)
except:
if optional:
f = open(file + ".MISSING", "w")
f.close()
else:
raise
# As above, but download a file from the download directory tree
# rooted at "urlbase" into a mirror tree rooted at "dirbase". The
# file name to download is "relfile", which is relative to both roots.
def download_if_missing(urlbase, dirbase, relfile, optional = False):
url = urlbase + relfile
file = os.path.join(dirbase, relfile)
return download_if_missing_2(url, file, optional)
def download_if_missing_3(urlbase, dirbase, relpath, optional = False):
url = urlbase + "/".join(relpath)
file = os.path.join(*([dirbase] + relpath))
return download_if_missing_2(url, file, optional)
# Map a URL to a directory name. No two URLs should map to the same
# directory.
def url2dir(url):
tail = []
def munge(match):
index = string.find("/:+-", match.group())
if index != 0:
tail.append(chr(0x60 + index) + str(match.start()))
return "-"
return "work-" + re.sub("[/:+-]", munge, url) + "+" + "".join(tail)
# Inverse of the above; not used, but included just to show that the
# mapping is invertible and therefore collision-free
class InvalidDir(Exception):
pass
def dir2url(dir):
match = re.match(r"(work-)(.*)\+(.*)", dir)
work, s, tail = match.groups()
if work != 'work-':
raise InvalidDir()
s = re.sub("-", "/", s)
chars = list(s)
while True:
m = re.match(r"([a-z])([0-9]+)", tail)
if not m:
break
c, i = m.groups()
chars[int(i)] = "/:+-"[ord(c) - 0x60]
tail = tail[m.end():]
return "".join(chars)
def check_arch_supported(arch, dist_type):
if arch_qemu_map.get(arch) is None and not arch in (arch_gxemul_list + arch_simh_list + arch_uae_list):
raise RuntimeError(("'%s' is not the name of a " + \
"supported NetBSD port") % arch)
if arch in ['i386', 'amd64', 'amiga'] and dist_type != 'reltree':
raise RuntimeError(("NetBSD/%s must be installed from " +
"a release tree, not an ISO") % arch)
if (arch in ['sparc', 'sparc64', 'vax']) and dist_type != 'iso':
raise RuntimeError(("NetBSD/%s must be installed from " +
"an ISO, not a release tree") % arch)
# Expect any of a set of alternatives. The *args are alternating
# patterns and actions; an action can be a string to be sent
# or a function to be called with no arguments. The alternatives
# will be expected repeatedly until the last one in the list has
# been selected.
def expect_any(child, *args):
# http://stackoverflow.com/questions/11702414/split-a-list-into-half-by-even-and-odd-elements
patterns = args[::2]
actions = args[1::2]
while True:
r = child.expect(list(patterns))
action = actions[r]
if isinstance(action, str):
child.send(action)
else:
action()
if r == len(actions) - 1:
break
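# Hypothetical usage of expect_any: answer "y" to any "continue?" prompt
# and stop once "login:" has been seen.  The loop only exits after the
# *last* alternative matches, so the terminating pattern goes at the end
# of the argument list:
#   expect_any(child,
#              r"continue\?", "y\n",
#              r"login:", lambda: None)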
# Receive and discard (but log) input from the child or a time
# period of "seconds". This is effectively a delay like
# time.sleep(seconds), but generates more useful log output.
def gather_input(child, seconds):
try:
child.expect("this-should-not-match", seconds)
except pexpect.TIMEOUT:
pass
#############################################################################
# A NetBSD version.
#
# Subclasses should define:
#
# dist_url(self)
# the top-level URL for the machine-dependent download tree where
# the version can be downloaded, for example,
# ftp://ftp.netbsd.org/pub/NetBSD/NetBSD-5.0.2/i386/
#
# mi_url(self)
# The top-level URL for the machine-independent download tree,
# for example, ftp://ftp.netbsd.org/pub/NetBSD/NetBSD-5.0.2/
#
# default_workdir(self)
# a file name component identifying the version, for use in
# constructing a unique, version-specific working directory
#
# arch(self)
# the name of the machine architecture the version is for,
# e.g., i386
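#
# A minimal sketch of that contract (purely illustrative; the real
# subclasses such as Release, URL and ISO follow further down):
#
#   class ExampleVersion(Version):
#       def dist_url(self):
#           return "ftp://ftp.netbsd.org/pub/NetBSD/NetBSD-5.0.2/i386/"
#       def mi_url(self):
#           return "ftp://ftp.netbsd.org/pub/NetBSD/NetBSD-5.0.2/"
#       def default_workdir(self):
#           return "netbsd-5.0.2"
#       def arch(self):
#           return "i386"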
def make_item(t):
d = dict(zip(['filename', 'label', 'install'], t[0:3]))
if isinstance(t[3], list):
d['group'] = make_set_dict_list(t[3])
else:
d['optional'] = t[3]
return d
def make_set_dict_list(list_):
return [make_item(t) for t in list_]
def flatten_set_dict_list(list_):
def item2list(item):
group = item.get('group')
if group:
return group
else:
return [item]
return sum([item2list(item) for item in list_], [])
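# For illustration, the entry [ 'games', 'Games', 0, 0 ] in Version.sets
# below becomes
#   {'filename': 'games', 'label': 'Games', 'install': 0, 'optional': 0}
# while an entry whose fourth field is a list (such as '_x11') gets a
# 'group' key holding the recursively converted sub-entries instead of an
# 'optional' flag; flatten_set_dict_list() then replaces each group entry
# with its members in a single flat list.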
class Version:
# Information about the available installation file sets. As the
# set of sets (sic) has evolved over time, this actually represents
# the union of those sets of sets, in other words, this list should
# contain all currently and historically known sets.
#
# This list is used to determine
# - Which sets we should attempt to download
# - Which sets we should install by default
#
# Each array element is a tuple of four fields:
# - the file name
# - a regular expression matching the label used by sysinst
# (taking into account that it may differ between sysinst versions)
# - a flag indicating that the set should be installed by default
# - a flag indicating that the set is not present in all versions
#
sets = make_set_dict_list([
[ 'kern-GENERIC', 'Kernel (GENERIC)', 1, 0 ],
[ 'kern-GENERIC.NOACPI', 'Kernel \(GENERIC\.NOACPI\)', 0, 1 ],
[ 'modules', 'Kernel [Mm]odules', 1, 1 ],
[ 'base', 'Base', 1, 0 ],
[ 'etc', '(System)|(System configuration files)|(Configuration files) \(/etc\)', 1, 0 ],
[ 'comp', 'Compiler [Tt]ools', 1, 0 ],
[ 'games', 'Games', 0, 0 ],
[ 'man', '(Online )?Manual [Pp]ages', 0, 0 ],
[ 'misc', 'Miscellaneous', 1, 0 ],
[ 'tests', 'Test programs', 1, 1 ],
[ 'text', 'Text [Pp]rocessing [Tt]ools', 0, 0 ],
[ '_x11', 'X11 sets', 0, [
['xbase', 'X11 base and clients', 0, 1 ],
['xcomp', 'X11 programming', 0, 1 ],
['xetc', 'X11 configuration', 0, 1 ],
['xfont', 'X11 fonts', 0, 1 ],
['xserver', 'X11 servers', 0, 1 ],
]],
[ '_src', 'Source (and debug )?sets', 0, [
['syssrc', 'Kernel sources', 0, 1],
['src', 'Base sources', 0, 1],
['sharesrc', 'Share sources', 0, 1],
['gnusrc', 'GNU sources', 0, 1],
['xsrc', 'X11 sources', 0, 1],
['debug', '(debug sets)|(Debug symbols)', 0, 1],
['xdebug', '(debug X11 sets)|(X11 debug symbols)', 0, 1],
]]
])
flat_sets = flatten_set_dict_list(sets)
def __init__(self, sets = None):
self.tempfiles = []
if sets is not None:
if not any([re.match('kern-', s) for s in sets]):
raise RuntimeError("no kernel set specified")
# Create a Python set containing the names of the NetBSD sets we
# want for O(1) lookup. Yes, the multiple meanings of the word
# "set" here are confusing.
sets_wanted = set(sets)
for required in ['base', 'etc']:
if not required in sets_wanted:
raise RuntimeError("the '%s' set is required", required)
for s in self.flat_sets:
s['install'] = (s['filename'] in sets_wanted)
sets_wanted.discard(s['filename'])
if len(sets_wanted):
raise RuntimeError("no such set: " + sets_wanted.pop())
def set_workdir(self, dir):
self.workdir = dir
# The directory where we mirror files needed for installation
def download_local_mi_dir(self):
return self.workdir + "/download/"
def download_local_arch_dir(self):
return self.download_local_mi_dir() + self.arch() + "/"
# The path to the install ISO image
def iso_path(self):
return os.path.join(self.workdir, self.iso_name())
# The directory for the install floppy images
def floppy_dir(self):
return os.path.join(self.download_local_arch_dir(),
"installation/floppy")
def boot_iso_dir(self):
return os.path.join(self.download_local_arch_dir(),
"installation/cdrom")
def boot_from_default(self):
return None
def scratch_disk(self):
arch = self.arch()
if arch in ['i386', 'amd64', 'sparc64']:
return "wd1d"
elif arch == 'vax':
return "ra1a"
else:
return "sd1c"
def xen_kernel(self):
arch = self.arch()
if arch == 'i386':
return 'netbsd-XEN3PAE_DOMU.gz'
elif arch == 'amd64':
return 'netbsd-XEN3_DOMU.gz'
else:
return None
def xen_install_kernel(self):
arch = self.arch()
if arch == 'i386':
return 'netbsd-INSTALL_XEN3PAE_DOMU.gz'
elif arch == 'amd64':
return 'netbsd-INSTALL_XEN3_DOMU.gz'
else:
return None
# The list of boot floppies we should try downloading;
# not all may actually exist. amd64 currently has five,
# i386 has three, and older versions may have fewer.
# Add a couple extra to accommodate future growth.
def potential_floppies(self):
return ['boot-com1.fs'] + ['boot%i.fs' % i for i in range(2, 8)]
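# i.e. ['boot-com1.fs', 'boot2.fs', 'boot3.fs', 'boot4.fs', 'boot5.fs',
#       'boot6.fs', 'boot7.fs']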
# The list of boot floppies we actually have
def floppies(self):
return [f for f in self.potential_floppies() \
if os.path.exists(os.path.join(self.floppy_dir(), f))]
def boot_isos(self):
return ['boot-com.iso']
def cleanup(self):
for fn in self.tempfiles:
os.unlink(fn)
def set_path(self, setname):
if re.match(r'.*src$', setname):
return ['source', 'sets', setname + '.tgz']
else:
return [self.arch(), 'binary', 'sets', setname + '.tgz']
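# For example, set_path('syssrc') yields ['source', 'sets', 'syssrc.tgz']
# because source sets live in the machine-independent tree, while
# set_path('base') on the default i386 port yields
# ['i386', 'binary', 'sets', 'base.tgz'].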
# Download this release
# The ISO class overrides this to download the ISO only
def download(self):
# Depending on the NetBSD version, there may be two or more
# boot floppies. Treat any floppies past the first two as
# optional files.
if hasattr(self, 'url') and self.url[:7] == 'file://':
mkdir_p(os.path.join(self.workdir, 'download'))
if not os.path.lexists(os.path.join(self.workdir, 'download', self.arch())):
os.symlink(self.url[7:], os.path.join(self.workdir, 'download', self.arch()))
return
if self.arch() == 'evbarm-earmv7hf':
for file in ['netbsd-VEXPRESS_A15.ub.gz']:
download_if_missing_3(self.dist_url(), self.download_local_arch_dir(), ["binary", "kernel", file])
download_if_missing_3(self.dist_url(), self.download_local_arch_dir(), ["binary", "gzimg", "armv7.img.gz"])
return
if self.arch() == 'hpcmips':
download_if_missing_3(self.dist_url(), self.download_local_arch_dir(), ["installation", "netbsd.gz"])
if self.arch() in ['hpcmips', 'landisk']:
download_if_missing_3(self.dist_url(), self.download_local_arch_dir(), ["binary", "kernel", "netbsd-GENERIC.gz"])
if self.arch() == 'amiga':
download_if_missing_3(self.dist_url(), self.download_local_arch_dir(), ["installation", "miniroot", "miniroot.fs.gz"])
i = 0
for floppy in self.potential_floppies():
download_if_missing_3(self.dist_url(),
self.download_local_arch_dir(),
["installation", "floppy", floppy],
True)
i = i + 1
for bootcd in (self.boot_isos()):
download_if_missing_3(self.dist_url(),
self.download_local_arch_dir(),
["installation", "cdrom", bootcd],
True)
# These are used with noemu only
download_if_missing_3(self.dist_url(),
self.download_local_arch_dir(),
["installation", "misc", "pxeboot_ia32.bin"],
True)
download_if_missing_3(self.dist_url(),
self.download_local_arch_dir(),
["binary", "kernel", "netbsd-INSTALL.gz"],
True)
for set in self.flat_sets:
if set['install']:
download_if_missing_3(self.mi_url(),
self.download_local_mi_dir(),
self.set_path(set['filename']),
set['optional'])
# Create an install ISO image to install from
def make_iso(self):
self.download()
spawn(makefs[0], makefs + \
[self.iso_path(), os.path.dirname(os.path.realpath(os.path.join(self.download_local_mi_dir(), self.arch())))])
self.tempfiles.append(self.iso_path())
# Get the architecture name. This is a hardcoded default for use
# by the obsolete subclasses; the "URL" class overrides it.
def arch(self):
return "i386"
# Backwards compatibility with Anita 1.2 and older
def install(self):
Anita(dist = self).install()
def boot(self):
Anita(dist = self).boot()
def interact(self):
Anita(dist = self).interact()
# Subclass for versions where we pass in the version number explicitly
class NumberedVersion(Version):
def __init__(self, ver, **kwargs):
Version.__init__(self, **kwargs)
self.ver = ver
# The file name of the install ISO (sans directory)
def iso_name(self):
if re.match("^[3-9]", self.ver) is not None:
return "i386cd-" + self.ver + ".iso"
else:
return "i386cd.iso"
# The directory for files related to this release
def default_workdir(self):
return "netbsd-" + self.ver
# An official NetBSD release
class Release(NumberedVersion):
def __init__(self, ver, **kwargs):
NumberedVersion.__init__(self, ver, **kwargs)
pass
def mi_url(self):
return netbsd_mirror_url + "NetBSD-" + self.ver + "/"
def dist_url(self):
return self.mi_url() + self.arch() + "/"
# A daily build
class DailyBuild(NumberedVersion):
def __init__(self, branch, timestamp, **kwargs):
ver = re.sub("^netbsd-", "", branch)
NumberedVersion.__init__(self, ver, **kwargs)
self.timestamp = timestamp
def default_workdir(self):
return NumberedVersion.default_workdir(self) + "-" + self.timestamp
def dist_url(self):
branch = re.sub("[\\._]", "-", self.ver)
if re.match("^[0-9]", branch):
branch = "netbsd-" + branch
return "http://ftp.netbsd.org/pub/NetBSD-daily/" + \
branch + "/" + self.timestamp + "/i386/"
# A local build
class LocalBuild(NumberedVersion):
def __init__(self, ver, release_path, **kwargs):
NumberedVersion.__init__(self, ver, **kwargs)
self.release_path = release_path
def dist_url(self):
return "file://" + self.release_path + "/i386/"
# The top-level URL of a release tree
class URL(Version):
def __init__(self, url, **kwargs):
Version.__init__(self, **kwargs)
self.url = url
match = re.match(r'(^.*/)([^/]+)/$', url)
if match is None:
raise RuntimeError(("URL '%s' doesn't look like the URL of a " + \
"NetBSD distribution") % url)
self.url_mi_part = match.group(1)
self.m_arch = match.group(2)
check_arch_supported(self.m_arch, 'reltree')
def dist_url(self):
return self.url
def mi_url(self):
return self.url_mi_part
def iso_name(self):
return "install_tmp.iso"
def default_workdir(self):
return url2dir(self.url)
def arch(self):
return self.m_arch
# A local release directory
class LocalDirectory(URL):
def __init__(self, dir, **kwargs):
# This could be optimized to avoid copying the files
URL.__init__(self, "file://" + dir, **kwargs)
# A URL or local file name pointing at an ISO image
class ISO(Version):
def __init__(self, iso_url, **kwargs):
Version.__init__(self, **kwargs)
if re.match(r'/', iso_url):
self.m_iso_url = "file://" + iso_url
self.m_iso_path = iso_url
else:
self.m_iso_url = iso_url
self.m_iso_path = None
# We can't determine the final ISO file name yet because the work
# directory is not known at this point, but we can precalculate the
# basename of it.
self.m_iso_basename = os.path.basename(
urllib.url2pathname(urlparse.urlparse(iso_url)[2]))
m = re.match(r"(.*)cd.*iso|NetBSD-[0-9\._A-Z]+-(.*).iso", self.m_iso_basename)
if m is None:
raise RuntimeError("cannot guess architecture from ISO name '%s'"
% self.m_iso_basename)
if m.group(1) is not None:
self.m_arch = m.group(1)
if m.group(2) is not None:
self.m_arch = m.group(2)
check_arch_supported(self.m_arch, 'iso')
def iso_path(self):
if self.m_iso_path is not None:
return self.m_iso_path
else:
return os.path.join(self.download_local_arch_dir(),
self.m_iso_basename)
def default_workdir(self):
return url2dir(self.m_iso_url)
def make_iso(self):
self.download()
def download(self):
if self.m_iso_path is None:
download_if_missing_2(self.m_iso_url, self.iso_path())
else:
mkdir_p(self.workdir)
def arch(self):
return self.m_arch
def boot_from_default(self):
return 'cdrom-with-sets'
#############################################################################
# Helper class for killing the DomU when the last reference to the
# child process is dropped
class DomUKiller:
def __init__(self, frontend, name):
self.name = name
self.frontend = frontend
def __del__(self):
print "destroying domU", self.name
spawn(self.frontend, [self.frontend, "destroy", self.name])
def vmm_is_xen(vmm):
return vmm == 'xm' or vmm == 'xl'
def slog(fd, tag, data):
print >>fd, "%s(%.3f, %s)" % (tag, time.time(), repr(data))
def slog_info(fd, data):
slog(fd, 'info', data)
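# Each structured log entry is printed as "tag(timestamp, repr(data))",
# for example (values illustrative):
#   send(1500000000.123, 'halt\n')
#   info(1500000000.456, 'old-style interface list')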
# A file-like object that escapes unprintable data and prefixes each
# line with a tag, for logging I/O.
class Logger:
def __init__(self, tag, fd):
self.tag = tag
self.fd = fd
def write(self, data):
slog(self.fd, self.tag, data)
def __getattr__(self, name):
return getattr(self.fd, name)
# http://stackoverflow.com/questions/616645/how-do-i-duplicate-sys-stdout-to-a-log-file-in-python
class multifile(object):
def __init__(self, files):
self._files = files
def __getattr__(self, attr, *args):
return self._wrap(attr, *args)
def _wrap(self, attr, *args):
def g(*a, **kw):
for f in self._files:
res = getattr(f, attr, *args)(*a, **kw)
return res
return g
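# Hypothetical usage, mirroring how configure_child() below fans reads out
# to both the unstructured log and a Logger (the file name is made up):
#   both = multifile([sys.stdout, open("session.log", "w")])
#   both.write("hello\n")   # calls write() on every file, returns the last result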
class Anita:
def __init__(self, dist, workdir = None, vmm = None, vmm_args = None,
disk_size = None, memory_size = None, persist = False, boot_from = None,
structured_log = None, structured_log_file = None, no_install = False, tests = 'atf', dtb = ''):
self.dist = dist
if workdir:
self.workdir = workdir
else:
self.workdir = dist.default_workdir()
self.structured_log = structured_log
self.structured_log_file = structured_log_file
if self.structured_log_file:
self.structured_log_f = open(self.structured_log_file, "w")
self.unstructured_log_f = sys.stdout
else:
if self.structured_log:
self.structured_log_f = sys.stdout
self.unstructured_log_f = open("/dev/null", "w")
else:
self.structured_log_f = open("/dev/null", "w")
self.unstructured_log_f = sys.stdout
# Set the default disk size if none was given.
if disk_size is None:
if self.dist.arch() == 'evbarm-earmv7hf':
disk_size = '2G'
else:
disk_size = '1536M'
self.disk_size = disk_size
# Set the default memory size if none was given.
if memory_size is None:
if dist.arch() in ['amd64', 'evbarm-earmv7hf', 'pmax', 'sparc64']:
memory_size = "128M"
else:
memory_size = "32M"
self.memory_size_bytes = parse_size(memory_size)
self.persist = persist
self.boot_from = boot_from
self.no_install = no_install
self.qemu = arch_qemu_map.get(dist.arch())
if self.qemu is None and not self.dist.arch() in (arch_gxemul_list + arch_simh_list + arch_uae_list):
raise RuntimeError("NetBSD port '%s' is not supported" %
dist.arch())
if self.qemu == 'qemu-system-i386' and \
not try_program(['qemu-system-i386', '--version']) \
and try_program(['qemu', '--version']):
self.qemu = 'qemu'
# Backwards compatibility
if vmm == 'xen':
vmm = 'xm'
elif not vmm and self.qemu:
vmm = 'qemu'
elif self.dist.arch() in arch_simh_list:
vmm = 'simh'
elif self.dist.arch() in arch_uae_list:
vmm = 'uae'
else:
vmm = 'gxemul'
self.vmm = vmm
if vmm_args is None:
vmm_args = []
if self.dist.arch() == 'pmax':
vmm_args += ["-e3max"]
elif self.dist.arch() == 'landisk':
vmm_args += ["-Elandisk"]
elif self.dist.arch() == 'hpcmips':
vmm_args += ["-emobilepro880"]
if dist.arch() == 'evbarm-earmv7hf':
vmm_args += ['-M', 'vexpress-a15', '-kernel', os.path.join(self.workdir, 'netbsd-VEXPRESS_A15.ub'),
'-append', "root=ld0a", '-dtb', dtb]
self.extra_vmm_args = vmm_args
self.is_logged_in = False
self.tests = tests
if dist.arch() == 'evbarm-earmv7hf':
self.boot_from = 'sd'
def slog(self, message):
slog_info(self.structured_log_f, message)
# Wrapper around pexpect.spawn to let us log the command for
# debugging. Note that unlike os.spawnvp, args[0] is not
# the name of the command.
def pexpect_spawn(self, command, args):
print command, " \\\n ".join(args)
return pexpect_spawn_log(self.structured_log_f, command, args)
# The path to the NetBSD hard disk image
def wd0_path(self):
return os.path.join(self.workdir, "wd0.img")
# Return the memory size rounded up to whole megabytes
def memory_megs(self):
megs = (self.memory_size_bytes + 2 ** 20 - 1) / 2 ** 20
if megs != self.memory_size_bytes / 2 **20:
print >>sys.stderr, \
"warning: rounding up memory size of %i bytes to %i megabytes" \
% (self.memory_size_bytes, megs)
return megs
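# For example, a memory_size of "32M" is exactly 32 megabytes and is
# returned unchanged, whereas a hypothetical value of 32 * 2 ** 20 + 1
# bytes would be rounded up to 33 megabytes, with a warning printed to
# stderr.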
def configure_child(self, child):
# Log reads from child
child.logfile_read = multifile([self.unstructured_log_f, Logger('recv', self.structured_log_f)])
# Log writes to child
child.logfile_send = Logger('send', self.structured_log_f)
child.timeout = 600
child.setecho(False)
# Xen installs sometimes fail if we don't increase this
# from the default of 0.1 seconds. And powering down noemu
# using iLO3 over ssh takes more than 5 seconds.
child.delayafterclose = 30.0
# Also increase this just in case
child.delayafterterminate = 30.0
self.child = child
def start_uae(self, vmm_args = []):
f = open(os.path.join(self.workdir, 'netbsd.uae'), 'w')
f.write('kickstart_rom_file=/home/utkarsh009/uae/kick3.1.rom\n' +
'kbd_lang=us\n' +
'gfx_linemode=double\n' +
'sound_output=interrupts\n' +
'sound_channels=stereo\n' +
'sound_max_buff=44100\n' +
'cpu_type=68040\n' +
'cpu_speed=30\n' +
'cpu_compatible=false\n' +
'nr_floppies=1\n' +
'rtc=a3000\n' +
'wdc=both\n' +
'z3mem_size=1024\n' +
'wdcfile=rw,32,16,0,512,' + os.path.abspath(self.wd0_path()) + '\n' +
'\n'.join(vmm_args) + '\n' +
'ethercard=false\n' +
'gmtime=true\n' +
'use_gui=no\n' +
'vnc_screen=0\n' +
'vnc_password=\n' +
'vnc_viewonly=ok')
f.close()
child = self.pexpect_spawn('uae', ['-f', os.path.join(self.workdir, 'netbsd.uae'), '-I', ''])
self.configure_child(child)
return child
def start_simh(self, vmm_args = []):
f = open(os.path.join(self.workdir, 'netbsd.ini'), 'w')
f.write('set cpu ' + str(self.memory_megs()) + 'm\n' +
'set rq0 ra92\n' +
'set rq3 cdrom\n' +
'\n'.join(vmm_args) + '\n' +
'attach rq0 ' + self.wd0_path() + '\n' +
'attach -r rq3 ' + self.dist.iso_path() + '\n' +
'boot cpu')
f.close()
child = self.pexpect_spawn('simh-vax', [os.path.join(self.workdir, 'netbsd.ini')])
self.configure_child(child)
return child
def start_gxemul(self, vmm_args):
child = self.pexpect_spawn('gxemul', ["-M", str(self.memory_megs()) + 'M',
"-d", os.path.abspath(self.wd0_path())] + self.extra_vmm_args + vmm_args)
self.configure_child(child)
return child
def start_qemu(self, vmm_args, snapshot_system_disk):
# Log the qemu version to stdout
subprocess.call([self.qemu, '--version'])
# Start the actual qemu child process
child = self.pexpect_spawn(self.qemu, [
"-m", str(self.memory_megs()),
"-drive", ("file=%s,format=raw,media=disk,snapshot=%s" %
(self.wd0_path(), ("off", "on")[snapshot_system_disk])) + ("",",if=sd")[self.dist.arch() == 'evbarm-earmv7hf'],
"-nographic"
] + vmm_args + self.extra_vmm_args)
self.configure_child(child)
return child
def xen_disk_arg(self, path, devno = 0, writable = True):
if self.vmm == 'xm':
return "disk=file:%s,0x%x,%s" % (path, devno, "rw"[writable])
else: # xl
return "disk=file:%s,xvd%s,%s" % (path, chr(ord('a') + devno), "rw"[writable])
def qemu_disk_args(self, path, devno = 0, writable = True, snapshot = False):
return ["-drive", "file=%s,format=raw,media=disk,snapshot=%s" % (path, ["off", "on"][snapshot])]
def qemu_cdrom_args(self, path, devno):
return ["-drive", "file=%s,format=raw,media=cdrom,readonly=on" % (path)]
def gxemul_cdrom_args(self):
return ('', 'd:')[self.dist.arch() == 'landisk'] + self.dist.iso_path()
def gxemul_disk_args(self, path):
return ["-d", path]
def string_arg(self, name, value):
if self.vmm == 'xm':
return '%s=%s' % (name, value)
else: # xl
return '%s="%s"' % (name, value)
def start_xen_domu(self, vmm_args):
frontend = self.vmm
name = "anita-%i" % os.getpid()
args = [
frontend,
"create",
"-c",
"/dev/null",
self.xen_disk_arg(os.path.abspath(self.wd0_path()), 0, True),
"memory=" + str(self.memory_megs()),
self.string_arg('name', name)
] + vmm_args + self.extra_vmm_args
# Multiple "disk=" arguments are no longer supported with xl;
# combine them
if self.vmm == 'xl':
disk_args = []
no_disk_args = []
for arg in args:
if arg.startswith('disk='):
disk_args.append(arg[5:])
else:
no_disk_args.append(arg)
args = no_disk_args + [ "disk=[%s]" % (','.join(["'%s'" % arg for arg in disk_args]))]
child = self.pexpect_spawn(args[0], args[1:])
self.configure_child(child)
# This is ugly; we reach into the child object and set an
# additional attribute. The name of the attribute,
# "garbage_collector" below, is arbitrary, but must not
# conflict with any existing attribute of the child
# object. Its purpose is only to hold a reference to the
# DomUKiller object, such that when the child object is
# destroyed, the destructor of the DomUKiller object
# is also invoked.
child.garbage_collector = DomUKiller(frontend, name)
return child
def start_noemu(self, vmm_args):
noemu_always_args = [
'--workdir', self.workdir,
'--releasedir', os.path.join(self.workdir, 'download'),
'--arch', self.dist.arch()
]
child = self.pexpect_spawn('sudo', ['noemu'] +
noemu_always_args + vmm_args + self.extra_vmm_args)
self.configure_child(child)
return child
def install_amiga(self):
self.dist.make_iso()
print "Creating hard disk image...",
sys.stdout.flush()
make_dense_image(self.wd0_path(), 1024000000)
print "Creating install image...",
sys.stdout.flush()
wd1_path = os.path.join(self.workdir, 'wd1.img')
make_dense_image(wd1_path, 1024000000)
rdb_conf_a = os.path.join(self.workdir,'rdbedit_a.conf')
rdb_conf_b = os.path.join(self.workdir,'rdbedit_b.conf')
f = open(rdb_conf_a, 'w+')
f.write('c3 7000\n' + 'p3\n' + 'nmini\n' + 'fbootable\n' + 'o16\n' + 'tNBR\\7\n' + 'q\n' +
'c4 8624\n' + 'p4\n' + 'nsets\n' + 'o16\n' + 'tNBU\\12\n' + 'q\n' + 'q\n' + 'Y\n')
f.seek(0)
subprocess.Popen(['rdbedit', '-Fies', '2', wd1_path], stdin=f)
f.close()
g = open(rdb_conf_b, 'w+')
g.write('c3 15624\n' + 'p3\n' + 'nroot\n' + 'fbootable\n' + 'o16\n' +
'tNBR\\7\n' + 'q\n' + 'q\n' + 'Y\n')
g.seek(0)
subprocess.Popen(['rdbedit', '-Fies', '2', self.wd0_path()], stdin=g)
g.close()
miniroot_fn = os.path.join(self.workdir, 'download', 'amiga', 'installation', 'miniroot', 'miniroot.fs.gz')
bootxx = os.path.join(self.workdir, 'bootxx')
bootblock = os.path.join(self.workdir, 'bootblock')
boot_command = "netbsd -Cc 4000"
h = open(miniroot_fn, 'r')
subprocess.call('zcat | dd of=' + bootxx + ' conv=sync' + ' bs=512' + ' count=16', shell = True, stdin = h)
h.seek(0)
open(bootblock, 'w').close()
spawn('installboot',['installboot', '-m', 'amiga', '-o', 'command=' + boot_command, bootblock, bootxx])
spawn('dd',['dd', 'if=' + bootblock, 'of=' + wd1_path, 'seek=128', 'conv=sync,notrunc', 'bs=512'])
subprocess.call('zcat | dd of=' + wd1_path + ' seek=144' + ' skip=16' + ' conv=sync,notrunc' + ' bs=512', shell = True, stdin = h)
h.close()
spawn('dd', ['dd', 'if=' + self.dist.iso_path(), 'of=' + wd1_path, 'seek=896128', 'conv=sync,notrunc', 'bs=512'])
vmm_args = ['wdcfile=rw,32,16,0,512,' + os.path.abspath(wd1_path)]
child = self.start_uae(vmm_args)
loop = 0
while True:
loop = loop + 1
if loop == 28:
raise RuntimeError("loop detected")
child.expect(
# Group 1
"(map you want to activate)|" +
# Group 2
"(nstall or)|" +
# Group 3
"(Proceed with installation)|" +
# Group 4
"(Look at which)|" +
# Group 5
"(Which disk is the root disk)|" +
# Group 6
"(Device name)|" +
# Group 7
"(Ok to configure wd0b as a swap device)|" +
# Group 8
"(Edit)|" +
# Group 9
"(Configure the network)|" +
# Group 10
"(Edit the fstab)|" +
# Group 11
"(Use verbose listing for extractions)|" +
# Group 12
"(or local)|" +
# Group 13
"(Is the file-system with the install sets already mounted)|"
# Group 14
"(Which is the disk with the installation sets)|" +
# Group 15
"(Partition)|" +
# Group 16
"(Which filesystem type)|" +
# Group 17
"(contains the savesets)|" +
# Group 18
"(Continue extraction)|" +
# Group 19
"(or all)|" +
# Group 20
"(Extract more sets)|" +
# Group 21
"(What timezone are you in)|" +
# Group 22
"(on the installation filesystem)|" +
# Group 23
"(Should a boot block be installed)|" +
# Group 24
"(Boot command)|" +
# Group 25
"(the installer will restart itself)",
10800)
if child.match.group(1):
child.send("6\n")
elif child.match.group(2):
child.send("I\n")
elif child.match.group(3):
child.send("y\n")
elif child.match.group(4):
child.send("\n")
elif child.match.group(5):
child.send("wd0\n")
elif child.match.group(6):
child.send("\n")
elif child.match.group(7):
child.send("y\n")
elif child.match.group(8):
child.send("n\n")
elif child.match.group(9):
child.send("n\n")
elif child.match.group(10):
child.send("n\n")
elif child.match.group(11):
child.send("y\n")
elif child.match.group(12):
child.send("d\n")
elif child.match.group(13):
child.send("n\n")
elif child.match.group(14):
child.send("wd1\n")
elif child.match.group(15):
child.send("d\n")
elif child.match.group(16):
child.send("cd9660\n")
elif child.match.group(17):
child.send("amiga/binary/sets\n")
elif child.match.group(18):
child.send("\n")
elif child.match.group(19):
child.send("all\n")
elif child.match.group(20):
child.send("n\n")
elif child.match.group(21):
child.send("\n")
elif child.match.group(22):
child.send("n\n")
elif child.match.group(23):
child.send("y\n")
elif child.match.group(24):
child.send("netbsd -Cc 4000\n")
elif child.match.group(25):
break
else:
raise AssertionError()
while True:
child.expect("(#)|(halted by root)")
if child.match.group(1):
# Root shell prompt
child.send("halt\n")
else:
break
child.close()
# Make sure all refs go away
child = None
self.child = None
os.unlink(wd1_path)
self.dist.cleanup()
def install_sysinst(self):
# Download or build the install ISO
self.dist.set_workdir(self.workdir)
if self.dist.arch() == 'evbarm-earmv7hf':
self.dist.download()
else:
self.dist.make_iso()
arch = self.dist.arch()
if self.vmm != 'noemu':
print "Creating hard disk image...",
sys.stdout.flush()
make_dense_image(self.wd0_path(), parse_size(self.disk_size))
print "done."
if self.dist.arch() == 'evbarm-earmv7hf':
# Unzip the image
gzimage_fn = os.path.join(self.workdir,
'download', self.dist.arch(),
'binary', 'gzimg', 'armv7.img.gz')
gzimage = open(gzimage_fn, 'r')
subprocess.call('gunzip | dd of=' + self.wd0_path() + ' conv=notrunc', shell = True, stdin = gzimage)
gzimage.close()
# Unzip the kernel
gzkernel_fn = os.path.join(self.workdir,
'download', self.dist.arch(), 'binary', 'kernel',
'netbsd-VEXPRESS_A15.ub.gz')
gzkernel = open(gzkernel_fn, 'r')
kernel_fn = os.path.join(self.workdir, "netbsd-VEXPRESS_A15.ub")
kernel = open(kernel_fn, 'w')
subprocess.call('gunzip', stdin = gzkernel, stdout = kernel)
kernel.close()
gzkernel.close()
return
# The name of the CD-ROM device holding the sets
cd_device = None
if vmm_is_xen(self.vmm):
# Download XEN kernels
xenkernels = [k for k in [self.dist.xen_kernel(), self.dist.xen_install_kernel()] if k]
for kernel in xenkernels:
download_if_missing_3(self.dist.dist_url(),
self.dist.download_local_arch_dir(),
["binary", "kernel", kernel],
True)
vmm_args = [
self.string_arg('kernel', os.path.abspath(os.path.join(self.dist.download_local_arch_dir(),
"binary", "kernel", self.dist.xen_install_kernel()))),
self.xen_disk_arg(os.path.abspath(self.dist.iso_path()), 1, False)
]
child = self.start_xen_domu(vmm_args)
cd_device = 'xbd1d'
elif self.vmm == 'qemu':
# Determine what kind of media to boot from.
floppy_paths = [ os.path.join(self.dist.floppy_dir(), f) \
for f in self.dist.floppies() ]
boot_cd_path = os.path.join(self.dist.boot_iso_dir(), self.dist.boot_isos()[0])
if self.boot_from is None:
self.boot_from = self.dist.boot_from_default()
if self.boot_from is None and len(floppy_paths) == 0:
self.boot_from = 'cdrom'
if self.boot_from is None:
self.boot_from = 'floppy'
# Set up VM arguments based on the chosen boot media
if self.boot_from == 'cdrom':
vmm_args = self.qemu_cdrom_args(boot_cd_path, 1)
vmm_args += self.qemu_cdrom_args(self.dist.iso_path(), 2)
vmm_args += ["-boot", "d"]
cd_device = 'cd1a'
elif self.boot_from == 'floppy':
vmm_args = self.qemu_cdrom_args(self.dist.iso_path(), 1)
if len(floppy_paths) == 0:
raise RuntimeError("found no boot floppies")
vmm_args += ["-drive", "file=%s,format=raw,if=floppy,readonly=on" % floppy_paths[0], "-boot", "a"]
cd_device = 'cd0a'
elif self.boot_from == 'cdrom-with-sets':
# Single CD
if not self.dist.arch() == 'sparc64':
vmm_args = self.qemu_cdrom_args(self.dist.iso_path(), 1)
else:
vmm_args = ['-cdrom', self.dist.iso_path()]
vmm_args += ["-boot", "d"]
cd_device = 'cd0a'
child = self.start_qemu(vmm_args, snapshot_system_disk = False)
elif self.vmm == 'noemu':
child = self.start_noemu(['--boot-from', 'net'])
elif self.vmm == 'gxemul':
cd_device = 'cd0a'
if self.dist.arch() == 'hpcmips':
cd_device = 'cd0d'
elif self.dist.arch() == 'landisk':
cd_device = 'wd1a'
vmm_args = ["-d", self.gxemul_cdrom_args()]
if self.dist.arch() in ['pmax', 'landisk']:
vmm_args += [os.path.abspath(os.path.join(self.dist.download_local_arch_dir(),
"binary", "kernel", "netbsd-INSTALL.gz"))]
elif self.dist.arch() == 'hpcmips':
vmm_args += [os.path.abspath(os.path.join(self.dist.download_local_arch_dir(),
"installation", "netbsd.gz"))]
child = self.start_gxemul(vmm_args)
elif self.vmm == 'simh':
cd_device = 'cd0a'
child = self.start_simh()
child.expect(">>>")
child.send("boot dua3\r\n")
else:
raise RuntimeError('unknown vmm %s' % self.vmm)
term = None
if self.dist.arch() in ['hpcmips', 'landisk']:
term = 'vt100'
# Do the floppy swapping dance and other pre-sysinst interaction
floppy0_name = None
while True:
# NetBSD/i386 will prompt for a terminal type if booted from a
# CD-ROM, but not when booted from floppies. Sigh.
child.expect(
# Group 1-2
"(insert disk (\d+), and press return...)|" +
# Group 3
"(a: Installation messages in English)|" +
# Group 4
"(Terminal type)|" +
# Group 5
"(Installation medium to load the additional utilities from: )|"
# Group 6
"(1. Install NetBSD)"
)
if child.match.group(1):
# We got the "insert disk" prompt
# There is no floppy 0, hence the "- 1"
floppy_index = int(child.match.group(2)) - 1
# Escape into qemu command mode to switch floppies
child.send("\001c")
# We used to wait for a (qemu) prompt here, but qemu 0.9.1
# no longer prints it
# child.expect('\(qemu\)')
if not floppy0_name:
# Between qemu 0.9.0 and 0.9.1, the name of the floppy
# device accepted by the "change" command changed from
# "fda" to "floppy0" without any provision for backwards
# compatibility. Deal with it. Also deal with the fact
# that as of qemu 0.15, "info block" no longer prints
# "type=floppy" for floppy drives. And in qemu 2.5.0,
# the format changed again from "floppy0: " to
# "floppy0 (#block544): ", so we no longer match the
# colon and space.
child.send("info block\n")
child.expect(r'\n(fda|floppy0)')
floppy0_name = child.match.group(1)
# Now we can change the floppy
child.send("change %s %s\n" %
(floppy0_name, floppy_paths[floppy_index]))
# Exit qemu command mode
child.send("\001c\n")
elif child.match.group(3):
# "Installation messages in English"
break
elif child.match.group(4):
# "Terminal type"
child.send("xterm\n")
term = "xterm"
continue
elif child.match.group(5):
# "Installation medium to load the additional utilities from"
# (SPARC)
child.send("cdrom\n")
child.expect("CD-ROM device to use")
child.send("\n")
child.expect("Path to instfs.tgz")
child.send("\n")
child.expect("Terminal type")
# The default is "sun", but anita is more likely to run
# in an xterm or some other ansi-like terminal than on
# a sun console.
child.send("xterm\n")
term = "xterm"
child.expect("nstall/Upgrade")
child.send("I\n")
elif child.match.group(6):
# "1. Install NetBSD"
child.send("1\n")
# Confirm "Installation messages in English"
child.send("\n")
# i386 and amd64 ask for keyboard type here; sparc doesn't
while True:
child.expect("(Keyboard type)|(a: Install NetBSD to hard disk)|" +
"(Shall we continue)")
if child.match.group(1) or child.match.group(2):
child.send("\n")
elif child.match.group(3):
child.expect("b: Yes")
child.send("b\n")
break
else:
raise AssertionError
# Depending on the number of disks attached, we get either
# "found only one disk" followed by "Hit enter to continue",
# or "On which disk do you want to install".
child.expect("(Hit enter to continue)|" +
"(On which disk do you want to install)")
if child.match.group(1):
child.send("\n")
elif child.match.group(2):
child.send("a\n")
else:
raise AssertionError
def choose_no():
child.expect("([a-z]): No")
child.send(child.match.group(1) + "\n")
def choose_yes():
child.expect("([a-z]): Yes")
child.send(child.match.group(1) + "\n")
# Keep track of sets we have already handled, by label.
# This is needed so that parsing a pop-up submenu is not
# confused by earlier output echoing past choices.
labels_seen = set()
def choose_sets(set_list):
sets_this_screen = []
# First parse the set selection screen or popup; it's messy.
while True:
# Match a letter-label pair, like "h: Compiler Tools",
# followed by an installation status of Yes, No, All,
# or None. The label can be separated from the "Yes/No"
# field either by spaces (at least two, so that there can
# be single spaces within the label), or by a cursor
# positioning escape sequence. In the case of the
# "X11 fonts" set, we strangely get both a single space
# and an escape sequence, which seems suboptimal.
#
# Alternatively, match the special letter "x: " which
# is not followed by an installation status.
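# For example (screen text illustrative), a menu line rendered as
#   "h: Compiler Tools        Yes"
# yields letter='h', label='Compiler Tools', yesno='Yes', exit=None,
# while the "x: " exit entry sets only the final group.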
child.expect(
"(?:([a-z]): ([^ \x1b]+(?: [^ \x1b]+)*)(?:(?:\s\s+)|(?:\s?\x1b\[\d+;\d+H\x00*))(Yes|No|All|None))|(x: )")
(letter, label, yesno, exit) = child.match.groups()
if exit:
if len(sets_this_screen) != 0:
break
else:
for set in set_list:
if re.match(set['label'], label) and label not in labels_seen:
sets_this_screen.append({
'set': set,
'letter': letter,
'state': yesno
})
labels_seen.add(label)
# Then make the actual selections
for item in sets_this_screen:
set = item['set']
enable = set['install']
state = item['state']
group = set.get('group')
if (enable and state == "No" or \
not enable and state == "Yes") \
or group:
child.send(item['letter'] + "\n")
if group:
# Recurse to handle sub-menu
choose_sets(group)
# Exit the set selection menu
child.send("x\n")
# Older NetBSD versions show a prompt like [re0] and ask you
# to type in the interface name (or enter for the default);
# newer versions show a menu.
def choose_interface_oldstyle():
self.slog('old-style interface list')
# Choose the first non-fwip interface
while True:
child.expect(r"([a-z]+)([0-9]) ")
ifname = child.match.group(1)
ifno = child.match.group(2)
self.slog('old-style interface: <%s,%s>' % (ifname, ifno))
if ifname != 'fwip':
# Found an acceptable interface
child.send("%s%s\n" % (ifname, ifno))
break
def choose_interface_newstyle():
self.slog('new-style interface list')
child.expect('Available interfaces')
# Choose the first non-fwip interface
while True:
# Make sure to match the digit after the interface
# name so that we don't accept a partial interface
# name like "fw" from "fwip0".
child.expect(r"([a-z]): ([a-z]+)[0-9]")
if child.match.group(2) != 'fwip':
# Found an acceptable interface
child.send(child.match.group(1) + "\n")
break
def configure_network():
child.expect("Network media type")
child.send("\n")
child.expect("Perform (DHCP )?autoconfiguration")
child.expect("([a-z]): No")
child.send(child.match.group(1) + "\n")
def choose_a():
child.send("a\n")
def choose_dns_server():
child.expect("([a-z]): other")
child.send(child.match.group(1) + "\n")
child.send("10.0.1.1\n")
expect_any(child,
r"Your host name", "anita-test\n",
r"Your DNS domain", "netbsd.org\n",
r"Your IPv4 (number)|(address)", "10.169.0.2\n",
r"IPv4 Netmask", "255.255.255.0\n",
r"IPv4 gateway", "10.169.0.1\n",
r"IPv4 name server", "10.0.1.1\n",
r"Perform IPv6 autoconfiguration", choose_no,
r"Select (IPv6 )?DNS server", choose_dns_server,
r"Are they OK", choose_yes)
self.network_configured = True
self.network_configured = False
# Many different things can happen at this point:
#
# Versions older than 2009/08/23 21:16:17 will display a menu
# for choosing the extraction verbosity
#
# Versions older than 2010/03/30 20:09:25 will display a menu for
# choosing the CD-ROM device (newer versions will choose automatically)
#
# Versions older than Fri Apr 6 23:48:53 2012 UTC will ask
# you to "Please choose the timezone", wheras newer ones will
# instead as you to "Configure the additional items".
#
# At various points, we may or may not get "Hit enter to continue"
# prompts (and some of them seem to appear nondeterministically)
#
# i386/amd64 can ask whether to use normal or serial console bootblocks
#
# Try to deal with all of the possible options.
#
# We specify a longer timeout than the default here, because the
# set extraction can take a long time on slower machines.
#
# It has happened (at least with NetBSD 3.0.1) that sysinst paints the
# screen twice. This can cause problems because we will then respond
# twice, and the second response will be interpreted as a response to
# a subsequent prompt. Therefore, we check whether the match is the
# same as the previous one and ignore it if so.
#
# OTOH, -current as of 2009.08.23.20.57.40 will issue the message "Hit
# enter to continue" twice in a row, first as a result of MAKEDEV
# printing a warning message "MAKEDEV: dri0: unknown device", and
# then after "sysinst will give you the opportunity to configure
# some essential things first". We match the latter text separately
# so that the "Hit enter to continue" matches are not consecutive.
#
# The changes of Apr 6 2012 brought with them a new redraw problem,
# which is worked around using the seen_essential_things variable.
#
prevmatch = []
seen_essential_things = 0
loop = 0
while True:
loop = loop + 1
if loop == 20:
raise RuntimeError("loop detected")
child.expect(
# Group 1
"(a: Progress bar)|" +
# Group 2
"(a: CD-ROM)|" +
# Group 3-4
"(([cx]): Continue)|" +
# Group 5
"(Hit enter to continue)|" +
# Group 6
"(b: Use serial port com0)|" +
# Group 7
"(Please choose the timezone)|" +
# Group 8
"(essential things)|" +
# Group 9
"(Configure the additional items)|" +
# Group 10
"(Multiple CDs found)|" +
# Group 11
"(The following are the http site)|" +
# Group 12
"(Is the network information you entered accurate)|" +
# Group 13-14 (old-style / new-style)
"(I have found the following network interfaces)|(Which network device would you like to use)|" +
# Group 15
"(No allows you to continue anyway)|" +
# Group 16
r"(Can't connect to)|" +
# Group 17
"(/sbin/newfs)|" +
# Group 18
"(not-in-use)|" +
# Group 19
"(not-in-use)|" +
# Group 20-21
"(([a-z]): Custom installation)|" +
# Group 22
"(a: This is the correct geometry)|" +
# Group 23
"(a: Use one of these disks)|" +
# Group 24
"(a: Set sizes of NetBSD partitions)|" +
# Group 25
"(Sysinst could not automatically determine the BIOS geometry of the disk)",
10800)
if child.match.groups() == prevmatch:
self.slog('ignoring repeat match')
continue
prevmatch = child.match.groups()
if child.match.group(1):
# (a: Progress bar)
child.send("\n")
elif child.match.group(2):
# (a: CD-ROM)
if self.vmm == 'noemu':
child.send("c\n") # install from HTTP
# We next end up at either "Which device shall I"
# or "The following are the http site" depending on
# the NetBSD version.
else:
child.send("a\n") # install from CD-ROM
elif child.match.group(3):
# CDROM device selection
if cd_device != 'cd0a':
child.send("a\n" + cd_device + "\n")
# (([cx]): Continue)
# In 3.0.1, you type "c" to continue, whereas in -current,
# you type "x". Handle both cases.
child.send(child.match.group(4) + "\n")
elif child.match.group(5):
# (Hit enter to continue)
if seen_essential_things >= 2:
# This must be a redraw
pass
else:
child.send("\n")
elif child.match.group(6):
# (b: Use serial port com0)
child.send("bx\n")
elif child.match.group(7):
# (Please choose the timezone)
# "Press 'x' followed by RETURN to quit the timezone selection"
child.send("x\n")
# The strange non-deterministic "Hit enter to continue" prompt has
# also been spotted after executing the sed commands to set the
# root password cipher, with 2010.10.27.10.42.12 source.
while True:
child.expect("(([a-z]): DES)|(root password)|(Hit enter to continue)")
if child.match.group(1):
# DES
child.send(child.match.group(2) + "\n")
elif child.match.group(3):
# root password
break
elif child.match.group(4):
# (Hit enter to continue)
child.send("\n")
else:
raise AssertionError
# Don't set a root password
child.expect("b: No")
child.send("b\n")
child.expect("a: /bin/sh")
child.send("\n")
# "The installation of NetBSD-3.1 is now complete. The system
# should boot from hard disk. Follow the instructions in the
# INSTALL document about final configuration of your system.
# The afterboot(8) manpage is another recommended reading; it
# contains a list of things to be checked after the first
# complete boot."
#
# We are supposed to get a single "Hit enter to continue"
# prompt here, but sometimes we get a weird spurious one
# after running chpass above.
while True:
child.expect("(Hit enter to continue)|(x: Exit)")
if child.match.group(1):
child.send("\n")
elif child.match.group(2):
child.send("x\n")
break
else:
raise AssertionError
break
elif child.match.group(8):
# (essential things)
seen_essential_things += 1
elif child.match.group(9):
# (Configure the additional items)
child.expect("x: Finished configuring")
child.send("x\n")
break
elif child.match.group(10):
# (Multiple CDs found)
# This happens if we have a boot CD and a CD with sets;
# we need to choose the latter.
child.send("b\n")
elif child.match.group(11):
# (The following are the http site)
# \027 is control-w, which clears the field
child.send("a\n\02710.169.0.1\n") # IP address
child.send("b\n\027\n") # Directory = empty string
if not self.network_configured:
child.send("j\n") # Configure network
choose_interface_newstyle()
configure_network()
# We get 'Hit enter to continue' if this sysinst
# version tries ping6 even if we have not configured
# IPv6
expect_any(child,
r'Hit enter to continue', '\r',
r'x: Get Distribution', 'x\n')
r = child.expect(["Install from", "/usr/bin/ftp"])
if r == 0:
# ...and I'm back at the "Install from" menu?
# Probably the same bug reported as install/49440.
child.send("c\n") # HTTP
# And again...
child.expect("The following are the http site")
child.expect("x: Get Distribution")
child.send("x\n")
elif r == 1:
pass
else:
assert(0)
elif child.match.group(12):
# "Is the network information you entered accurate"
child.expect("([a-z]): Yes")
child.send(child.match.group(1) + "\n")
elif child.match.group(13):
# "(I have found the following network interfaces)"
choose_interface_oldstyle()
configure_network()
elif child.match.group(14):
# "(Which network device would you like to use)"
choose_interface_newstyle()
configure_network()
elif child.match.group(15):
choose_no()
child.expect("No aborts the install process")
choose_yes()
elif child.match.group(16):
self.slog("network problems detected")
child.send("\003") # control-c
gather_input(child, 666)
for i in range(60):
child.send("ifconfig -a\n")
gather_input(child, 1)
# would run netstat here but it's not on the install media
gather_input(child, 30)
sys.exit(1)
elif child.match.group(17):
self.slog("matched newfs to defeat repeat match detection")
elif child.match.group(20):
# Custom installation is choice "d" in 6.0,
# but choice "c" or "b" in older versions
# We could use "Minimal", but it doesn't exist in
# older versions.
child.send(child.match.group(21) + "\n")
# Enable/disable sets.
choose_sets(self.dist.sets)
# On non-Xen i386/amd64 we first get group 22 or 23,
# then group 24; on sparc and Xen, we just get group 24.
elif (child.match.group(22) or child.match.group(23)):
if child.match.group(22):
child.send("\n")
elif child.match.group(23):
child.send("a\n")
child.expect("Choose disk")
child.send("0\n")
child.expect("b: Use the entire disk")
child.send("b\n")
while True:
child.expect(r'(Your disk currently has a non-NetBSD partition)|' +
r'(Do you want to install the NetBSD bootcode)|' +
r'(Do you want to update the bootcode)')
if child.match.group(1):
# Your disk currently has a non-NetBSD partition
child.expect("a: Yes")
child.send("\n")
elif child.match.group(2) or child.match.group(3):
# Install or replace bootcode
child.expect("a: Yes")
child.send("\n")
break
elif child.match.group(24):
# (a: Set sizes of NetBSD partitions)
child.send("a\n")
child.expect("Accept partition sizes")
# Press cursor-down enough times to get to the end of the list,
# to the "Accept partition sizes" entry, then press
# enter to continue. Previously, we used control-N ("\016"),
# but if it gets echoed (which has happened), it is interpreted by
# the terminal as "enable line drawing character set", leaving the
# terminal in an unusable state.
if term in ['xterm', 'vt100']:
# For unknown reasons, when using a terminal type of "xterm",
# sysinst puts the terminal in "application mode", causing the
# cursor keys to send a different escape sequence than the default.
cursor_down = "\033OB"
else:
# Use the default ANSI cursor-down escape sequence
cursor_down = "\033[B"
child.send(cursor_down * 8 + "\n")
child.expect("x: Partition sizes ok")
child.send("\n")
child.expect("Please enter a name for your NetBSD disk")
child.send("\n")
# "This is your last chance to quit this process..."
child.expect("Shall we continue")
child.expect("b: Yes")
child.send("b\n")
# newfs is run at this point
elif child.match.group(25):
# We need to enter these values in cases where sysinst could not
# determine disk geometry. Currently, this happens for NetBSD/hpcmips
child.expect("sectors")
child.send("\n")
child.expect("heads")
child.send("\n")
child.expect("b: Use the entire disk")
child.send("b\n")
else:
raise AssertionError
# Installation is finished, halt the system.
# Historically, on i386 and amd64 you get a root shell,
# while sparc just halts.
# Since Fri Apr 6 23:48:53 2012 UTC, you are kicked
# back into the main menu.
while True:
child.expect("(Hit enter to continue)|(x: Exit Install System)|(#)|(halting machine)|(halted by root)")
if child.match.group(1):
child.send("\n")
elif child.match.group(2):
# Back in menu
child.send("x\n")
elif child.match.group(3):
# Root shell prompt
child.send("halt\n")
else:
# group 4 or 5: halted
break
child.close()
# Make sure all refs go away
child = None
self.child = None
self.dist.cleanup()
# Install NetBSD if not installed already
def install(self):
# This is needed for Xen and noemu, where we get the kernel
# from the dist rather than the installed image
self.dist.set_workdir(self.workdir)
if self.vmm == 'noemu':
self.dist.download()
self._install()
else:
# Already installed?
if os.path.exists(self.wd0_path()):
return
try:
if self.dist.arch() == 'amiga':
self.install_amiga()
else:
self.install_sysinst()
except:
if os.path.exists(self.wd0_path()):
os.unlink(self.wd0_path())
raise
# Boot the virtual machine (installing it first if it's not
# installed already). The vmm_args argument applies when
# booting, but not when installing. Does not wait for
# a login prompt.
def start_boot(self, vmm_args = None):
if vmm_args is None:
vmm_args = []
if not self.no_install:
self.install()
if self.dist.arch() in ['hpcmips', 'landisk']:
vmm_args += [os.path.abspath(os.path.join(self.dist.download_local_arch_dir(),
"binary", "kernel", "netbsd-GENERIC.gz"))]
if self.vmm == 'qemu':
child = self.start_qemu(vmm_args, snapshot_system_disk = not self.persist)
# "-net", "nic,model=ne2k_pci", "-net", "user"
elif vmm_is_xen(self.vmm):
child = self.start_xen_domu(vmm_args + [self.string_arg('kernel',
os.path.abspath(os.path.join(self.dist.download_local_arch_dir(),
"binary", "kernel", self.dist.xen_kernel())))])
elif self.vmm == 'noemu':
child = self.start_noemu(vmm_args + ['--boot-from', 'disk'])
elif self.vmm == 'gxemul':
child = self.start_gxemul(vmm_args)
elif self.vmm == 'simh':
child = self.start_simh(vmm_args)
child.expect(">>>")
child.send("boot dua0\r\n")
elif self.vmm == 'uae':
child = self.start_uae(vmm_args)
else:
raise RuntimeError('unknown vmm %s' % self.vmm)
self.child = child
return child
# Like start_boot(), but wait for a login prompt.
def boot(self, vmm_args = None):
self.start_boot(vmm_args)
self.child.expect("login:")
# Can't close child here because we still need it if called from
# interact()
return self.child
# Deprecated
def interact(self):
child = self.boot()
console_interaction(child)
def run_tests(self, timeout = 10800):
results_by_net = (self.vmm == 'noemu')
# Create a scratch disk image for exporting test results from the VM.
# The results are stored in tar format because that is more portable
# and easier to manipulate than a file system image, especially if the
# host is a non-NetBSD system.
#
# If we are getting the results back by tftp, this file will
# be overwritten.
scratch_disk_path = os.path.join(self.workdir, "tests-results.img")
if vmm_is_xen(self.vmm):
scratch_disk = 'xbd1d'
else:
scratch_disk = self.dist.scratch_disk()
mkdir_p(self.workdir)
scratch_image_megs = 100
make_dense_image(scratch_disk_path, parse_size('%dM' % scratch_image_megs))
# Leave a 10% safety margin
max_result_size_k = scratch_image_megs * 900
if vmm_is_xen(self.vmm):
scratch_disk_args = [self.xen_disk_arg(os.path.abspath(scratch_disk_path), 1, True)]
elif self.vmm == 'qemu':
scratch_disk_args = self.qemu_disk_args(os.path.abspath(scratch_disk_path), 1, True, False)
elif self.vmm == 'noemu':
scratch_disk_args = []
elif self.vmm == 'gxemul':
scratch_disk_args = self.gxemul_disk_args(os.path.abspath(scratch_disk_path))
elif self.vmm == 'simh':
scratch_disk_args = ['set rq1 ra92', 'attach rq1 ' + scratch_disk_path]
elif self.vmm == 'uae':
scratch_disk_args = ['wdcfile=rw,32,16,0,512,' + scratch_disk_path]
else:
raise RuntimeError('unknown vmm')
child = self.boot(scratch_disk_args)
self.login()
if self.tests == "kyua":
if self.shell_cmd("grep -q 'MKKYUA.*=.*yes' /etc/release") != 0:
raise RuntimeError("kyua is not installed.")
test_cmd = (
"kyua " +
"--loglevel=error " +
"--logfile=/tmp/tests/kyua-test.log " +
"test " +
"--store=/tmp/tests/store.db; " +
"echo $? >/tmp/tests/test.status; " +
"kyua " +
"report " +
"--store=/tmp/tests/store.db " +
"| tail -n 3; " +
"kyua " +
"--loglevel=error " +
"--logfile=/tmp/tests/kyua-report-html.log " +
"report-html " +
"--store=/tmp/tests/store.db " +
"--output=/tmp/tests/html; ")
elif self.tests == "atf":
atf_aux_files = ['/usr/share/xsl/atf/tests-results.xsl',
'/usr/share/xml/atf/tests-results.dtd',
'/usr/share/examples/atf/tests-results.css']
test_cmd = (
"{ atf-run; echo $? >/tmp/tests/test.status; } | " +
"tee /tmp/tests/test.tps | " +
"atf-report -o ticker:- -o xml:/tmp/tests/test.xml; " +
"(cd /tmp && for f in %s; do cp $f tests/; done;); " % ' '.join(atf_aux_files))
else:
raise RuntimeError('unknown testing framework %s' % self.tests)
exit_status = self.shell_cmd(
"df -k | sed 's/^/df-pre-test /'; " +
"mkdir /tmp/tests && " +
"cd /usr/tests && " +
test_cmd +
("{ cd /tmp && " +
# Make sure the files will fit on the scratch disk
"test `du -sk tests | awk '{print $1}'` -lt %d && " % max_result_size_k +
# To guard against accidentally overwriting the wrong
# disk image, check that the disk contains nothing
# but nulls.
"test `</dev/r%s tr -d '\\000' | wc -c` = 0 && " % scratch_disk +
# "disklabel -W /dev/rwd1d && " +
"tar cf /dev/r%s tests; " % scratch_disk +
"}; " if not results_by_net else \
"{ cd /tmp && tar cf tests-results.img tests && echo put tests-results.img | tftp 10.169.0.1; };") +
"df -k | sed 's/^/df-post-test /'; " +
"ps -glaxw | sed 's/^/ps-post-test /'; " +
"vmstat -s; " +
"sh -c 'exit `cat /tmp/tests/test.status`'",
timeout)
# We give tar an explicit path to extract to guard against
# the possibility of an arbitrary file overwrite attack if
# anita is used to test an untrusted virtual machine.
tarfile = open(scratch_disk_path, "r")
subprocess.call(["tar", "xf", "-", "tests"],
cwd = self.workdir, stdin = tarfile)
# For backwards compatibility, point workdir/atf to workdir/tests.
compat_link = os.path.join(self.workdir, 'atf')
if not os.path.lexists(compat_link):
os.symlink('tests', compat_link)
return exit_status
# Backwards compatibility
run_atf_tests = run_tests
# Log in, if not logged in already
def login(self):
if self.is_logged_in:
return
login(self.child)
self.is_logged_in = True
# Run a shell command
def shell_cmd(self, cmd, timeout = -1):
self.login()
return shell_cmd(self.child, cmd, timeout)
# Halt the VM
def halt(self):
self.login()
self.child.send("halt\n")
try:
# Wait for text confirming the halt, or EOF
self.child.expect("(The operating system has halted)|(entering state S5)", timeout = 60)
except pexpect.EOF:
# Didn't see the text but got an EOF; that's OK.
print "EOF"
except pexpect.TIMEOUT, e:
# This is unexpected but mostly harmless
print "timeout waiting for halt confirmation:", e
def console_interaction(child):
# We need this in pexpect 2.x or everything will be printed twice
child.logfile_read = None
child.logfile_send = None
child.interact()
# Calling this directly is deprecated, use Anita.login()
def login(child):
# Send a newline character to get another login prompt, since boot() consumed one.
child.send("\n")
child.expect("login:")
child.send("root\n")
# This used to be "\n# ", but that doesn't work if the machine has
# a hostname
child.expect("# ")
def net_setup(child):
child.send("dhclient ne2\n")
child.expect("bound to.*\n# ")
# Generate a root shell prompt string that is less likely to appear in
# the console output by accident than the default of "# ". Must end with "# ".
def gen_shell_prompt():
return 'anita-root-shell-prompt-%s# ' % str(time.time())
# Quote a prompt in /bin/sh syntax, with some extra quotes
# in the middle so that an echoed command to set the prompt is not
# mistaken for the prompt itself.
def quote_prompt(s):
midpoint = len(s) / 2
return "".join("'%s'" % part for part in (s[0:midpoint], s[midpoint:]))
# Calling this directly is deprecated, use Anita.shell_cmd()
def shell_cmd(child, cmd, timeout = -1):
child.send("exec /bin/sh\n")
child.expect("# ")
prompt = gen_shell_prompt()
child.send("PS1=" + quote_prompt(prompt) + "\n")
prompt_re = prompt
child.expect(prompt_re)
child.send(cmd + "\n")
# Catch EOF to log the signalstatus, to help debug qemu crashes
try:
child.expect(prompt_re, timeout)
except pexpect.EOF:
print "pexpect reported EOF - VMM exited unexpectedly"
child.close()
print "exitstatus", child.exitstatus
print "signalstatus", child.signalstatus
raise
except:
raise
child.send("echo exit_status=$?=\n")
child.expect("exit_status=(\d+)=")
r = int(child.match.group(1))
child.expect(prompt_re, timeout)
return r
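# Hypothetical usage, with "child" an already logged-in console session:
#   status = shell_cmd(child, "test -f /etc/release", 30)
#   # status is 0 if the file exists, non-zero otherwise; the value is
#   # recovered from the "exit_status=N=" line echoed after the command.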
def test(child):
raise RuntimeError("global test() function is gone, use Anita.run_tests()")
#############################################################################
|
utkarsh009/anita
|
anita.py
|
Python
|
isc
| 82,722
|
from django.contrib.auth.decorators import login_required
from django.core.exceptions import PermissionDenied, ValidationError
from django.core.urlresolvers import reverse_lazy
from django.http import HttpResponseRedirect
from django.utils.decorators import method_decorator
from django.views.generic import DetailView
from django.views.generic.edit import CreateView, UpdateView, DeleteView
from django.views.generic.list import ListView
from django.conf import settings
from wafer.talks.models import Talk, ACCEPTED
from wafer.talks.forms import TalkForm
class EditOwnTalksMixin(object):
'''Users can edit their own talks as long as the talk is
"Under Consideration"'''
def get_object(self, *args, **kwargs):
object_ = super(EditOwnTalksMixin, self).get_object(*args, **kwargs)
if object_.can_edit(self.request.user):
return object_
else:
raise PermissionDenied
class LoginRequiredMixin(object):
'''Must be logged in'''
@method_decorator(login_required)
def dispatch(self, *args, **kwargs):
return super(LoginRequiredMixin, self).dispatch(*args, **kwargs)
class UsersTalks(ListView):
template_name = 'wafer.talks/talks.html'
paginate_by = 25
def get_queryset(self):
# self.request will be None when we come here via the static site
# renderer
if (self.request and Talk.can_view_all(self.request.user)):
return Talk.objects.all()
return Talk.objects.filter(status=ACCEPTED)
class TalkView(DetailView):
template_name = 'wafer.talks/talk.html'
model = Talk
def get_object(self, *args, **kwargs):
'''Only talk owners can see talks, unless they've been accepted'''
object_ = super(TalkView, self).get_object(*args, **kwargs)
if object_.can_view(self.request.user):
return object_
else:
raise PermissionDenied
def get_context_data(self, **kwargs):
context = super(TalkView, self).get_context_data(**kwargs)
context['can_edit'] = self.object.can_edit(self.request.user)
return context
class TalkCreate(LoginRequiredMixin, CreateView):
model = Talk
form_class = TalkForm
template_name = 'wafer.talks/talk_form.html'
def get_context_data(self, **kwargs):
context = super(TalkCreate, self).get_context_data(**kwargs)
context['can_submit'] = getattr(settings, 'WAFER_TALKS_OPEN', True)
return context
def form_valid(self, form):
if not getattr(settings, 'WAFER_TALKS_OPEN', True):
raise ValidationError # Should this be SuspiciousOperation?
# Eaaargh we have to do the work of CreateView if we want to set values
# before saving
self.object = form.save(commit=False)
self.object.corresponding_author = self.request.user
self.object.save()
# Save the author information as well (many-to-many fun)
form.save_m2m()
return HttpResponseRedirect(self.get_success_url())
class TalkUpdate(EditOwnTalksMixin, UpdateView):
model = Talk
form_class = TalkForm
template_name = 'wafer.talks/talk_form.html'
def get_context_data(self, **kwargs):
context = super(TalkUpdate, self).get_context_data(**kwargs)
context['can_edit'] = self.object.can_edit(self.request.user)
return context
class TalkDelete(EditOwnTalksMixin, DeleteView):
model = Talk
template_name = 'wafer.talks/talk_delete.html'
success_url = reverse_lazy('wafer_page', args=('index',))
|
CarlFK/wafer
|
wafer/talks/views.py
|
Python
|
isc
| 3,564
|
# -*- coding: utf-8 -*-
# EForge project management system, Copyright © 2010, Element43
#
# Permission to use, copy, modify, and/or distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
#
from django.db import models
from django.core.urlresolvers import reverse
from eforge.models import Project
from eforge.update.models import Update, register_update_type
from eforge.vcs import project_repository
class Revision(models.Model):
id_no = models.AutoField(primary_key=True)
id = models.CharField(max_length=40, db_index=True)
project = models.ForeignKey(Project)
parents = models.ManyToManyField('self', related_name='children')
date = models.DateTimeField()
@property
def vcs_revision(self):
""" Revision object from the VCS plugin """
if not getattr(self, '_vcs_revision', None):
self._vcs_revision = project_repository(self.project).revision(self.id)
return self._vcs_revision
class Update:
@classmethod
def user(self, revision):
return revision.author_user
@classmethod
def project(self, revision):
return revision.project
@classmethod
def summary(self, revision):
return 'Revision %s' % revision.short_id
@classmethod
def description(self, revision):
return revision.message
@classmethod
def url(self, revision):
return reverse('browse-revision',
args=[revision.project.slug, revision.id])
@classmethod
def date(self, revision):
return revision.date
register_update_type(Revision)
def _proxy_property(name):
def _proxy(self):
return getattr(self.vcs_revision, name)
setattr(Revision, name, property(_proxy))
_proxy_property('short_id')
_proxy_property('author_email')
_proxy_property('author_name')
_proxy_property('author_user')
_proxy_property('message')
_proxy_property('short_message')
_proxy_property('root')
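# Illustrative sketch (editor addition): each proxied attribute delegates to the
# lazily loaded VCS object, so the two expressions below are equivalent; the second
# form is what the property actually evaluates, computed once and then cached on
# the instance as _vcs_revision.
#
#     rev.author_name
#     project_repository(rev.project).revision(rev.id).author_name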
|
oshepherd/eforge
|
eforge/vcs/models.py
|
Python
|
isc
| 2,863
|
from argparse import ArgumentParser
import socket
import struct
import sys
import threading
import time
from ._fakeds import FakeDS
__all__ = ["Netconsole", "main", "run"]
def _output_fn(s):
sys.stdout.write(
s.encode(sys.stdout.encoding, errors="replace").decode(sys.stdout.encoding)
)
sys.stdout.write("\n")
class StreamEOF(IOError):
pass
class Netconsole:
"""
Implements the 2018+ netconsole protocol
"""
TAG_ERROR = 11
TAG_INFO = 12
def __init__(self, printfn=_output_fn):
self.frames = {self.TAG_ERROR: self._onError, self.TAG_INFO: self._onInfo}
self.cond = threading.Condition()
self.sock = None
self.sockrfp = None
self.sockwfp = None
self.sockaddr = None
self.running = False
self.printfn = printfn
def start(self, address, port=1741, connect_event=None, block=True):
with self.cond:
if self.running:
raise ValueError("Cannot start without stopping first")
self.sockaddr = (address, port)
self.connect_event = connect_event
self.running = True
self._rt = threading.Thread(
target=self._readThread, name="nc-read-thread", daemon=True
)
self._rt.start()
if block:
self._keepAlive()
else:
self._kt = threading.Thread(
target=self._keepAlive, name="nc-keepalive-thread", daemon=True
)
self._kt.start()
@property
def connected(self):
return self.sockrfp is not None
def stop(self):
with self.cond:
self.running = False
self.cond.notify_all()
self.sock.close()
def _connectionDropped(self):
print(".. connection dropped", file=sys.stderr)
self.sock.close()
with self.cond:
self.sockrfp = None
self.cond.notify_all()
def _keepAliveReady(self):
if not self.running:
return -1
elif not self.connected:
return -2
def _keepAlive(self):
while self.running:
with self.cond:
ret = self.cond.wait_for(self._keepAliveReady, timeout=2.0)
if ret == -1:
return
elif ret == -2:
self._reconnect()
else:
try:
self.sockwfp.write(b"\x00\x00")
self.sockwfp.flush()
except IOError:
self._connectionDropped()
def _readThreadReady(self):
if not self.running:
return -1
return self.sockrfp
def _readThread(self):
while True:
with self.cond:
sockrfp = self.cond.wait_for(self._readThreadReady)
if sockrfp == -1:
return
try:
data = sockrfp.read(self._headerSz)
except IOError:
data = ""
if len(data) != self._headerSz:
self._connectionDropped()
continue
blen, tag = self._header.unpack(data)
blen -= 1
try:
buf = sockrfp.read(blen)
except IOError:
buf = ""
if len(buf) != blen:
self._connectionDropped()
continue
# process the frame
fn = self.frames.get(tag)
if fn:
fn(buf)
else:
print("ERROR: Unknown tag %s; Ignoring..." % tag, file=sys.stderr)
def _reconnect(self):
# returns once the socket is connected or an exit is requested
while self.running:
sys.stderr.write("Connecting to %s:%s..." % self.sockaddr)
try:
sock = socket.create_connection(self.sockaddr, timeout=3.0)
except IOError:
sys.stderr.write(" :(\n")
# don't busywait, just in case
time.sleep(1.0)
continue
else:
sys.stderr.write("OK\n")
sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
sock.settimeout(None)
sockrfp = sock.makefile("rb")
sockwfp = sock.makefile("wb")
if self.connect_event:
self.connect_event.set()
with self.cond:
self.sock = sock
self.sockrfp = sockrfp
self.sockwfp = sockwfp
self.cond.notify_all()
break
#
# Message
#
_header = struct.Struct(">Hb")
_headerSz = _header.size
_errorFrame = struct.Struct(">fHHiB")
_errorFrameSz = _errorFrame.size
_infoFrame = struct.Struct(">fH")
_infoFrameSz = _infoFrame.size
_slen = struct.Struct(">H")
_slenSz = _slen.size
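    # Illustrative note (editor addition): each frame on the wire is
    # <u16 length><i8 tag><payload>, where length counts the tag byte plus the
    # payload (hence the "blen -= 1" in _readThread). Strings inside an error
    # payload are themselves <u16 length><utf-8 bytes>, which _getStr() below walks.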
def _onError(self, b):
ts, _seq, _numOcc, errorCode, flags = self._errorFrame.unpack_from(b, 0)
details, nidx = self._getStr(b, self._errorFrameSz)
location, nidx = self._getStr(b, nidx)
callStack, _ = self._getStr(b, nidx)
self.printfn(
"[%0.2f] %d %s %s %s" % (ts, errorCode, details, location, callStack)
)
def _getStr(self, b, idx):
sidx = idx + self._slenSz
(blen,) = self._slen.unpack_from(b, idx)
nextidx = sidx + blen
return b[sidx:nextidx].decode("utf-8", errors="replace"), nextidx
def _onInfo(self, b):
ts, _seq = self._infoFrame.unpack_from(b, 0)
msg = b[self._infoFrameSz :].decode("utf-8", errors="replace")
self.printfn("[%0.2f] %s" % (ts, msg))
def run(address, connect_event=None, fakeds=False):
"""
Starts the netconsole loop. Note that netconsole will only send output
if the DS is connected. If you don't have a DS available, the 'fakeds'
flag can be specified to fake a DS connection.
:param address: Address of the netconsole server
:param connect_event: a threading.event object, upon which the 'set'
function will be called when the connection has
succeeded.
:param fakeds: Fake a driver station connection
"""
if fakeds:
ds = FakeDS()
ds.start(address)
nc = Netconsole()
nc.start(address, connect_event=connect_event)
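# Hedged usage sketch (editor addition): embedding the reader in another tool without
# blocking the caller. The address below is only a placeholder.
def _example_background_reader():
    import threading
    connected = threading.Event()
    nc = Netconsole()
    nc.start("10.12.34.2", connect_event=connected, block=False)
    connected.wait(timeout=10.0)  # set once the first TCP connection succeeds
    return nc                     # call nc.stop() when finished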
def main():
parser = ArgumentParser()
parser.add_argument("address", help="Address of Robot")
parser.add_argument(
"-f",
"--fakeds",
action="store_true",
default=False,
help="Fake a driver station connection to the robot",
)
args = parser.parse_args()
run(args.address, fakeds=args.fakeds)
|
robotpy/pynetconsole
|
netconsole/netconsole.py
|
Python
|
isc
| 6,782
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# girc documentation build configuration file, created by
# sphinx-quickstart on Fri Jul 10 20:20:32 2015.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
import shlex
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('..'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.napoleon',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'girc'
copyright = '2015, Daniel Oaks'
author = 'Daniel Oaks'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.1'
# The full version, including alpha/beta/rc tags.
release = '0.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
import sphinx_py3doc_enhanced_theme
html_theme = "sphinx_py3doc_enhanced_theme"
html_theme_path = [sphinx_py3doc_enhanced_theme.get_html_theme_path()]
html_theme_options = {
'bodyfont': '\'Lucida Grande\', Arial, sans-serif',
'headfont': '\'Lucida Grande\', Arial, sans-serif',
'footerbgcolor': 'white',
'footertextcolor': '#555555',
'relbarbgcolor': 'white',
'relbartextcolor': '#666666',
'relbarlinkcolor': '#444444',
'sidebarbgcolor': 'white',
'sidebartextcolor': '#444444',
'sidebarlinkcolor': '#444444',
'bgcolor': 'white',
'textcolor': '#222222',
'linkcolor': '#0072AA',
'visitedlinkcolor': '#6363bb',
'headtextcolor': '#1a1a1a',
'headbgcolor': 'white',
'headlinkcolor': '#aaaaaa',
'extrastyling': False,
}
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'h', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'r', 'sv', 'tr'
#html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
#html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'gircdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# Latex figure (float) alignment
#'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'girc.tex', 'girc Documentation',
'Daniel Oaks', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'girc', 'girc Documentation',
[author], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'girc', 'girc Documentation',
author, 'girc', 'A modern Python IRC library for Python 3.4, based on asyncio.',
'Internet Relay Chat'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
# taken from http://stackoverflow.com/questions/7250659/
import sys
from os.path import basename
try:
from StringIO import StringIO
except ImportError:
from io import StringIO
from sphinx.util.compat import Directive
from docutils import nodes, statemachine
class ExecDirective(Directive):
"""Execute the specified python code and insert the output into the document"""
has_content = True
def run(self):
oldStdout, sys.stdout = sys.stdout, StringIO()
tab_width = self.options.get('tab-width', self.state.document.settings.tab_width)
source = self.state_machine.input_lines.source(self.lineno - self.state_machine.input_offset - 1)
try:
exec('\n'.join(self.content))
text = sys.stdout.getvalue()
lines = statemachine.string2lines(text, tab_width, convert_whitespace=True)
self.state_machine.insert_input(lines, source)
return []
except Exception:
return [nodes.error(None, nodes.paragraph(text = "Unable to execute python code at %s:%d:" % (basename(source), self.lineno)), nodes.paragraph(text = str(sys.exc_info()[1])))]
finally:
sys.stdout = oldStdout
def setup(app):
app.add_directive('exec', ExecDirective)
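# Illustrative usage (editor addition): once the directive is registered, a .rst page
# can embed the output of arbitrary Python at build time, e.g.
#
#   .. exec::
#
#      print('*this text is generated while the docs are built*')
#
# Any exception raised by the snippet is rendered as an error node pointing at the
# offending source file and line.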
|
DanielOaks/girc
|
docs/conf.py
|
Python
|
isc
| 10,875
|
from funktional.layer import Layer, Dense, StackedGRU, StackedGRUH0, Convolution1D, \
Embedding, OneHot, clipped_rectify, sigmoid, steeper_sigmoid, tanh, CosineDistance,\
last, softmax3d, params, Attention
from funktional.rhn import StackedRHN0
import funktional.context as context
from funktional.layer import params
import imaginet.task as task
from funktional.util import autoassign
import funktional.util as util
from funktional.util import orthogonal, xavier, uniform
import theano.tensor as T
import theano
import zipfile
import numpy
import StringIO
import json
import cPickle as pickle
from theano.tensor.shared_randomstreams import RandomStreams
from imaginet.simple_data import vector_padder
class Encoder(Layer):
def __init__(self, size_vocab, size, depth=1, recur_depth=1,
filter_length=6, filter_size=64, stride=2, drop_i=0.75 , drop_s=0.25, residual=False, seed=1):
autoassign(locals())
self.Conv = Convolution1D(self.size_vocab, self.filter_length, self.filter_size, stride=self.stride)
self.RHN = StackedRHN0(self.filter_size, self.size, depth=self.depth, recur_depth=self.recur_depth,
drop_i=self.drop_i, drop_s=self.drop_s, residual=self.residual, seed=self.seed)
def params(self):
return params(self.Conv, self.RHN)
def __call__(self, input):
return self.RHN(self.Conv(input))
class Visual(task.Task):
def __init__(self, config):
autoassign(locals())
self.margin_size = config.get('margin_size', 0.2)
self.updater = util.Adam(max_norm=config['max_norm'], lr=config['lr'])
self.Encode = Encoder(config['size_vocab'],
config['size'],
filter_length=config.get('filter_length', 6),
filter_size=config.get('filter_size', 1024),
stride=config.get('stride', 3),
depth=config.get('depth', 1),
recur_depth=config.get('recur_depth',1),
drop_i=config.get('drop_i', 0.75),
drop_s=config.get('drop_s', 0.25),
residual=config.get('residual', False),
seed=config.get('seed', 1))
self.Attn = Attention(config['size'], size=config.get('size_attn', 512))
self.ImgEncoder = Dense(config['size_target'], config['size'])
self.inputs = [T.ftensor3()]
self.target = T.fmatrix()
def compile(self):
task.Task.compile(self)
self.encode_images = self._make_encode_images()
self.conv_states = self._make_conv_states()
def params(self):
return params(self.Encode, self.Attn, self.ImgEncoder)
def __call__(self, input):
return util.l2norm(self.Attn(self.Encode(input)))
# FIXME HACK ALERT
def cost(self, i, s_encoded):
if self.config['contrastive']:
i_encoded = util.l2norm(self.ImgEncoder(i))
return util.contrastive(i_encoded, s_encoded, margin=self.margin_size)
else:
raise NotImplementedError
def args(self, item):
return (item['audio'], item['target_v'])
def _make_representation(self):
with context.context(training=False):
rep = self.Encode(*self.inputs)
return theano.function(self.inputs, rep)
def _make_pile(self):
with context.context(training=False):
rep = self.Encode.RHN.intermediate(self.Encode.Conv(*self.inputs))
return theano.function(self.inputs, rep)
def _make_conv_states(self):
with context.context(training=False):
states = self.Encode.Conv(*self.inputs)
return theano.function(self.inputs, states)
def _make_encode_images(self):
images = T.fmatrix()
with context.context(training=False):
rep = util.l2norm(self.ImgEncoder(images))
return theano.function([images], rep)
def encode_sentences(model, audios, batch_size=128):
"""Project audios to the joint space using model.
For each audio returns a vector.
"""
return numpy.vstack([ model.task.predict(vector_padder(batch))
for batch in util.grouper(audios, batch_size) ])
def layer_states(model, audios, batch_size=128):
"""Pass audios through the model and for each audio return the state of each timestep and each layer."""
lens = (numpy.array(map(len, audios)) + model.config['filter_length']) // model.config['stride']
rs = [ r for batch in util.grouper(audios, batch_size) for r in model.task.pile(vector_padder(batch)) ]
return [ r[-l:,:,:] for (r,l) in zip(rs, lens) ]
def conv_states(model, audios, batch_size=128):
"""Pass audios through the model and for each audio return the state of each timestep at the convolutional layer."""
lens = (numpy.array(map(len, audios)) + model.config['filter_length']) // model.config['stride']
rs = [ r for batch in util.grouper(audios, batch_size) for r in model.task.conv_states(vector_padder(batch)) ]
return [ r[-l:,:] for (r,l) in zip(rs, lens) ]
def encode_images(model, imgs, batch_size=128):
"""Project imgs to the joint space using model.
"""
return numpy.vstack([ model.task.encode_images(batch)
for batch in util.grouper(imgs, batch_size) ])
def symbols(model):
return model.batcher.mapper.ids.decoder
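def _example_rank_images(model, audios, imgs):
    """Hedged sketch (editor addition, not part of the original module): both encoders
    emit L2-normalised vectors in the same joint space, so ranking images for spoken
    captions reduces to a dot product."""
    A = encode_sentences(model, audios)   # (n_audio, size)
    I = encode_images(model, imgs)        # (n_image, size)
    sims = numpy.dot(A, I.T)              # cosine similarities
    return numpy.argsort(-sims, axis=1)   # best-matching image indices per utterance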
|
gchrupala/reimaginet
|
imaginet/defn/audiovis_rhn.py
|
Python
|
mit
| 5,671
|
def sum(*args):
total = 0
for number in args:
if isinstance(number, int):
total += number
return total
print(sum(1,5))
|
Fuchida/Archive
|
albme-py/script.py
|
Python
|
mit
| 150
|
"""
File: base.py
Author: Me
Email: yourname@email.com
Github: https://github.com/yourname
Description:
"""
from datetime import timedelta
from django.contrib.auth.models import User
from django.test import TestCase
from django.urls import reverse
from django.utils import timezone
from django.utils.crypto import get_random_string
from rest_framework.test import APITestCase
# from oauth2_provider.tests.test_utils import TestCaseUtils
from oauth2_provider.models import get_application_model, AccessToken
from rest_framework import status
import json
import pytest
from mixer.backend.django import mixer
Application = get_application_model()
pytestmark = pytest.mark.django_db
class PostsBaseTest(APITestCase):
def test_create_user_model(self):
User.objects.create(
username='Hello_World'
)
assert User.objects.count() == 1, "Should be equal"
def set_oauth2_app_by_admin(self, user):
app = Application.objects.create(
name='SuperAPI OAUTH2 APP',
user=user,
client_type=Application.CLIENT_PUBLIC,
authorization_grant_type=Application.GRANT_PASSWORD,
)
return app
def get_token(self, access_user, app):
random = get_random_string(length=1024)
access_token = AccessToken.objects.create(
user=access_user,
scope='read write',
expires=timezone.now() + timedelta(minutes=5),
token=f'{random}---{access_user.username}',
application=app
)
return access_token.token
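    # Hedged usage sketch (editor addition): how a concrete test case would typically
    # combine the helpers above to issue an authenticated request; 'post-list' is a
    # placeholder URL name, not one defined by this project.
    def example_authenticated_request(self):
        user = mixer.blend(User, is_staff=True)
        app = self.set_oauth2_app_by_admin(user)
        token = self.get_token(user, app)
        self.client.credentials(HTTP_AUTHORIZATION='Bearer ' + token)
        return self.client.get(reverse('post-list'))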
|
elastic7327/django-tdd-restful-api
|
src/posts/tests/base.py
|
Python
|
mit
| 1,621
|
from __future__ import print_function
import numpy as np
import turtle
from argparse import ArgumentParser
from base64 import decodestring
from zlib import decompress
# Python 2/3 compat
try:
_input = raw_input
except NameError:
_input = input
'''TODO:
* add a matplotlib-based plotter
* add a path export function (for pasting back into HRM)
* path cleanup (length reduction)
* handwriting -> ascii conversion?
'''
def parse_images(filepath):
lines = open(filepath, 'rb')
while True:
# clever trick!
# when next() raises StopIteration, it stops this generator too
line = next(lines)
if not line.startswith(b'DEFINE '):
continue
_, kind, number = line.split()
kind = kind.decode('ascii')
number = int(number)
raw_data = b''
while not line.endswith(b';'):
line = next(lines).strip()
raw_data += line
# strip ; terminator
raw_data = raw_data[:-1]
# add base64 padding
if len(raw_data) % 4 != 0:
raw_data += b'=' * (2 - (len(raw_data) % 2))
# decode base64 -> decode zlib -> convert to byte array
data = np.fromstring(decompress(decodestring(raw_data)), dtype=np.uint8)
assert data.shape == (1028,)
path_len, = data[:4].view(np.uint32)
path = data[4:4+4*path_len].view(np.uint16).reshape((-1,2))
yield kind, number, path
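# Hedged sketch (editor addition, not part of the original script): the inverse of the
# decoding above, towards the "path export" TODO. It assumes the same 1028-byte layout
# parse_images() reads (uint32 point count followed by uint16 (x, y) pairs); the exact
# base64 line wrapping HRM expects on import is not verified here.
def encode_image(path):
    from base64 import encodestring
    from zlib import compress
    data = np.zeros(1028, dtype=np.uint8)
    data[:4] = np.array([len(path)], dtype=np.uint32).view(np.uint8)
    flat = np.asarray(path, dtype=np.uint16).ravel()
    data[4:4 + 2 * flat.size] = flat.view(np.uint8)
    return encodestring(compress(data.tobytes())) + b';'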
def main():
ap = ArgumentParser()
ap.add_argument('--speed', type=int, default=10,
help='Number 1-10 for drawing speed, or 0 for no added delay')
ap.add_argument('program')
args = ap.parse_args()
for kind, number, path in parse_images(args.program):
title = '%s #%d, path length %d' % (kind, number, path.shape[0])
print(title)
if not path.size:
continue
pen_up = (path==0).all(axis=1)
# convert from path (0 to 65536) to turtle coords (0 to 655.36)
path = path / 100.
turtle.title(title)
turtle.speed(args.speed)
turtle.setworldcoordinates(0, 655.36, 655.36, 0)
turtle.pen(shown=False, pendown=False, pensize=10)
for i,pos in enumerate(path):
if pen_up[i]:
turtle.penup()
else:
turtle.setpos(pos)
turtle.pendown()
turtle.dot(size=10)
_input('Press enter to continue')
turtle.clear()
turtle.bye()
if __name__ == '__main__':
main()
|
perimosocordiae/pyhrm
|
extract_images.py
|
Python
|
mit
| 2,304
|
"""Test cltk.prosody."""
__license__ = 'MIT License. See LICENSE.'
from cltk.prosody.latin.scanner import Scansion as ScansionLatin
from cltk.prosody.latin.clausulae_analysis import Clausulae
from cltk.prosody.greek.scanner import Scansion as ScansionGreek
from cltk.prosody.latin.macronizer import Macronizer
import unittest
class TestSequenceFunctions(unittest.TestCase): # pylint: disable=R0904
"""Class for unittest"""
"""greek/scanner.py"""
# Test string for Greek prosody module unit testing
test = "νέος μὲν καὶ ἄπειρος, δικῶν ἔγωγε ἔτι. μὲν καὶ ἄπειρος."
def test_clean_text_greek(self):
"""Test _clean_text method."""
correct = "νέος μὲν καὶ ἄπειρος δικῶν ἔγωγε ἔτι. μὲν καὶ ἄπειρος."
current = ScansionGreek()._clean_text(self.test)
self.assertEqual(current, correct)
def test_clean_accents_greek(self):
"""Test _clean_accents method."""
correct = "νεος μεν και απειρος δικων εγωγε ετι. μεν και απειρος."
current = ScansionGreek()._clean_accents(self.test)
self.assertEqual(current, correct)
def test_tokenize_greek(self):
"""Test _tokenize method."""
correct = [['νεος', 'μεν', 'και', 'απειρος', 'δικων', 'εγωγε', 'ετι.'],
['μεν', 'και', 'απειρος.']]
current = ScansionGreek()._tokenize(self.test)
self.assertEqual(current, correct)
def test_make_syllables_greek(self):
"""Test _make_syllables method."""
correct = [[['νε', 'ος'], ['μεν'], ['και'], ['α', 'πει', 'ρος'],
['δι', 'κων'], ['ε', 'γω', 'γε'], ['ε', 'τι']], [['μεν'],
['και'], ['α', 'πει', 'ρος']]]
current = ScansionGreek()._make_syllables(self.test)
self.assertEqual(current, correct)
def test_scan_text_greek(self):
"""Test scan_text method."""
correct = ['˘¯¯¯˘¯¯˘¯˘¯˘˘x', '¯¯˘¯x']
current = ScansionGreek().scan_text(self.test)
self.assertEqual(current, correct)
"""latin/macronizer.py"""
def test_retrieve_morpheus_entry(self):
""" Text Macronizer()._retrieve_morpheus_tag()"""
correct = [('n-s---fb-', 'puella', 'puellā'), ('n-s---fn-', 'puella', 'puella'), ('n-s---fv-', 'puella', 'puella')]
current = Macronizer("tag_ngram_123_backoff")._retrieve_morpheus_entry("puella")
self.assertEqual(current, correct)
def test_macronize_word(self):
"""Test Macronizer()._macronize_word()"""
correct = ('flumine', 'n-s---nb-', 'flūmine')
current = Macronizer("tag_ngram_123_backoff")._macronize_word(('flumine', 'n-s---nb-'))
self.assertEqual(current, correct)
def test_macronize_tags(self):
"""Test Macronizer().macronize_tags()"""
text = "Quo usque tandem, O Catilina, abutere nostra patientia?"
correct = [('quo', 'd--------', 'quō'), ('usque', 'd--------', 'usque'), ('tandem', 'd--------', 'tandem'), (',', 'u--------', ','), ('o', 'e--------', 'ō'), ('catilina', 'n-s---mb-', 'catilīnā'), (',', 'u--------', ','), ('abutere', 'v2sfip---', 'abūtēre'), ('nostra', 'a-s---fb-', 'nostrā'), ('patientia', 'n-s---fn-', 'patientia'), ('?', None, '?')]
current = Macronizer("tag_ngram_123_backoff").macronize_tags(text)
self.assertEqual(current, correct)
def test_macronize_text(self):
"""Test Macronizer().macronize_text()"""
text = "Quo usque tandem, O Catilina, abutere nostra patientia?"
correct = "quō usque tandem , ō catilīnā , abūtēre nostrā patientia ?"
current = Macronizer("tag_ngram_123_backoff").macronize_text(text)
self.assertEqual(current, correct)
if __name__ == '__main__':
unittest.main()
|
TylerKirby/cltk
|
cltk/tests/test_nlp/test_prosody.py
|
Python
|
mit
| 3,978
|
from .. import __description__
from ..defender import VkRaidDefender, data, update_data
####################################################################################################
LOGO = '''\
_ _ _ _ __ _
__ _| | __ _ __ __ _(_) __| | __| | ___ / _| ___ _ __ __| | ___ _ __
\ \ / / |/ / | '__/ _` | |/ _` | / _` |/ _ \ |_ / _ \ '_ \ / _` |/ _ \ '__|
\ V /| < | | | (_| | | (_| | | (_| | __/ _| __/ | | | (_| | __/ |
\_/ |_|\_\ |_| \__,_|_|\__,_| \__,_|\___|_| \___|_| |_|\__,_|\___|_|
by alfred richardsn'''
####################################################################################################
from ..logger import logger
from ..settings import CLIENT_ID
import re
import os
import sys
import webbrowser
from getpass import getpass
from argparse import ArgumentParser
from vk_api.exceptions import ApiError
from requests.exceptions import InvalidSchema, ProxyError
class CLIDefender(VkRaidDefender):
def run(self, chat_ids, objectives):
self._chat_ids = chat_ids
self._objectives = objectives
start_screen()
logger.info('начинаю приём сообщений')
try:
self.listen()
except KeyboardInterrupt:
raise
except Exception as e:
start_screen()
logger.critical('произошла критическая ошибка, перезапускаюсь', exc_info=True)
self.listen()
def start_screen():
os.system('cls' if os.name == 'nt' else 'clear')
print(LOGO + '\n\n')
def ask_yes_or_no(question, true_answer='y', false_answer='n', default_answer='', default=True):
true_answer = true_answer.lower()
false_answer = false_answer.lower()
default_answer = default_answer.lower()
output = question.strip() + ' (' + (true_answer.upper() + '/' + false_answer if default else
true_answer + '/' + false_answer.upper()) + '): '
answer = None
while answer not in (true_answer, false_answer, default_answer):
answer = input(output).lower()
if answer == true_answer:
return True
elif answer == false_answer:
return False
else:
return default
def register():
use_webbrowser = ask_yes_or_no('открыть ссылку для авторизации в веб-браузере по умолчанию?')
print()
oauth_url = 'https://oauth.vk.com/authorize?client_id={}&display=page&redirect_uri=https://oauth.vk.com/blank.html&scope=69632&response_type=token'.format(CLIENT_ID)
if use_webbrowser:
webbrowser.open(oauth_url, new=2)
print('в веб-браузере только что была открыта ссылка для авторизации.')
else:
print(oauth_url + '\n')
print('открой в веб-браузере страницу по ссылке выше.')
token = None
while token is None:
user_input = getpass('авторизируйся на открытой странице при необходимости и вставь адресную строку страницы, на которую было осуществлено перенаправление: ')
token = re.search(r'(?:.*access_token=)?([a-f0-9]+).*', user_input)
return token.group(1)
def run(proxy=None, chat_ids=[], objectives=[], auto_login=False):
token = data.get('token')
proxies = data.get('proxies')
if not token or (not auto_login and not ask_yes_or_no('использовать ранее сохранённые данные для авторизации?')):
token = register()
proxies = None
IP_ADDRESS = re.compile(r'((socks5://)|(?:https?://))?(localhost|\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}:\d{1,5})')
if proxy:
match = IP_ADDRESS.match(proxy)
if not proxy or (not match and not auto_login):
proxy = input('введи адрес прокси-сервера при необходимости его использования: ')
while proxy:
match = IP_ADDRESS.match(proxy)
if match:
break
proxy = input('неверный формат адреса сервера, попробуй ещё раз: ')
else:
match = None
if match:
protocol, use_socks, ip = match.groups()
if not protocol:
use_socks = ask_yes_or_no('использовать протокол socks5 вместо http?') if not auto_login else False
if use_socks:
proxies = {'http': 'socks5://' + ip, 'https': 'socks5://' + ip}
else:
proxies = {'http': 'http://' + ip, 'https': 'https://' + ip}
if auto_login or ask_yes_or_no('сохранить введённые данные для следующих сессий?'):
data['token'] = token
data['proxies'] = proxies
update_data()
start_screen()
if not chat_ids:
chat_ids = data.get('chat_ids')
if not objectives:
objectives = data.get('objectives')
if chat_ids is None or objectives is None or (not auto_login and not ask_yes_or_no('использовать ранее сохранённые данные для работы?')):
chat_ids = list(map(int, input('введи айди конф, в которых нужно защищать рейдеров, через пробел: ').split()))
objectives = list(map(int, input('введи айди защищаемых рейдеров: ').split()))
if auto_login or ask_yes_or_no('сохранить введённые данные для следующих сессий?'):
data['chat_ids'] = chat_ids
data['objectives'] = objectives
update_data()
try:
defender = CLIDefender(token, proxies=proxies)
except InvalidSchema:
sys.exit('необходимо установить дополнительные зависимости для поддержки протокола socks5')
except ApiError:
del data['token']
update_data()
sys.exit('введённый токен недействителен')
except ProxyError:
del data['proxies']
update_data()
sys.exit('не удалось подключиться к прокси-серверу')
defender.run(chat_ids, objectives)
def main():
parser = ArgumentParser(prog='vk-raid-defender', description=__description__, usage='%(prog)s [опции]', add_help=False)
group = parser.add_argument_group('опциональные аргументы')
group.add_argument('-h', '--help', action='help', help='показать это сообщение о помощи и выйти')
group.add_argument('-l', '--login', action='store_true', help='осуществить автоматическую авторизацию')
group.add_argument('-p', '--proxy', metavar='proxy_address', help='адрес прокси-сервера')
group.add_argument('-c', '--chats', type=int, nargs='+', metavar='chat', help='айди конф, в которых нужно защищать рейдеров')
group.add_argument('-u', '--users', type=int, nargs='+', metavar='user', help='айди защищаемых рейдеров')
args = parser.parse_args()
try:
run(args.proxy, args.chats, args.users, args.login)
except KeyboardInterrupt:
print()
sys.exit()
if __name__ == "__main__":
main()
|
r4rdsn/vk-raid-defender
|
vk_raid_defender/cli/cli.py
|
Python
|
mit
| 7,730
|
'''
GameData.py
Last Updated: 3/16/17
'''
import json, os
import numpy as np
import pygame as pg
from GameAssets import GameAssets as ga
class GameData():
"""
    GameData class is used to store game state information.
"""
def __init__(self):
'''
Method initiates game state variables.
'''
self.debug = False
self.game_name = "SpaceManBash"
self.delta_sum = 0
self.running = True
# GameFrome Data
self.frames = []
self.frame_current = None
# Configs
self.screen_dim = (800, 600)
self.controls = {
'LEFT' : pg.K_a,
'RIGHT' : pg.K_d,
'UP' : pg.K_w,
'DOWN' : pg.K_s,
'CROUCH' : pg.K_LALT,
'ATTACK' : pg.K_j,
'ALTATTACK' : pg.K_k,
'JUMP' : pg.K_SPACE,
'SPRINT' : pg.K_LSHIFT,
'PAUSE' : pg.K_ESCAPE,
'ENTER' : pg.K_RETURN,
'HOME' : pg.K_h
}
# Save Data
self.saves = []
self.save_index = None
# Level Data
self.levels = []
self.level_index = 0
self.level_background = None
self.level_midground = None
self.camera_pos = np.array([0.0, 0.0, 0.0, 0.0])
self.camera_limits = [0.0, 0.0, 0.0, 0.0]
self.game_objects = []
self.collisions = {}
self.level_scripts = []
self.script_vars = {}
# Player Data
self.player_pos = np.array([0.0, 0.0])
self.player_health = 100
def switch_frame(self, frame):
'''
Method switches current frame to desired frame. Instantiates desired
frame if not found.
Param:
frame ;GameFrame new current frame
'''
for f in self.frames:
if f.__class__.__name__ == frame:
self.frame_current = f
return
module = __import__("GameFrames")
class_ = getattr(module, frame)
instance = class_(self)
self.frames.append(instance)
self.frame_current = self.frames[-1]
def save_config(self, filename):
'''
Method saves game data configurations to file.
Param:
filename ;str config filename
'''
try:
with open("../data/" + filename, "w") as f:
data = {}
data['controls'] = self.controls
data['screen_dim'] = self.screen_dim
json_dump = json.dumps(data)
f.write(json_dump)
except Exception as e:
print("Could Save Config:", filename)
print(e)
def load_config(self, filename):
'''
Method loads game data configurations to file.
Param:
filename ;str config filename
'''
try:
with open("../data/" + filename, "r") as f:
for json_dump in f:
data = json.loads(json_dump)
self.controls = data['controls']
self.screen_dim = data['screen_dim']
except Exception as e:
print("Could Load Config:", filename)
print(e)
def save_save(self, filename):
'''
Method saves game data state to save file.
Param:
filename ;str save filename
'''
try:
with open("../data/saves/" + filename, "w") as f:
data = {}
data["level_index"] = self.level_index
json_dump = json.dumps(data)
f.write(json_dump + '\n')
except Exception as e:
print("Could Save Save Data:", filename)
print(e)
def load_save(self, filename):
'''
Method loads game data state from save file.
Param:
filename ;str save filename
'''
try:
with open("../data/saves/" + filename, "r") as f:
for json_dump in f:
data = json.loads(json_dump)
self.level_index = data["level_index"]
except Exception as e:
print("Could Load Save Data:", filename)
print(e)
def load_game_data(self):
'''
Method loads all game level data from file.
'''
for filename in sorted(os.listdir("../data/levels/")):
if filename.endswith(".lev"):
try:
with open("../data/levels/" + filename, "r") as f:
self.levels.append(f.read())
except Exception as e:
print("Could Load Game Data:", filename)
print(e)
def load_level(self):
'''
Method loads current level.
'''
try:
data = json.loads(self.levels[self.level_index])
self.camera_pos = np.array(data['camera_pos'])
self.camera_limits = np.array(data['camera_limits'])
for go in data['game_objects']:
module = __import__("GameObjects")
class_ = getattr(module, go[0])
instance = class_(go[1:])
self.add_game_object(instance)
pg.mixer.music.load("../data/music/"+data['music'])
pg.mixer.music.set_volume(0.15)
pg.mixer.music.play(loops=3)
self.level_background = getattr(ga, data['background'])
self.level_midground = getattr(ga, data['midground'])
for script in data['scripts']: self.add_level_script(script)
except Exception as e:
print("Couldn't Load Level:", self.level_index)
print(e)
def reset_level(self):
'''
Method resets current level.
'''
self.frame_current.level_loaded = False
self.game_objects = []
self.collisions = {}
self.load_level()
def switch_level(self, index):
'''
Method switches level.
Param:
index ;int index of desired level
'''
self.level_index = index
self.frame_current.level_loaded = False
self.game_objects = []
self.collisions = {}
self.save_save("save_0.sav")
self.load_level()
def add_game_object(self, game_object):
'''
Method adds game object.
Param:
game_object ;GameObject
'''
self.game_objects.append(game_object)
def remove_game_object(self, game_object):
'''
        Method removes game object.
Param:
game_object ;GameObject
'''
self.game_objects.remove(game_object)
def add_level_script(self, script):
        '''
        Method adds level script.
        '''
self.level_scripts.append(script)
def remove_level_script(self, script):
        '''
        Method removes level script.
        '''
self.level_scripts.remove(script)
def update_collisions(self):
'''
Method calculates collisions of game objects at current game state.
Collisions are stored in self.collisions dictionary object.
'''
self.collisions = {}
for go in self.game_objects:
temp = []
for goo in self.game_objects:
if go != goo and go.check_collision(goo.rect):
temp.append(goo)
self.collisions[go] = temp
def center_camera_on_game_object(self, game_object):
'''
Method updates camera position to be centered on desired game object while
remaining in the self.camera_limits boundaries.
Param:
game_object ;GameObject
'''
x = -(game_object.rect[0] + (game_object.rect[2]/2.0)) + (self.screen_dim[0]/2.0)
y = -(game_object.rect[1] + (game_object.rect[3]/2.0)) + (self.screen_dim[1]/2.0)
if x < self.camera_limits[2] and x > self.camera_limits[0]: self.camera_pos[0] = x
if y < self.camera_limits[3] and y > self.camera_limits[1]: self.camera_pos[1] = y
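# Illustrative note (editor addition): the centring above works in screen-offset space.
# With an 800x600 screen and an object whose rect is (1000, 500, 64, 64), the target
# offsets are x = -(1000 + 32) + 400 = -632 and y = -(500 + 32) + 300 = -232, and each
# axis is only applied if it falls strictly inside the corresponding camera_limits.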
|
rz4/SpaceManBash
|
src/GameData.py
|
Python
|
mit
| 8,056
|
"Usage: unparse.py <path to source file>"
import sys
import ast
import cStringIO
import os
# Large float and imaginary literals get turned into infinities in the AST.
# We unparse those infinities to INFSTR.
INFSTR = "1e" + repr(sys.float_info.max_10_exp + 1)
def interleave(inter, f, seq):
"""Call f on each item in seq, calling inter() in between.
"""
seq = iter(seq)
try:
f(next(seq))
except StopIteration:
pass
else:
for x in seq:
inter()
f(x)
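# Illustrative example (editor addition): interleave() only writes the separator
# between items, e.g.
#
#     out = []
#     interleave(lambda: out.append(", "), out.append, ["a", "b", "c"])
#     # out == ["a", ", ", "b", ", ", "c"]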
class UTree:
def __init__(self,name=None):
self._name = name
self._attr = {}
def addAttr(self, attr, value):
self._attr[attr] = value
def output(self,level,tree=None):
for i in range(level):
print "\t"
if tree==None:
print "Type:",self._name
for i in self._attr:
self.output(level+1,self._attr[i])
        elif tree.__class__.__name__ == "UTree":
print "Type:",tree._name
for i in tree._attr:
self.output(level+1,tree._attr[i])
else:
print "Other:",tree
class Unifier:
def __init__(self, tree1, tree2):
self._t1 = tree1
self._t2 = tree2
self._utree = UTree()
self._utree = self.compare_trees(self._t1, self._t2)
def compare_trees(self,t1,t2):
if self.check_entire_tree(t1,t2):
print "t1=t2"
return t1
else:
if self.check_tree_name(t1, t2):
print "t1,t2 have same type:",t1.__class__.__name__
utree = UTree(t1.__class__.__name__)
meth = getattr(self, "_"+t1.__class__.__name__)
                (vals, nodes) = meth(t1, t2)
                print nodes
                if vals is None and nodes is None:
                    print "t1,t2 have different numbers of attributes"
                    return ("???", t1, t2)
                for attr in nodes:
                    node = nodes[attr]
                    utree.addAttr(attr, self.compare_trees(node[0], node[1]))
                return utree
else:
print "t1,t2 have different types:",t1.__class__.__name__,",",t2.__class__.__name__
return ("???",t1,t2)
def _Module(self, t1,t2):
nodes = {}
if len(t1.body)!=len(t2.body):
return (None, None)
for i in range(len(t1.body)):
nodes["body["+str(i)+"]"]=[t1.body[i],t2.body[i]]
return ([],nodes)
def _If(self, t1,t2):
node = {}
node["test"] = [t1.test, t2.test]
node["body"] = [t1.body, t2.body]
# collapse nested ifs into equivalent elifs.
elif_counter = -1
while True:
has_elif1 = t1.orelse and len(t1.orelse) == 1 and isinstance(t1.orelse[0], ast.If)
has_elif2 = t2.orelse and len(t2.orelse) == 1 and isinstance(t2.orelse[0], ast.If)
if has_elif1 and has_elif2:
elif_counter+=1
t1 = t1.orelse[0]
t2 = t2.orelse[0]
node["elif["+str(elif_counter)+"].test"] = [t1.test, t2.test]
node["elif["+str(elif_counter)+"].body"] = [t1.body, t2.body]
elif not has_elif1 and not has_elif2:
break
else:
return (None,None)
# final else
if t1.orelse and t2.orelse:
node["orelse"]=[t1.orelse,t2.orelse]
elif not t1.orelse and not t2.orelse:
pass
else:
return (None, None)
return ([],node)
def check_entire_tree(self,t1,t2):
if t1==t2:
return True
else:
return False
def check_tree_name(self,t1,t2):
if t1.__class__.__name__==t2.__class__.__name__:
return True
else:
return False
def check_value(self,v1,v2):
if v1==v2:
return True
else:
return False
class Unparser:
"""Methods in this class recursively traverse an AST and
output source code for the abstract syntax; original formatting
is disregarded. """
def __init__(self, tree, lines, src_ast, file = sys.stdout):
"""Unparser(tree, file=sys.stdout) -> None.
Print the source for tree to file."""
self.calls = self.crop_calls(lines, src_ast)
self.mod_calls = []
self.functions = src_ast.functions
self.classes = src_ast.classes
self.lines = lines
self.cur_call = -1
self.incall = False
self.cur_str = ""
self.ret_str = False
self.toplevel = True
self.f = file
self.future_imports = []
self._indent = 0
self.dispatch(tree)
self.f.write("\n")
print self.mod_calls
self.f.flush()
def crop_calls(self, lines, src_ast):
calls = []
for i in src_ast.calls:
if i.line<lines[0] or i.line>lines[1]:
continue
else:
calls.append(i)
print "======================="
print "Name:"+str(i.name)
print "line:"+str(i.line)
print "scope:"+str(i.scope)
print "source:"+str(i.source)
print "tree:"+str(i.tree)
print "======================="
return calls
def call_dealer(self,tree):
#self.write("CALL_HERE"+str(tree.lineno)+","+str(tree.col_offset))
def check_reachable(type,id):
if type[id].lines[0]>=self.lines[0] and type[id].lines[1]<=self.lines[1]:
return True
else:
return False
def process_mod_call(call):
self.write("unreachable_method["+str(len(self.mod_calls))+"]")
#self.write("$CALL:"+str(call.source)+"$")
self.mod_calls.append(call)
self.cur_call+=1
call = self.calls[self.cur_call]
if isinstance(call.source, tuple):
source = call.source
else:
source = ("Unknown", call.source)
if source==("Unknown",-1) or source==("member",-1):
return False
elif source[0]=="function":
id = source[1]
if check_reachable(self.functions,id):
return False
else:
process_mod_call(call)
return True
elif source[0]=="class":
id = source[1]
if check_reachable(self.classes,id):
return False
else:
process_mod_call(call)
return True
else: #call import
process_mod_call(call)
return True
def fill(self, text = ""):
"Indent a piece of text, according to the current indentation level"
if not self.ret_str:
self.f.write("\n"+" "*self._indent + text)
else:
self.cur_str+=("\n"+" "*self._indent + text)
def write(self, text):
"Append a piece of text to the current line."
if not self.ret_str:
self.f.write(text)
else:
self.cur_str+=(text)
def enter(self):
"Print ':', and increase the indentation."
self.write(":")
self._indent += 1
def leave(self):
"Decrease the indentation level."
self._indent -= 1
def dispatch(self, tree):
"Dispatcher function, dispatching tree type T to method _T."
if isinstance(tree, list):
for t in tree:
self.dispatch(t)
return
meth = getattr(self, "_"+tree.__class__.__name__)
meth(tree)
############### Unparsing methods ######################
# There should be one method per concrete grammar type #
# Constructors should be grouped by sum type. Ideally, #
# this would follow the order in the grammar, but #
# currently doesn't. #
########################################################
def _Module(self, tree):
for stmt in tree.body:
self.dispatch(stmt)
# stmt
def _Expr(self, tree):
self.fill()
self.dispatch(tree.value)
def _Import(self, t):
self.fill("import ")
interleave(lambda: self.write(", "), self.dispatch, t.names)
def _ImportFrom(self, t):
# A from __future__ import may affect unparsing, so record it.
if t.module and t.module == '__future__':
self.future_imports.extend(n.name for n in t.names)
self.fill("from ")
self.write("." * t.level)
if t.module:
self.write(t.module)
self.write(" import ")
interleave(lambda: self.write(", "), self.dispatch, t.names)
def _Assign(self, t):
self.fill()
for target in t.targets:
self.dispatch(target)
self.write(" = ")
self.dispatch(t.value)
def _AugAssign(self, t):
self.fill()
self.dispatch(t.target)
self.write(" "+self.binop[t.op.__class__.__name__]+"= ")
self.dispatch(t.value)
def _Return(self, t):
self.fill("return")
if t.value:
self.write(" ")
self.dispatch(t.value)
def _Pass(self, t):
self.fill("pass")
def _Break(self, t):
self.fill("break")
def _Continue(self, t):
self.fill("continue")
def _Delete(self, t):
self.fill("del ")
interleave(lambda: self.write(", "), self.dispatch, t.targets)
def _Assert(self, t):
self.fill("assert ")
self.dispatch(t.test)
if t.msg:
self.write(", ")
self.dispatch(t.msg)
def _Exec(self, t):
self.fill("exec ")
self.dispatch(t.body)
if t.globals:
self.write(" in ")
self.dispatch(t.globals)
if t.locals:
self.write(", ")
self.dispatch(t.locals)
def _Print(self, t):
self.fill("print ")
do_comma = False
if t.dest:
self.write(">>")
self.dispatch(t.dest)
do_comma = True
for e in t.values:
if do_comma:self.write(", ")
else:do_comma=True
self.dispatch(e)
if not t.nl:
self.write(",")
def _Global(self, t):
self.fill("global ")
interleave(lambda: self.write(", "), self.write, t.names)
def _Yield(self, t):
self.write("(")
self.write("yield")
if t.value:
self.write(" ")
self.dispatch(t.value)
self.write(")")
def _Raise(self, t):
self.fill('raise ')
if t.type:
self.dispatch(t.type)
if t.inst:
self.write(", ")
self.dispatch(t.inst)
if t.tback:
self.write(", ")
self.dispatch(t.tback)
def _TryExcept(self, t):
self.fill("try")
self.enter()
self.dispatch(t.body)
self.leave()
for ex in t.handlers:
self.dispatch(ex)
if t.orelse:
self.fill("else")
self.enter()
self.dispatch(t.orelse)
self.leave()
def _TryFinally(self, t):
if len(t.body) == 1 and isinstance(t.body[0], ast.TryExcept):
# try-except-finally
self.dispatch(t.body)
else:
self.fill("try")
self.enter()
self.dispatch(t.body)
self.leave()
self.fill("finally")
self.enter()
self.dispatch(t.finalbody)
self.leave()
def _ExceptHandler(self, t):
self.fill("except")
if t.type:
self.write(" ")
self.dispatch(t.type)
if t.name:
self.write(" as ")
self.dispatch(t.name)
self.enter()
self.dispatch(t.body)
self.leave()
def _ClassDef(self, t):
self.write("\n")
for deco in t.decorator_list:
self.fill("@")
self.dispatch(deco)
self.fill("class "+t.name)
if t.bases:
self.write("(")
for a in t.bases:
self.dispatch(a)
self.write(", ")
self.write(")")
self.enter()
self.dispatch(t.body)
self.leave()
def _FunctionDef(self, t):
self.write("\n")
for deco in t.decorator_list:
self.fill("@")
self.dispatch(deco)
self.fill("def "+t.name + "(")
self.dispatch(t.args)
self.write(")")
self.enter()
self.dispatch(t.body)
self.leave()
def _For(self, t):
self.fill("for ")
self.dispatch(t.target)
self.write(" in ")
self.dispatch(t.iter)
self.enter()
self.dispatch(t.body)
self.leave()
if t.orelse:
self.fill("else")
self.enter()
self.dispatch(t.orelse)
self.leave()
def _If(self, t):
self.fill("if ")
self.dispatch(t.test)
self.enter()
self.dispatch(t.body)
self.leave()
# collapse nested ifs into equivalent elifs.
while (t.orelse and len(t.orelse) == 1 and
isinstance(t.orelse[0], ast.If)):
t = t.orelse[0]
self.fill("elif ")
self.dispatch(t.test)
self.enter()
self.dispatch(t.body)
self.leave()
# final else
if t.orelse:
self.fill("else")
self.enter()
self.dispatch(t.orelse)
self.leave()
def _While(self, t):
self.fill("while ")
self.dispatch(t.test)
self.enter()
self.dispatch(t.body)
self.leave()
if t.orelse:
self.fill("else")
self.enter()
self.dispatch(t.orelse)
self.leave()
def _With(self, t):
self.fill("with ")
self.dispatch(t.context_expr)
if t.optional_vars:
self.write(" as ")
self.dispatch(t.optional_vars)
self.enter()
self.dispatch(t.body)
self.leave()
# expr
def _Str(self, tree):
# if from __future__ import unicode_literals is in effect,
# then we want to output string literals using a 'b' prefix
# and unicode literals with no prefix.
if "unicode_literals" not in self.future_imports:
self.write(repr(tree.s))
elif isinstance(tree.s, str):
self.write("b" + repr(tree.s))
elif isinstance(tree.s, unicode):
self.write(repr(tree.s).lstrip("u"))
else:
assert False, "shouldn't get here"
def _Name(self, t):
self.write(t.id)
def _Repr(self, t):
self.write("`")
self.dispatch(t.value)
self.write("`")
def _Num(self, t):
repr_n = repr(t.n)
# Parenthesize negative numbers, to avoid turning (-1)**2 into -1**2.
if repr_n.startswith("-"):
self.write("(")
# Substitute overflowing decimal literal for AST infinities.
self.write(repr_n.replace("inf", INFSTR))
if repr_n.startswith("-"):
self.write(")")
def _List(self, t):
self.write("[")
interleave(lambda: self.write(", "), self.dispatch, t.elts)
self.write("]")
def _ListComp(self, t):
self.write("[")
self.dispatch(t.elt)
for gen in t.generators:
self.dispatch(gen)
self.write("]")
def _GeneratorExp(self, t):
self.write("(")
self.dispatch(t.elt)
for gen in t.generators:
self.dispatch(gen)
self.write(")")
def _SetComp(self, t):
self.write("{")
self.dispatch(t.elt)
for gen in t.generators:
self.dispatch(gen)
self.write("}")
def _DictComp(self, t):
self.write("{")
self.dispatch(t.key)
self.write(": ")
self.dispatch(t.value)
for gen in t.generators:
self.dispatch(gen)
self.write("}")
def _comprehension(self, t):
self.write(" for ")
self.dispatch(t.target)
self.write(" in ")
self.dispatch(t.iter)
for if_clause in t.ifs:
self.write(" if ")
self.dispatch(if_clause)
def _IfExp(self, t):
self.write("(")
self.dispatch(t.body)
self.write(" if ")
self.dispatch(t.test)
self.write(" else ")
self.dispatch(t.orelse)
self.write(")")
def _Set(self, t):
assert(t.elts) # should be at least one element
self.write("{")
interleave(lambda: self.write(", "), self.dispatch, t.elts)
self.write("}")
def _Dict(self, t):
self.write("{")
def write_pair(pair):
(k, v) = pair
self.dispatch(k)
self.write(": ")
self.dispatch(v)
interleave(lambda: self.write(", "), write_pair, zip(t.keys, t.values))
self.write("}")
def _Tuple(self, t):
self.write("(")
if len(t.elts) == 1:
(elt,) = t.elts
self.dispatch(elt)
self.write(",")
else:
interleave(lambda: self.write(", "), self.dispatch, t.elts)
self.write(")")
unop = {"Invert":"~", "Not": "not", "UAdd":"+", "USub":"-"}
def _UnaryOp(self, t):
self.write("(")
self.write(self.unop[t.op.__class__.__name__])
self.write(" ")
# If we're applying unary minus to a number, parenthesize the number.
# This is necessary: -2147483648 is different from -(2147483648) on
# a 32-bit machine (the first is an int, the second a long), and
# -7j is different from -(7j). (The first has real part 0.0, the second
# has real part -0.0.)
if isinstance(t.op, ast.USub) and isinstance(t.operand, ast.Num):
self.write("(")
self.dispatch(t.operand)
self.write(")")
else:
self.dispatch(t.operand)
self.write(")")
binop = { "Add":"+", "Sub":"-", "Mult":"*", "Div":"/", "Mod":"%",
"LShift":"<<", "RShift":">>", "BitOr":"|", "BitXor":"^", "BitAnd":"&",
"FloorDiv":"//", "Pow": "**"}
def _BinOp(self, t):
self.write("(")
self.dispatch(t.left)
self.write(" " + self.binop[t.op.__class__.__name__] + " ")
self.dispatch(t.right)
self.write(")")
cmpops = {"Eq":"==", "NotEq":"!=", "Lt":"<", "LtE":"<=", "Gt":">", "GtE":">=",
"Is":"is", "IsNot":"is not", "In":"in", "NotIn":"not in"}
def _Compare(self, t):
self.write("(")
self.dispatch(t.left)
for o, e in zip(t.ops, t.comparators):
self.write(" " + self.cmpops[o.__class__.__name__] + " ")
self.dispatch(e)
self.write(")")
boolops = {ast.And: 'and', ast.Or: 'or'}
def _BoolOp(self, t):
self.write("(")
s = " %s " % self.boolops[t.op.__class__]
interleave(lambda: self.write(s), self.dispatch, t.values)
self.write(")")
def _Attribute(self,t):
self.dispatch(t.value)
# Special case: 3.__abs__() is a syntax error, so if t.value
# is an integer literal then we need to either parenthesize
# it or add an extra space to get 3 .__abs__().
if isinstance(t.value, ast.Num) and isinstance(t.value.n, int):
self.write(" ")
self.write(".")
self.write(t.attr)
def _Call(self, t):
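        # Merger-specific hook: for an outermost call flagged by call_dealer(),
        # the function expression is dispatched with ret_str set so its text is
        # collected in cur_str and stashed in mod_calls instead of being written
        # straight to the output stream.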
mod = False
if not self.incall:
if self.call_dealer(t):
mod = True
self.incall = True
if not mod:
self.dispatch(t.func)
else:
self.ret_str = True
self.dispatch(t.func)
self.ret_str = False
self.mod_calls[-1] = self.cur_str
self.cur_str = ""
self.write("(")
comma = False
for e in t.args:
if comma: self.write(", ")
else: comma = True
self.dispatch(e)
for e in t.keywords:
if comma: self.write(", ")
else: comma = True
self.dispatch(e)
if t.starargs:
if comma: self.write(", ")
else: comma = True
self.write("*")
self.dispatch(t.starargs)
if t.kwargs:
if comma: self.write(", ")
else: comma = True
self.write("**")
self.dispatch(t.kwargs)
self.write(")")
self.incall = False
def _Subscript(self, t):
self.dispatch(t.value)
self.write("[")
self.dispatch(t.slice)
self.write("]")
# slice
def _Ellipsis(self, t):
self.write("...")
def _Index(self, t):
self.dispatch(t.value)
def _Slice(self, t):
if t.lower:
self.dispatch(t.lower)
self.write(":")
if t.upper:
self.dispatch(t.upper)
if t.step:
self.write(":")
self.dispatch(t.step)
def _ExtSlice(self, t):
interleave(lambda: self.write(', '), self.dispatch, t.dims)
# others
def _arguments(self, t):
first = True
# normal arguments
defaults = [None] * (len(t.args) - len(t.defaults)) + t.defaults
for a,d in zip(t.args, defaults):
if first:first = False
else: self.write(", ")
self.dispatch(a),
if d:
self.write("=")
self.dispatch(d)
# varargs
if t.vararg:
if first:first = False
else: self.write(", ")
self.write("*")
self.write(t.vararg)
# kwargs
if t.kwarg:
if first:first = False
else: self.write(", ")
self.write("**"+t.kwarg)
def _keyword(self, t):
self.write(t.arg)
self.write("=")
self.dispatch(t.value)
def _Lambda(self, t):
self.write("(")
self.write("lambda ")
self.dispatch(t.args)
self.write(": ")
self.dispatch(t.body)
self.write(")")
def _alias(self, t):
self.write(t.name)
if t.asname:
self.write(" as "+t.asname)
def roundtrip(filename1, filename2, output=sys.stdout):
with open(filename1, "r") as pyfile:
source = pyfile.read()
print source
tree1 = compile(source, filename1, "exec", ast.PyCF_ONLY_AST)
with open(filename2, "r") as pyfile:
source = pyfile.read()
tree2 = compile(source, filename2, "exec", ast.PyCF_ONLY_AST)
Unparser(tree1, output)
#mtree = Unifier(tree1, tree2)
#mtree._utree.output(0)
def testdir(a):
try:
names = [n for n in os.listdir(a) if n.endswith('.py')]
except OSError:
sys.stderr.write("Directory not readable: %s" % a)
else:
for n in names:
fullname = os.path.join(a, n)
if os.path.isfile(fullname):
output = cStringIO.StringIO()
print 'Testing %s' % fullname
try:
                    # roundtrip() takes two filenames; round-trip the file against itself here
                    roundtrip(fullname, fullname, output)
except Exception as e:
print ' Failed to compile, exception is %s' % repr(e)
elif os.path.isdir(fullname):
testdir(fullname)
def main(args):
#if args[0] == '--testdir':
# for a in args[1:]:
# testdir(a)
#else:
# for a in args:
roundtrip("./helper.py","./helper2.py")
def generateNewCode(source, lines, src_ast, output=sys.stdout):
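    # Debug dump of the functions recorded in src_ast before re-emitting the
    # source with the merger-aware Unparser.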
tree = compile(source, "", "exec", ast.PyCF_ONLY_AST)
for i in src_ast.functions:
print "======================="
print "ID:"+str(i.id)
print "Name:"+i.name
print "lines:"+str(i.lines)
print "scope:"+str(i.env)
print "member:"+str(i.ismember)
print "======================="
Unparser(tree, lines, src_ast, output)
if __name__=='__main__':
main(sys.argv[1:])
|
h2oloopan/easymerge
|
EasyMerge/merger/unparse.py
|
Python
|
mit
| 24,407
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Setup file for youtube_podcaster.
This file was generated with PyScaffold 2.4.2, a tool that easily
puts up a scaffold for your new Python project. Learn more under:
http://pyscaffold.readthedocs.org/
"""
import sys
from setuptools import setup
def setup_package():
needs_sphinx = {'build_sphinx', 'upload_docs'}.intersection(sys.argv)
sphinx = ['sphinx'] if needs_sphinx else []
setup(setup_requires=['six', 'pyscaffold>=2.4rc1,<2.5a0'] + sphinx,
tests_require=['pytest_cov', 'pytest'],
use_pyscaffold=True)
if __name__ == "__main__":
setup_package()
|
tomvanderlee/youtube-podcaster
|
setup.py
|
Python
|
mit
| 660
|
#!/usr/bin/env python2.7
# -*- mode: python; coding: utf-8; -*-
"""Module for generating lexicon using Rao and Ravichandran's method (2009).
"""
##################################################################
# Imports
from __future__ import unicode_literals, print_function
from blair_goldensohn import build_mtx, seeds2seedpos
from common import POSITIVE, NEGATIVE, NEUTRAL
from graph import Graph
from itertools import chain
from scipy import sparse
import numpy as np
import sys
##################################################################
# Constants
POS_IDX = 0
NEG_IDX = 1
NEUT_IDX = 2
POL_IDX = 1
SCORE_IDX = 2
MAX_I = 300
IDX2CLS = {POS_IDX: POSITIVE, NEG_IDX: NEGATIVE, NEUT_IDX: NEUTRAL}
##################################################################
# Methods
def _eq_sparse(a_M1, a_M2):
"""Compare two sparse matrices.
@param a_M1 - first sparse matrix to compare
@param a_M2 - second sparse matrix to compare
    @return True if both matrices are equal, False otherwise
"""
if type(a_M1) != type(a_M2):
return False
    if not np.allclose(a_M1.get_shape(), a_M2.get_shape()):
return False
X, Y = a_M1.nonzero()
IDX1 = set([(x, y) for x, y in zip(X, Y)])
X, Y = a_M2.nonzero()
IDX2 = [(x, y) for x, y in zip(X, Y) if (x, y) not in IDX1]
IDX = list(IDX1)
IDX.extend(IDX2)
IDX.sort()
for x_i, y_i in IDX:
# print("a_M1[{:d}, {:d}] = {:f}".format(x_i, y_i, a_M1[x_i, y_i]))
# print("a_M2[{:d}, {:d}] = {:f}".format(x_i, y_i, a_M2[x_i, y_i]))
# print("is_close", np.isclose(a_M1[x_i, y_i], a_M2[x_i, y_i]))
if not np.isclose(a_M1[x_i, y_i], a_M2[x_i, y_i]):
return False
return True
def _mtx2tlist(a_Y, a_term2idx):
"""Convert matrix to a list of polar terms.
@param a_Y - matrix of polar terms
@param a_terms2idx - mapping from terms to their matrix indices
@return list of 3-tuples (word, polarity, score)
"""
ret = []
iscore = 0.
irow = None
lex2lidx = {}
ipol = lidx = 0
for (iword, ipos), idx in a_term2idx.iteritems():
# obtain matrix row for that term
irow = a_Y.getrow(idx).toarray()
# print("irow =", repr(irow))
ipol = irow.argmax(axis=1)[0]
iscore = irow[0, ipol]
# print("ipol =", repr(ipol))
# print("iscore =", repr(iscore))
if ipol != NEUT_IDX:
ipol = IDX2CLS[ipol]
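            # the same lexeme can occur with several parts of speech: keep only
            # the polarity whose absolute score is highest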
if iword in lex2lidx:
lidx = lex2lidx[iword]
if abs(iscore) > abs(ret[lidx][SCORE_IDX]):
ret[lidx][POL_IDX] = ipol
ret[lidx][SCORE_IDX] = iscore
else:
lex2lidx[iword] = len(ret)
ret.append([iword, ipol, iscore])
return ret
def _sign_normalize(a_Y, a_terms2idx, a_pos, a_neg, a_neut,
a_set_dflt=None):
"""Fix seed values and row-normalize the class matrix.
@param a_Y - class matrix to be changed
@param a_terms2idx - mapping from terms to their matrix indices
@param a_pos - set of lexemes with positive polarity
@param a_neg - set of lexemes with negative polarity
@param a_neut - set of lexemes with neutral polarity
    @param a_set_dflt - function to set the default value of an unknown term
@return void
@note modifies the input matrix in place
"""
seed_found = False
for iterm, i in a_terms2idx.iteritems():
if iterm in a_pos:
seed_found = True
a_Y[i, :] = 0.
a_Y[i, POS_IDX] = 1.
elif iterm in a_neg:
seed_found = True
a_Y[i, :] = 0.
a_Y[i, NEG_IDX] = 1.
elif iterm in a_neut:
seed_found = True
a_Y[i, :] = 0.
a_Y[i, NEUT_IDX] = 1.
elif a_set_dflt is not None:
a_set_dflt(a_Y, i)
assert seed_found, "No seed term found in matrix."
# normalize class scores
Z = a_Y.sum(1)
x, y = a_Y.nonzero()
for i, j in zip(x, y):
# print("a_Y[{:d}, {:d}] =".format(i, j), repr(a_Y[i, j]))
# print("Z[{:d}, 0] =".format(i), repr(Z[i, 0]))
a_Y[i, j] /= float(Z[i, 0]) or 1.
# print("*a_Y[{:d}, {:d}] =".format(i, j), repr(a_Y[i, j]))
def prune_normalize(a_M):
"""Make each of the adjacency matrix sum up to one.
Args:
a_M (scipy.sparse.csr): matrix to be normalized
Returns:
void:
Note:
modifies the input matrix in place
"""
# remove negative transitions
nonzero_xy = a_M.nonzero()
for i, j in zip(*nonzero_xy):
if a_M[i, j] < 0.:
a_M[i, j] = 0.
a_M.prune()
# normalize all outgoing transitions
Z = a_M.sum(0)
nonzero_xy = a_M.nonzero()
for i, j in zip(*nonzero_xy):
a_M[i, j] /= float(Z[0, j]) or 1.
def rao_min_cut(a_germanet, a_pos, a_neg, a_neut, a_seed_pos,
a_ext_syn_rels):
"""Extend sentiment lexicons using the min-cut method of Rao (2009).
@param a_germanet - GermaNet instance
@param a_pos - set of lexemes with positive polarity
@param a_neg - set of lexemes with negative polarity
@param a_neut - set of lexemes with neutral polarity
@param a_seed_pos - part-of-speech class of seed synsets ("none" for no
restriction)
@param a_ext_syn_rels - use extended set of synonymous relations
@return list of polar terms, their polarities, and scores
"""
sgraph = Graph(a_germanet, a_ext_syn_rels)
# partition the graph into subjective and objective terms
mcs, cut_edges, _, _ = sgraph.min_cut(a_pos | a_neg, a_neut, a_seed_pos)
print("min_cut_score (subj. vs. obj.) = {:d}".format(mcs),
file=sys.stderr)
# remove edges belonging to the min cut (i.e., cut the graph)
for isrc, itrg in cut_edges:
if isrc in sgraph.nodes:
sgraph.nodes[isrc].pop(itrg, None)
# separate the graph into positive and negative terms
mcs, _, pos, neg = sgraph.min_cut(a_pos, a_neg, a_seed_pos)
print("min_cut_score (pos. vs. neg.) = {:d}".format(mcs),
file=sys.stderr)
ret = [(inode[0], POSITIVE, 1.) for inode in pos]
ret.extend((inode[0], NEGATIVE, -1.) for inode in neg)
return ret
def rao_lbl_prop(a_germanet, a_pos, a_neg, a_neut, a_seed_pos,
a_ext_syn_rels):
"""Extend sentiment lexicons using the lbl-prop method of Rao (2009).
@param a_germanet - GermaNet instance
@param a_pos - set of lexemes with positive polarity
@param a_neg - set of lexemes with negative polarity
@param a_neut - set of lexemes with neutral polarity
@param a_seed_pos - part-of-speech class of seed synsets ("none" for no
restriction)
@param a_ext_syn_rels - use extended set of synonymous relations
@return list of polar terms, their polarities, and scores
"""
if a_seed_pos is None:
a_seed_pos = ["adj", "nomen", "verben"]
else:
a_seed_pos = [a_seed_pos]
a_pos = seeds2seedpos(a_pos, a_seed_pos)
a_neg = seeds2seedpos(a_neg, a_seed_pos)
a_neut = seeds2seedpos(a_neut, a_seed_pos)
# obtain and row-normalize the adjacency matrix
terms = set((ilex, ipos)
for isynid, ipos in a_germanet.synid2pos.iteritems()
for ilexid in a_germanet.synid2lexids[isynid]
for ilex in a_germanet.lexid2lex[ilexid]
)
terms2idx = {iterm: i for i, iterm in enumerate(terms)}
M = build_mtx(a_germanet, terms2idx, set(),
a_ext_syn_rels, len(terms))
prune_normalize(M)
# no need to transpose M[i, j] is the link going from node j to the node i;
# and, in Y, the Y[j, k] cell is the polarity score of the class k for the
# term j
# M = M.transpose()
# check that the matrix is column normalized
    # note: np.all() over a generator expression is always truthy, so use the
    # built-in all() over the flattened column sums instead
    assert all(i == 0 or np.isclose(i, 1.)
               for i in np.asarray(M.sum(0)).ravel())
# initialize label matrix
Y = sparse.lil_matrix((len(terms), len(IDX2CLS)), dtype=np.float32)
def _set_neut_one(X, i):
X[i, NEUT_IDX] = 1.
_sign_normalize(Y, terms2idx, a_pos, a_neg, a_neut,
_set_neut_one)
# Y = Y.tocsr()
# output first M row and Y column
# for i in xrange(len(terms)):
# if M[0, i] != 0:
# print("M[0, {:d}] =".format(i), M[0, i], file=sys.stderr)
# if Y[i, 0] != 0:
# print("Y[i, 0] =", Y[i, 0], file=sys.stderr)
# B = M.dot(Y)
# print("B[0, 0] =", B[0, 0], file=sys.stderr)
# perform multiplication until convergence
i = 0
prev_Y = None
while not _eq_sparse(prev_Y, Y) and i < MAX_I:
prev_Y = Y.copy()
Y = Y.tocsc()
Y = M.dot(Y)
Y = Y.tolil()
_sign_normalize(Y, terms2idx, a_pos, a_neg, a_neut)
i += 1
ret = _mtx2tlist(Y, terms2idx)
ret.sort(key=lambda el: abs(el[-1]), reverse=True)
return ret
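# Illustrative use (the GermaNet instance and seed sets are project-specific):
#   terms = rao_lbl_prop(germanet, pos_seeds, neg_seeds, neut_seeds,
#                        a_seed_pos=None, a_ext_syn_rels=True)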
|
WladimirSidorenko/SentiLex
|
scripts/rao.py
|
Python
|
mit
| 8,962
|
# Copyright (c) 2019 Valentin Valls <valentin.valls@esrf.fr>
# Copyright (c) 2020-2021 hippo91 <guillaume.peillex@gmail.com>
# Copyright (c) 2020 Claudiu Popa <pcmanticore@gmail.com>
# Copyright (c) 2021 Pierre Sassoulas <pierre.sassoulas@gmail.com>
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
# For details: https://github.com/PyCQA/astroid/blob/master/LICENSE
"""Astroid hooks for scipy.signal module."""
import astroid
def scipy_signal():
return astroid.parse(
"""
# different functions defined in scipy.signals
def barthann(M, sym=True):
return numpy.ndarray([0])
def bartlett(M, sym=True):
return numpy.ndarray([0])
def blackman(M, sym=True):
return numpy.ndarray([0])
def blackmanharris(M, sym=True):
return numpy.ndarray([0])
def bohman(M, sym=True):
return numpy.ndarray([0])
def boxcar(M, sym=True):
return numpy.ndarray([0])
def chebwin(M, at, sym=True):
return numpy.ndarray([0])
def cosine(M, sym=True):
return numpy.ndarray([0])
def exponential(M, center=None, tau=1.0, sym=True):
return numpy.ndarray([0])
def flattop(M, sym=True):
return numpy.ndarray([0])
def gaussian(M, std, sym=True):
return numpy.ndarray([0])
def general_gaussian(M, p, sig, sym=True):
return numpy.ndarray([0])
def hamming(M, sym=True):
return numpy.ndarray([0])
def hann(M, sym=True):
return numpy.ndarray([0])
def hanning(M, sym=True):
return numpy.ndarray([0])
def impulse2(system, X0=None, T=None, N=None, **kwargs):
return numpy.ndarray([0]), numpy.ndarray([0])
def kaiser(M, beta, sym=True):
return numpy.ndarray([0])
def nuttall(M, sym=True):
return numpy.ndarray([0])
def parzen(M, sym=True):
return numpy.ndarray([0])
def slepian(M, width, sym=True):
return numpy.ndarray([0])
def step2(system, X0=None, T=None, N=None, **kwargs):
return numpy.ndarray([0]), numpy.ndarray([0])
def triang(M, sym=True):
return numpy.ndarray([0])
def tukey(M, alpha=0.5, sym=True):
return numpy.ndarray([0])
"""
)
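# Register the stub above so astroid (and tools built on it, such as pylint)
# can resolve scipy.signal members during static analysis.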
astroid.register_module_extender(astroid.MANAGER, "scipy.signal", scipy_signal)
|
ruchee/vimrc
|
vimfiles/bundle/vim-python/submodules/astroid/astroid/brain/brain_scipy_signal.py
|
Python
|
mit
| 2,437
|
# Copyright (C) 2011 Lukas Lalinsky
# Distributed under the MIT license, see the LICENSE file for details.
import re
import syslog
from logging import Handler
from logging.handlers import SysLogHandler
class LocalSysLogHandler(Handler):
"""
Logging handler that logs to the local syslog using the syslog module
"""
facility_names = {
"auth": syslog.LOG_AUTH,
"cron": syslog.LOG_CRON,
"daemon": syslog.LOG_DAEMON,
"kern": syslog.LOG_KERN,
"lpr": syslog.LOG_LPR,
"mail": syslog.LOG_MAIL,
"news": syslog.LOG_NEWS,
"syslog": syslog.LOG_SYSLOG,
"user": syslog.LOG_USER,
"uucp": syslog.LOG_UUCP,
"local0": syslog.LOG_LOCAL0,
"local1": syslog.LOG_LOCAL1,
"local2": syslog.LOG_LOCAL2,
"local3": syslog.LOG_LOCAL3,
"local4": syslog.LOG_LOCAL4,
"local5": syslog.LOG_LOCAL5,
"local6": syslog.LOG_LOCAL6,
"local7": syslog.LOG_LOCAL7,
}
priority_map = {
"DEBUG": syslog.LOG_DEBUG,
"INFO": syslog.LOG_INFO,
"WARNING": syslog.LOG_WARNING,
"ERROR": syslog.LOG_ERR,
"CRITICAL": syslog.LOG_CRIT
}
def __init__(self, ident=None, facility=syslog.LOG_USER, log_pid=False):
Handler.__init__(self)
self.facility = facility
if isinstance(facility, basestring):
self.facility = self.facility_names[facility]
options = 0
if log_pid:
options |= syslog.LOG_PID
syslog.openlog(ident, options, self.facility)
self.formatter = None
def close(self):
Handler.close(self)
syslog.closelog()
def emit(self, record):
try:
msg = self.format(record)
if isinstance(msg, unicode):
msg = msg.encode('utf-8')
priority = self.priority_map[record.levelname]
for m in msg.splitlines():
syslog.syslog(self.facility | priority, m)
except StandardError:
self.handleError(record)
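# Illustrative usage (the "myapp" ident and "local0" facility are assumptions):
#   import logging
#   handler = LocalSysLogHandler(ident="myapp", facility="local0", log_pid=True)
#   handler.setFormatter(logging.Formatter("%(name)s: %(message)s"))
#   logging.getLogger().addHandler(handler)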
|
lalinsky/mb2freedb
|
mb2freedb/utils.py
|
Python
|
mit
| 2,120
|
"""
Q4- Write a Python function, odd, that takes in one number and returns True when the number is odd and False otherwise. You should use the % (mod) operator, not if. This function takes in one number and returns a boolean
"""
def odd( number ):
return number % 2 == 1
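# e.g. odd(3) -> True, odd(4) -> False; this also works for negative numbers,
# since Python's % always returns a non-negative result for a positive modulus.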
number = int( input( "Enter a number: ") )
print( "Is the number " + str( number ) + " odd? Answer: " + str( odd( number) ) )
|
SuyashD95/python-assignments
|
Assignment 3/odd.py
|
Python
|
mit
| 403
|
# Copyright 2015 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Client for interacting with the Google BigQuery API."""
from google.cloud.client import ClientWithProject
from google.cloud.bigquery._http import Connection
from google.cloud.bigquery.dataset import Dataset
from google.cloud.bigquery.job import CopyJob
from google.cloud.bigquery.job import ExtractTableToStorageJob
from google.cloud.bigquery.job import LoadTableFromStorageJob
from google.cloud.bigquery.job import QueryJob
from google.cloud.bigquery.query import QueryResults
from google.cloud.iterator import HTTPIterator
class Project(object):
"""Wrapper for resource describing a BigQuery project.
:type project_id: str
:param project_id: Opaque ID of the project
:type numeric_id: int
:param numeric_id: Numeric ID of the project
:type friendly_name: str
:param friendly_name: Display name of the project
"""
def __init__(self, project_id, numeric_id, friendly_name):
self.project_id = project_id
self.numeric_id = numeric_id
self.friendly_name = friendly_name
@classmethod
def from_api_repr(cls, resource):
"""Factory: construct an instance from a resource dict."""
return cls(
resource['id'], resource['numericId'], resource['friendlyName'])
class Client(ClientWithProject):
"""Client to bundle configuration needed for API requests.
:type project: str
:param project: the project which the client acts on behalf of. Will be
passed when creating a dataset / job. If not passed,
falls back to the default inferred from the environment.
:type credentials: :class:`~google.auth.credentials.Credentials`
:param credentials: (Optional) The OAuth2 Credentials to use for this
client. If not passed (and if no ``_http`` object is
passed), falls back to the default inferred from the
environment.
:type _http: :class:`~httplib2.Http`
:param _http: (Optional) HTTP object to make requests. Can be any object
that defines ``request()`` with the same interface as
:meth:`~httplib2.Http.request`. If not passed, an
``_http`` object is created that is bound to the
``credentials`` for the current object.
This parameter should be considered private, and could
change in the future.
"""
SCOPE = ('https://www.googleapis.com/auth/bigquery',
'https://www.googleapis.com/auth/cloud-platform')
"""The scopes required for authenticating as a BigQuery consumer."""
def __init__(self, project=None, credentials=None, _http=None):
super(Client, self).__init__(
project=project, credentials=credentials, _http=_http)
self._connection = Connection(self)
def list_projects(self, max_results=None, page_token=None):
"""List projects for the project associated with this client.
See:
https://cloud.google.com/bigquery/docs/reference/rest/v2/projects/list
:type max_results: int
        :param max_results: maximum number of projects to return. If not
passed, defaults to a value set by the API.
:type page_token: str
:param page_token: opaque marker for the next "page" of projects. If
not passed, the API will return the first page of
projects.
:rtype: :class:`~google.cloud.iterator.Iterator`
:returns: Iterator of :class:`~google.cloud.bigquery.client.Project`
accessible to the current client.
"""
return HTTPIterator(
client=self, path='/projects', item_to_value=_item_to_project,
items_key='projects', page_token=page_token,
max_results=max_results)
def list_datasets(self, include_all=False, max_results=None,
page_token=None):
"""List datasets for the project associated with this client.
See:
https://cloud.google.com/bigquery/docs/reference/rest/v2/datasets/list
:type include_all: bool
:param include_all: True if results include hidden datasets.
:type max_results: int
        :param max_results: maximum number of datasets to return. If not
passed, defaults to a value set by the API.
:type page_token: str
:param page_token: opaque marker for the next "page" of datasets. If
not passed, the API will return the first page of
datasets.
:rtype: :class:`~google.cloud.iterator.Iterator`
        :returns: Iterator of :class:`~google.cloud.bigquery.dataset.Dataset`
accessible to the current client.
"""
extra_params = {}
if include_all:
extra_params['all'] = True
path = '/projects/%s/datasets' % (self.project,)
return HTTPIterator(
client=self, path=path, item_to_value=_item_to_dataset,
items_key='datasets', page_token=page_token,
max_results=max_results, extra_params=extra_params)
def dataset(self, dataset_name, project=None):
"""Construct a dataset bound to this client.
:type dataset_name: str
:param dataset_name: Name of the dataset.
:type project: str
:param project: (Optional) project ID for the dataset (defaults to
the project of the client).
:rtype: :class:`google.cloud.bigquery.dataset.Dataset`
:returns: a new ``Dataset`` instance
"""
return Dataset(dataset_name, client=self, project=project)
def job_from_resource(self, resource):
"""Detect correct job type from resource and instantiate.
:type resource: dict
:param resource: one job resource from API response
:rtype: One of:
:class:`google.cloud.bigquery.job.LoadTableFromStorageJob`,
:class:`google.cloud.bigquery.job.CopyJob`,
:class:`google.cloud.bigquery.job.ExtractTableToStorageJob`,
:class:`google.cloud.bigquery.job.QueryJob`,
:class:`google.cloud.bigquery.job.RunSyncQueryJob`
:returns: the job instance, constructed via the resource
"""
config = resource['configuration']
if 'load' in config:
return LoadTableFromStorageJob.from_api_repr(resource, self)
elif 'copy' in config:
return CopyJob.from_api_repr(resource, self)
elif 'extract' in config:
return ExtractTableToStorageJob.from_api_repr(resource, self)
elif 'query' in config:
return QueryJob.from_api_repr(resource, self)
raise ValueError('Cannot parse job resource')
def list_jobs(self, max_results=None, page_token=None, all_users=None,
state_filter=None):
"""List jobs for the project associated with this client.
See:
https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/list
:type max_results: int
        :param max_results: maximum number of jobs to return. If not
passed, defaults to a value set by the API.
:type page_token: str
:param page_token: opaque marker for the next "page" of jobs. If
not passed, the API will return the first page of
jobs.
:type all_users: bool
:param all_users: if true, include jobs owned by all users in the
project.
:type state_filter: str
:param state_filter: if passed, include only jobs matching the given
state. One of
* ``"done"``
* ``"pending"``
* ``"running"``
:rtype: :class:`~google.cloud.iterator.Iterator`
:returns: Iterable of job instances.
"""
extra_params = {'projection': 'full'}
if all_users is not None:
extra_params['allUsers'] = all_users
if state_filter is not None:
extra_params['stateFilter'] = state_filter
path = '/projects/%s/jobs' % (self.project,)
return HTTPIterator(
client=self, path=path, item_to_value=_item_to_job,
items_key='jobs', page_token=page_token,
max_results=max_results, extra_params=extra_params)
def load_table_from_storage(self, job_name, destination, *source_uris):
"""Construct a job for loading data into a table from CloudStorage.
See:
https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load
:type job_name: str
:param job_name: Name of the job.
:type destination: :class:`google.cloud.bigquery.table.Table`
:param destination: Table into which data is to be loaded.
:type source_uris: sequence of string
:param source_uris: URIs of data files to be loaded; in format
``gs://<bucket_name>/<object_name_or_glob>``.
:rtype: :class:`google.cloud.bigquery.job.LoadTableFromStorageJob`
:returns: a new ``LoadTableFromStorageJob`` instance
"""
return LoadTableFromStorageJob(job_name, destination, source_uris,
client=self)
def copy_table(self, job_name, destination, *sources):
"""Construct a job for copying one or more tables into another table.
See:
https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.copy
:type job_name: str
:param job_name: Name of the job.
:type destination: :class:`google.cloud.bigquery.table.Table`
:param destination: Table into which data is to be copied.
:type sources: sequence of :class:`google.cloud.bigquery.table.Table`
:param sources: tables to be copied.
:rtype: :class:`google.cloud.bigquery.job.CopyJob`
:returns: a new ``CopyJob`` instance
"""
return CopyJob(job_name, destination, sources, client=self)
def extract_table_to_storage(self, job_name, source, *destination_uris):
"""Construct a job for extracting a table into Cloud Storage files.
See:
https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.extract
:type job_name: str
:param job_name: Name of the job.
:type source: :class:`google.cloud.bigquery.table.Table`
:param source: table to be extracted.
:type destination_uris: sequence of string
:param destination_uris: URIs of CloudStorage file(s) into which
table data is to be extracted; in format
``gs://<bucket_name>/<object_name_or_glob>``.
:rtype: :class:`google.cloud.bigquery.job.ExtractTableToStorageJob`
:returns: a new ``ExtractTableToStorageJob`` instance
"""
return ExtractTableToStorageJob(job_name, source, destination_uris,
client=self)
def run_async_query(self, job_name, query,
udf_resources=(), query_parameters=()):
"""Construct a job for running a SQL query asynchronously.
See:
https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.query
:type job_name: str
:param job_name: Name of the job.
:type query: str
:param query: SQL query to be executed
:type udf_resources: tuple
:param udf_resources: An iterable of
:class:`google.cloud.bigquery._helpers.UDFResource`
(empty by default)
:type query_parameters: tuple
:param query_parameters:
An iterable of
:class:`google.cloud.bigquery._helpers.AbstractQueryParameter`
(empty by default)
:rtype: :class:`google.cloud.bigquery.job.QueryJob`
:returns: a new ``QueryJob`` instance
"""
return QueryJob(job_name, query, client=self,
udf_resources=udf_resources,
query_parameters=query_parameters)
def run_sync_query(self, query, udf_resources=(), query_parameters=()):
"""Run a SQL query synchronously.
:type query: str
:param query: SQL query to be executed
:type udf_resources: tuple
:param udf_resources: An iterable of
:class:`google.cloud.bigquery._helpers.UDFResource`
(empty by default)
:type query_parameters: tuple
:param query_parameters:
An iterable of
:class:`google.cloud.bigquery._helpers.AbstractQueryParameter`
(empty by default)
:rtype: :class:`google.cloud.bigquery.query.QueryResults`
:returns: a new ``QueryResults`` instance
"""
return QueryResults(query, client=self,
udf_resources=udf_resources,
query_parameters=query_parameters)
# pylint: disable=unused-argument
def _item_to_project(iterator, resource):
"""Convert a JSON project to the native object.
:type iterator: :class:`~google.cloud.iterator.Iterator`
:param iterator: The iterator that is currently in use.
:type resource: dict
:param resource: An item to be converted to a project.
:rtype: :class:`.Project`
:returns: The next project in the page.
"""
return Project.from_api_repr(resource)
# pylint: enable=unused-argument
def _item_to_dataset(iterator, resource):
"""Convert a JSON dataset to the native object.
:type iterator: :class:`~google.cloud.iterator.Iterator`
:param iterator: The iterator that is currently in use.
:type resource: dict
:param resource: An item to be converted to a dataset.
:rtype: :class:`.Dataset`
:returns: The next dataset in the page.
"""
return Dataset.from_api_repr(resource, iterator.client)
def _item_to_job(iterator, resource):
"""Convert a JSON job to the native object.
:type iterator: :class:`~google.cloud.iterator.Iterator`
:param iterator: The iterator that is currently in use.
:type resource: dict
:param resource: An item to be converted to a job.
:rtype: job instance.
:returns: The next job in the page.
"""
return iterator.client.job_from_resource(resource)
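# Illustrative usage (the project ID and job name are placeholders):
#   client = Client(project="my-project")
#   for dataset in client.list_datasets():
#       ...
#   job = client.run_async_query("job-1", "SELECT 1")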
|
ammarkhann/FinalSeniorCode
|
lib/python2.7/site-packages/google/cloud/bigquery/client.py
|
Python
|
mit
| 15,220
|
# encoding: utf-8
"""
Rules system.
"""
import copy
from operator import itemgetter
class RuleBase(object):
"""
    All rules inherit from RuleBase. Every rule needs a condition and a response.
    RuleBase is the base model for all rules. Through this class a rule can access
    the main class (Gozokia), the sentence (the input), and/or the
    analyzer (if it is active).
"""
completed = False
reload = True
response_output = ""
print_output = ""
def __init__(self):
self.set_reload(False)
def condition_raise(self, *args, **kwargs):
self.gozokia = kwargs.get('gozokia')
self.analyzer = self.gozokia.analyzer
self.sentence = self.gozokia.sentence
def condition_completed(self, *args, **kwargs):
self.gozokia = kwargs.get('gozokia')
self.analyzer = self.gozokia.analyzer
self.sentence = self.gozokia.sentence
def response(self, *args, **kwargs):
raise NotImplementedError(__class__.__name__ + ": response not defined")
def get_response(self, *args, **kwargs):
self.response(*args, **kwargs)
return self.response_output, self.print_output
def is_completed(self, *args, **kwargs):
return self.completed
def set_completed(self, status=True):
self.completed = status
def set_reload(self, reload):
self.reload = reload
def reload_rule(self):
if self.reload:
self.set_completed(False)
return True
else:
return False
def __str__(self):
return self.__class__.__name__
class Rules(object):
__rules_pool = []
__rules_map = {}
__rules_qeue = []
__rules_qeue_completed = []
__active_rule = None
_STATUS_RULES_KEY = "status"
_STATUS_RULES = (0, 1, 2)
_STATUS_RULE_COMPLETED = 0
_STATUS_RULE_PENDING = 1
_STATUS_RULE_ACTIVE = 2
_RAISE_COND = 1
_OBJETIVE_COND = 2
__RULE_KEY_CLASS = "class"
__RULE_KEY_NAME = "rule"
def __init__(self, * args, **kwargs):
self.session_id = kwargs['sessionid']
        # Attach this session's id to every rule in the pool
for rule_pool in self.__rules_pool:
rule_pool['session'] = self.session_id
self.__rules_map[self.session_id] = []
self._rules_completed = self.__rules_map[self.session_id]
self.__rules_map[self.session_id] = self.__rules_pool
self.__rules_qeue = self.__rules_map[self.session_id]
"""
self.__rules_qeue = copy.copy(self.__rules_pool)
"""
def add(self, rule_class, **options):
rank = 10
type_rule = None
rule_object = rule_class()
if 'rank' in options and type(options['rank']) is int:
rank = options['rank']
if 'type' in options and type(options['type']) is int:
type_rule = options['type']
if 'name' in options and type(options['name']) is str:
rule_name = options['name']
else:
rule_name = str(rule_object)
        # Session is None here because "add" is used as a decorator: it runs
        # before __init__ has been executed.
if rule_name not in set(r['rule'] for r in self.__rules_pool):
self.__rules_pool.append({'session': None, 'rule': rule_name, self.__RULE_KEY_CLASS: copy.copy(rule_object),
'rank': rank, 'type': type_rule,
self._STATUS_RULES_KEY: self._STATUS_RULE_PENDING})
def get_rules(self, type_rule=None):
"""
        Return the rules ordered by rank, optionally filtered by rule type.
"""
f = lambda x: True
if type_rule in [self._RAISE_COND, self._OBJETIVE_COND]:
f = lambda x: x['type'] == type_rule and x[self.__RULE_KEY_CLASS].completed == False
return sorted(filter(f, self.__rules_qeue), key=itemgetter('rank'))
def get_rules_completed(self):
return sorted(self.__rules_qeue_completed, key=itemgetter('rank'))
def get_raises(self):
for rule in self.get_rules(type_rule=self._RAISE_COND):
yield rule
def get_objetives(self):
for rule in self.get_rules(type_rule=self._OBJETIVE_COND):
yield rule
def get_rule(self, gozokia):
"""
Get the active rule or find one.
"""
if self.exist_active_rule():
active_rule_object = self.get_active_rule().get(self.__RULE_KEY_CLASS)
active_rule_object.condition_completed(gozokia=gozokia)
if active_rule_object.is_completed():
self.complete_active_rule()
self.get_rule(gozokia=gozokia)
else:
for r in self:
if r.get(self.__RULE_KEY_CLASS).condition_raise(gozokia=gozokia):
self.set_active_rule(r)
break
return self.__active_rule
def eval(self, gozokia):
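        # Resolve the rule that should run, build its response, then re-check
        # its completion condition so a finished rule releases the active slot.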
response_output = None
print_output = None
rule = self.get_rule(gozokia)
if rule:
active_rule_object = rule.get(self.__RULE_KEY_CLASS)
response_output, print_output = active_rule_object.get_response()
active_rule_object.condition_completed(gozokia=gozokia)
if active_rule_object.is_completed():
self.complete_active_rule()
return rule, response_output, print_output
def set_rule_status_active(self, rule):
print("RULE {} start".format(rule.get(self.__RULE_KEY_NAME)))
rule[self._STATUS_RULES_KEY] = self._STATUS_RULE_ACTIVE
self.set_active_rule(None)
def set_rule_status_pending(self, rule):
print("RULE {} pending".format(rule.get(self.__RULE_KEY_NAME)))
rule[self._STATUS_RULES_KEY] = self._STATUS_RULE_PENDING
def set_rule_status_completed(self, rule):
print("RULE {} completed".format(rule.get(self.__RULE_KEY_NAME)))
rule[self._STATUS_RULES_KEY] = self._STATUS_RULE_COMPLETED
def complete_active_rule(self):
rule = self.get_active_rule()
self.set_rule_completed(rule)
self.set_active_rule(None)
def set_rule_completed(self, rule):
self.set_rule_status_completed(rule)
if rule.get(self.__RULE_KEY_CLASS).reload_rule() is False:
self.pop(rule)
def set_rule_pending(self, rule):
self.set_rule_status_pending(rule)
def get_active_rule(self, key=None):
if key is None:
rule = self.__active_rule
else:
rule = self.__active_rule[key]
return rule
def set_active_rule(self, rule=None):
if rule:
self.set_rule_status_active(rule)
self.__active_rule = rule
def stop_active_rule(self):
self.set_rule_status_pending(self.__active_rule)
self.set_active_rule(None)
def exist_active_rule(self):
return self.__active_rule is not None
def pop(self, rule):
# Pop rule from main queue
self.__rules_qeue = [r for r in self if r.get(self.__RULE_KEY_CLASS) != rule.get(self.__RULE_KEY_CLASS)]
# Add rule to completed queue
        if rule.get(self.__RULE_KEY_CLASS) not in set(r.get(self.__RULE_KEY_CLASS) for r in self.__rules_qeue_completed):
self.__rules_qeue_completed.append(rule)
def __getitem__(self, key):
if key in self.__rules_qeue:
return self.__rules_qeue[key]
raise KeyError
def __iter__(self):
for rule in self.get_rules():
yield rule
|
avara1986/gozokia
|
gozokia/core/rules.py
|
Python
|
mit
| 7,566
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import functools
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from msrest import Serializer
from .. import models as _models
from .._vendor import _convert_request, _format_url_section
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
_SERIALIZER = Serializer()
_SERIALIZER.client_side_validation = False
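# Each build_*_request helper below assembles the raw HttpRequest (URL, query
# parameters, headers) for one scale-set VM operation; the operations class at
# the bottom of the module sends it through the client pipeline.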
def build_reimage_request_initial(
resource_group_name: str,
vm_scale_set_name: str,
instance_id: str,
subscription_id: str,
**kwargs: Any
) -> HttpRequest:
api_version = "2016-03-30"
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}/reimage')
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
"vmScaleSetName": _SERIALIZER.url("vm_scale_set_name", vm_scale_set_name, 'str'),
"instanceId": _SERIALIZER.url("instance_id", instance_id, 'str'),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="POST",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
def build_deallocate_request_initial(
resource_group_name: str,
vm_scale_set_name: str,
instance_id: str,
subscription_id: str,
**kwargs: Any
) -> HttpRequest:
api_version = "2016-03-30"
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}/deallocate')
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
"vmScaleSetName": _SERIALIZER.url("vm_scale_set_name", vm_scale_set_name, 'str'),
"instanceId": _SERIALIZER.url("instance_id", instance_id, 'str'),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="POST",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
def build_delete_request_initial(
resource_group_name: str,
vm_scale_set_name: str,
instance_id: str,
subscription_id: str,
**kwargs: Any
) -> HttpRequest:
api_version = "2016-03-30"
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}')
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
"vmScaleSetName": _SERIALIZER.url("vm_scale_set_name", vm_scale_set_name, 'str'),
"instanceId": _SERIALIZER.url("instance_id", instance_id, 'str'),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="DELETE",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
def build_get_request(
resource_group_name: str,
vm_scale_set_name: str,
instance_id: str,
subscription_id: str,
**kwargs: Any
) -> HttpRequest:
api_version = "2016-03-30"
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}')
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
"vmScaleSetName": _SERIALIZER.url("vm_scale_set_name", vm_scale_set_name, 'str'),
"instanceId": _SERIALIZER.url("instance_id", instance_id, 'str'),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="GET",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
def build_get_instance_view_request(
resource_group_name: str,
vm_scale_set_name: str,
instance_id: str,
subscription_id: str,
**kwargs: Any
) -> HttpRequest:
api_version = "2016-03-30"
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}/instanceView')
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
"vmScaleSetName": _SERIALIZER.url("vm_scale_set_name", vm_scale_set_name, 'str'),
"instanceId": _SERIALIZER.url("instance_id", instance_id, 'str'),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="GET",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
def build_list_request(
resource_group_name: str,
virtual_machine_scale_set_name: str,
subscription_id: str,
*,
filter: Optional[str] = None,
select: Optional[str] = None,
expand: Optional[str] = None,
**kwargs: Any
) -> HttpRequest:
api_version = "2016-03-30"
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{virtualMachineScaleSetName}/virtualMachines')
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
"virtualMachineScaleSetName": _SERIALIZER.url("virtual_machine_scale_set_name", virtual_machine_scale_set_name, 'str'),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
if filter is not None:
query_parameters['$filter'] = _SERIALIZER.query("filter", filter, 'str')
if select is not None:
query_parameters['$select'] = _SERIALIZER.query("select", select, 'str')
if expand is not None:
query_parameters['$expand'] = _SERIALIZER.query("expand", expand, 'str')
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="GET",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
def build_power_off_request_initial(
resource_group_name: str,
vm_scale_set_name: str,
instance_id: str,
subscription_id: str,
**kwargs: Any
) -> HttpRequest:
api_version = "2016-03-30"
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}/poweroff')
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
"vmScaleSetName": _SERIALIZER.url("vm_scale_set_name", vm_scale_set_name, 'str'),
"instanceId": _SERIALIZER.url("instance_id", instance_id, 'str'),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="POST",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
def build_restart_request_initial(
resource_group_name: str,
vm_scale_set_name: str,
instance_id: str,
subscription_id: str,
**kwargs: Any
) -> HttpRequest:
api_version = "2016-03-30"
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}/restart')
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
"vmScaleSetName": _SERIALIZER.url("vm_scale_set_name", vm_scale_set_name, 'str'),
"instanceId": _SERIALIZER.url("instance_id", instance_id, 'str'),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="POST",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
def build_start_request_initial(
resource_group_name: str,
vm_scale_set_name: str,
instance_id: str,
subscription_id: str,
**kwargs: Any
) -> HttpRequest:
api_version = "2016-03-30"
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}/start')
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
"vmScaleSetName": _SERIALIZER.url("vm_scale_set_name", vm_scale_set_name, 'str'),
"instanceId": _SERIALIZER.url("instance_id", instance_id, 'str'),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="POST",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
class VirtualMachineScaleSetVMsOperations(object):
"""VirtualMachineScaleSetVMsOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.compute.v2016_03_30.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def _reimage_initial(
self,
resource_group_name: str,
vm_scale_set_name: str,
instance_id: str,
**kwargs: Any
) -> Optional["_models.OperationStatusResponse"]:
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.OperationStatusResponse"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_reimage_request_initial(
resource_group_name=resource_group_name,
vm_scale_set_name=vm_scale_set_name,
instance_id=instance_id,
subscription_id=self._config.subscription_id,
template_url=self._reimage_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('OperationStatusResponse', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_reimage_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}/reimage'} # type: ignore
@distributed_trace
def begin_reimage(
self,
resource_group_name: str,
vm_scale_set_name: str,
instance_id: str,
**kwargs: Any
) -> LROPoller["_models.OperationStatusResponse"]:
"""Reimages (upgrade the operating system) a specific virtual machine in a VM scale set.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param vm_scale_set_name: The name of the VM scale set.
:type vm_scale_set_name: str
:param instance_id: The instance ID of the virtual machine.
:type instance_id: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either OperationStatusResponse or the result of
cls(response)
:rtype:
~azure.core.polling.LROPoller[~azure.mgmt.compute.v2016_03_30.models.OperationStatusResponse]
:raises: ~azure.core.exceptions.HttpResponseError
"""
polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.OperationStatusResponse"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._reimage_initial(
resource_group_name=resource_group_name,
vm_scale_set_name=vm_scale_set_name,
instance_id=instance_id,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
response = pipeline_response.http_response
deserialized = self._deserialize('OperationStatusResponse', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_reimage.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}/reimage'} # type: ignore
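    # Illustrative call (resource names are placeholders):
    #   poller = compute_client.virtual_machine_scale_set_vms.begin_reimage(
    #       "my-rg", "my-vmss", "0")
    #   result = poller.result()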
def _deallocate_initial(
self,
resource_group_name: str,
vm_scale_set_name: str,
instance_id: str,
**kwargs: Any
) -> Optional["_models.OperationStatusResponse"]:
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.OperationStatusResponse"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_deallocate_request_initial(
resource_group_name=resource_group_name,
vm_scale_set_name=vm_scale_set_name,
instance_id=instance_id,
subscription_id=self._config.subscription_id,
template_url=self._deallocate_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('OperationStatusResponse', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_deallocate_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}/deallocate'} # type: ignore
@distributed_trace
def begin_deallocate(
self,
resource_group_name: str,
vm_scale_set_name: str,
instance_id: str,
**kwargs: Any
) -> LROPoller["_models.OperationStatusResponse"]:
"""Deallocates a specific virtual machine in a VM scale set. Shuts down the virtual machine and
releases the compute resources it uses. You are not billed for the compute resources of this
virtual machine once it is deallocated.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param vm_scale_set_name: The name of the VM scale set.
:type vm_scale_set_name: str
:param instance_id: The instance ID of the virtual machine.
:type instance_id: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either OperationStatusResponse or the result of
cls(response)
:rtype:
~azure.core.polling.LROPoller[~azure.mgmt.compute.v2016_03_30.models.OperationStatusResponse]
:raises: ~azure.core.exceptions.HttpResponseError
"""
polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.OperationStatusResponse"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._deallocate_initial(
resource_group_name=resource_group_name,
vm_scale_set_name=vm_scale_set_name,
instance_id=instance_id,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
response = pipeline_response.http_response
deserialized = self._deserialize('OperationStatusResponse', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_deallocate.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}/deallocate'} # type: ignore
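    # A hedged usage sketch (not generated code): driving this long-running
    # operation through an assumed ComputeManagementClient instance; the client,
    # credential and resource names below are placeholders, not part of this module.
    #
    #     client = ComputeManagementClient(credential, "<subscription-id>")
    #     poller = client.virtual_machine_scale_set_vms.begin_deallocate(
    #         resource_group_name="my-rg",      # assumed name
    #         vm_scale_set_name="my-vmss",      # assumed name
    #         instance_id="0",
    #     )
    #     result = poller.result()              # block until the LRO finishes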
def _delete_initial(
self,
resource_group_name: str,
vm_scale_set_name: str,
instance_id: str,
**kwargs: Any
) -> Optional["_models.OperationStatusResponse"]:
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.OperationStatusResponse"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_delete_request_initial(
resource_group_name=resource_group_name,
vm_scale_set_name=vm_scale_set_name,
instance_id=instance_id,
subscription_id=self._config.subscription_id,
template_url=self._delete_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('OperationStatusResponse', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}'} # type: ignore
@distributed_trace
def begin_delete(
self,
resource_group_name: str,
vm_scale_set_name: str,
instance_id: str,
**kwargs: Any
) -> LROPoller["_models.OperationStatusResponse"]:
"""Deletes a virtual machine from a VM scale set.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param vm_scale_set_name: The name of the VM scale set.
:type vm_scale_set_name: str
:param instance_id: The instance ID of the virtual machine.
:type instance_id: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either OperationStatusResponse or the result of
cls(response)
:rtype:
~azure.core.polling.LROPoller[~azure.mgmt.compute.v2016_03_30.models.OperationStatusResponse]
:raises: ~azure.core.exceptions.HttpResponseError
"""
polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.OperationStatusResponse"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._delete_initial(
resource_group_name=resource_group_name,
vm_scale_set_name=vm_scale_set_name,
instance_id=instance_id,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
response = pipeline_response.http_response
deserialized = self._deserialize('OperationStatusResponse', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = ARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}'} # type: ignore
@distributed_trace
def get(
self,
resource_group_name: str,
vm_scale_set_name: str,
instance_id: str,
**kwargs: Any
) -> "_models.VirtualMachineScaleSetVM":
"""Gets a virtual machine from a VM scale set.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param vm_scale_set_name: The name of the VM scale set.
:type vm_scale_set_name: str
:param instance_id: The instance ID of the virtual machine.
:type instance_id: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: VirtualMachineScaleSetVM, or the result of cls(response)
:rtype: ~azure.mgmt.compute.v2016_03_30.models.VirtualMachineScaleSetVM
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualMachineScaleSetVM"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_get_request(
resource_group_name=resource_group_name,
vm_scale_set_name=vm_scale_set_name,
instance_id=instance_id,
subscription_id=self._config.subscription_id,
template_url=self.get.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('VirtualMachineScaleSetVM', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}'} # type: ignore
@distributed_trace
def get_instance_view(
self,
resource_group_name: str,
vm_scale_set_name: str,
instance_id: str,
**kwargs: Any
) -> "_models.VirtualMachineScaleSetVMInstanceView":
"""Gets the status of a virtual machine from a VM scale set.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param vm_scale_set_name: The name of the VM scale set.
:type vm_scale_set_name: str
:param instance_id: The instance ID of the virtual machine.
:type instance_id: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: VirtualMachineScaleSetVMInstanceView, or the result of cls(response)
:rtype: ~azure.mgmt.compute.v2016_03_30.models.VirtualMachineScaleSetVMInstanceView
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualMachineScaleSetVMInstanceView"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_get_instance_view_request(
resource_group_name=resource_group_name,
vm_scale_set_name=vm_scale_set_name,
instance_id=instance_id,
subscription_id=self._config.subscription_id,
template_url=self.get_instance_view.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('VirtualMachineScaleSetVMInstanceView', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_instance_view.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}/instanceView'} # type: ignore
@distributed_trace
def list(
self,
resource_group_name: str,
virtual_machine_scale_set_name: str,
filter: Optional[str] = None,
select: Optional[str] = None,
expand: Optional[str] = None,
**kwargs: Any
) -> Iterable["_models.VirtualMachineScaleSetVMListResult"]:
"""Gets a list of all virtual machines in a VM scale sets.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_machine_scale_set_name: The name of the VM scale set.
:type virtual_machine_scale_set_name: str
:param filter: The filter to apply to the operation. Allowed values are
'startswith(instanceView/statuses/code, 'PowerState') eq true', 'properties/latestModelApplied
eq true', 'properties/latestModelApplied eq false'.
:type filter: str
:param select: The list parameters. Allowed values are 'instanceView', 'instanceView/statuses'.
:type select: str
:param expand: The expand expression to apply to the operation. Allowed values are
'instanceView'.
:type expand: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either VirtualMachineScaleSetVMListResult or the result
of cls(response)
:rtype:
~azure.core.paging.ItemPaged[~azure.mgmt.compute.v2016_03_30.models.VirtualMachineScaleSetVMListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualMachineScaleSetVMListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
def prepare_request(next_link=None):
if not next_link:
request = build_list_request(
resource_group_name=resource_group_name,
virtual_machine_scale_set_name=virtual_machine_scale_set_name,
subscription_id=self._config.subscription_id,
filter=filter,
select=select,
expand=expand,
template_url=self.list.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
request = build_list_request(
resource_group_name=resource_group_name,
virtual_machine_scale_set_name=virtual_machine_scale_set_name,
subscription_id=self._config.subscription_id,
filter=filter,
select=select,
expand=expand,
template_url=next_link,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
def extract_data(pipeline_response):
deserialized = self._deserialize("VirtualMachineScaleSetVMListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{virtualMachineScaleSetName}/virtualMachines'} # type: ignore
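    # A hedged usage sketch (not generated code): paging through the instances of a
    # scale set with one of the documented filters; 'client' and the resource names
    # are assumed placeholders.
    #
    #     for vm in client.virtual_machine_scale_set_vms.list(
    #         "my-rg", "my-vmss",
    #         filter="properties/latestModelApplied eq false",
    #     ):
    #         print(vm.instance_id)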
def _power_off_initial(
self,
resource_group_name: str,
vm_scale_set_name: str,
instance_id: str,
**kwargs: Any
) -> Optional["_models.OperationStatusResponse"]:
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.OperationStatusResponse"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_power_off_request_initial(
resource_group_name=resource_group_name,
vm_scale_set_name=vm_scale_set_name,
instance_id=instance_id,
subscription_id=self._config.subscription_id,
template_url=self._power_off_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('OperationStatusResponse', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_power_off_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}/poweroff'} # type: ignore
@distributed_trace
def begin_power_off(
self,
resource_group_name: str,
vm_scale_set_name: str,
instance_id: str,
**kwargs: Any
) -> LROPoller["_models.OperationStatusResponse"]:
"""Power off (stop) a virtual machine in a VM scale set. Note that resources are still attached
and you are getting charged for the resources. Instead, use deallocate to release resources and
avoid charges.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param vm_scale_set_name: The name of the VM scale set.
:type vm_scale_set_name: str
:param instance_id: The instance ID of the virtual machine.
:type instance_id: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either OperationStatusResponse or the result of
cls(response)
:rtype:
~azure.core.polling.LROPoller[~azure.mgmt.compute.v2016_03_30.models.OperationStatusResponse]
:raises: ~azure.core.exceptions.HttpResponseError
"""
polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.OperationStatusResponse"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._power_off_initial(
resource_group_name=resource_group_name,
vm_scale_set_name=vm_scale_set_name,
instance_id=instance_id,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
response = pipeline_response.http_response
deserialized = self._deserialize('OperationStatusResponse', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_power_off.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}/poweroff'} # type: ignore
def _restart_initial(
self,
resource_group_name: str,
vm_scale_set_name: str,
instance_id: str,
**kwargs: Any
) -> Optional["_models.OperationStatusResponse"]:
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.OperationStatusResponse"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_restart_request_initial(
resource_group_name=resource_group_name,
vm_scale_set_name=vm_scale_set_name,
instance_id=instance_id,
subscription_id=self._config.subscription_id,
template_url=self._restart_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('OperationStatusResponse', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_restart_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}/restart'} # type: ignore
@distributed_trace
def begin_restart(
self,
resource_group_name: str,
vm_scale_set_name: str,
instance_id: str,
**kwargs: Any
) -> LROPoller["_models.OperationStatusResponse"]:
"""Restarts a virtual machine in a VM scale set.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param vm_scale_set_name: The name of the VM scale set.
:type vm_scale_set_name: str
:param instance_id: The instance ID of the virtual machine.
:type instance_id: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either OperationStatusResponse or the result of
cls(response)
:rtype:
~azure.core.polling.LROPoller[~azure.mgmt.compute.v2016_03_30.models.OperationStatusResponse]
:raises: ~azure.core.exceptions.HttpResponseError
"""
polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.OperationStatusResponse"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._restart_initial(
resource_group_name=resource_group_name,
vm_scale_set_name=vm_scale_set_name,
instance_id=instance_id,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
response = pipeline_response.http_response
deserialized = self._deserialize('OperationStatusResponse', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_restart.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}/restart'} # type: ignore
def _start_initial(
self,
resource_group_name: str,
vm_scale_set_name: str,
instance_id: str,
**kwargs: Any
) -> Optional["_models.OperationStatusResponse"]:
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.OperationStatusResponse"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_start_request_initial(
resource_group_name=resource_group_name,
vm_scale_set_name=vm_scale_set_name,
instance_id=instance_id,
subscription_id=self._config.subscription_id,
template_url=self._start_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('OperationStatusResponse', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_start_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}/start'} # type: ignore
@distributed_trace
def begin_start(
self,
resource_group_name: str,
vm_scale_set_name: str,
instance_id: str,
**kwargs: Any
) -> LROPoller["_models.OperationStatusResponse"]:
"""Starts a virtual machine in a VM scale set.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param vm_scale_set_name: The name of the VM scale set.
:type vm_scale_set_name: str
:param instance_id: The instance ID of the virtual machine.
:type instance_id: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either OperationStatusResponse or the result of
cls(response)
:rtype:
~azure.core.polling.LROPoller[~azure.mgmt.compute.v2016_03_30.models.OperationStatusResponse]
:raises: ~azure.core.exceptions.HttpResponseError
"""
polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.OperationStatusResponse"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._start_initial(
resource_group_name=resource_group_name,
vm_scale_set_name=vm_scale_set_name,
instance_id=instance_id,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
response = pipeline_response.http_response
deserialized = self._deserialize('OperationStatusResponse', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_start.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}/start'} # type: ignore
|
Azure/azure-sdk-for-python
|
sdk/compute/azure-mgmt-compute/azure/mgmt/compute/v2016_03_30/operations/_virtual_machine_scale_set_vms_operations.py
|
Python
|
mit
| 56,685
|
import os
import sys
import numpy as np
from basic.common import checkToSkip
def process(options):
overwrite = options.overwrite
inputeFile = options.inputeFile
weightFile = options.weightFile
resultFile = options.resultFile
weightFile = os.path.join('result', weightFile)
weight = open(weightFile).readline().strip().split()
weight = np.array(weight, dtype=np.float)
resultFile = os.path.join('result', resultFile)
if checkToSkip(resultFile, overwrite):
sys.exit(0)
fout = open(resultFile, 'w')
done = 0
for line in open(os.path.join('result', inputeFile)):
elems = line.strip().split()
vecs = map(float, elems[3:])
vecs = np.array(vecs, dtype=np.float)
assert(len(weight) == len(vecs))
fout.write(" ".join(elems[:2]) + " " + str(np.dot(weight, vecs)) + '\n')
done += 1
if done % 10000 == 0:
print done, 'Done'
fout.close()
print "final score result after relevance fusion have written in %s" % resultFile
def main(argv=None):
if argv is None:
argv = sys.argv[1:]
from optparse import OptionParser
parser = OptionParser(usage="""usage: %prog [options] """)
parser.add_option("--overwrite", default=0, type="int", help="overwrite existing file (default: 0)")
parser.add_option("--inputeFile", default='qid.img.lable.feature.txt', type="string", help="file stored all score from different methods")
parser.add_option("--weightFile", default='optimized_wights.txt', type="string", help="optimized wight will be written in the file")
parser.add_option("--resultFile", default='fianl.result.txt', type="string", help="final score after relevance fusion")
(options, args) = parser.parse_args(argv)
return process(options)
if __name__ == "__main__":
sys.exit(main())
|
danieljf24/cmrf
|
relevanceFusion.py
|
Python
|
mit
| 1,905
|
"""
functions for evaluating spreadsheet functions
primary function is parse, which the rest revolves around
evaluate should be called with the full string by a parent program
A note on exec:
This uses the exec function repeatedly; where possible, its use should be
minimized, but this module is only intended to be run on trusted
spreadsheets. Future development may focus on making it more secure, but
the primary goal is simply to evaluate the most common functions,
regardless of the ability for code to be injected.
Another note:
this whole thing could stand to be redone
"""
# import spreadsheet mirroring functions
import eval.functions as functions
import eval.translate as translate
import eval.storage as global_file # historical reasons for name
__author__ = 'user0'
def evaluate(s, reference_dictionary=None):
# if included, reference dictionary is a dictionary of relevant
# cell references.
    # alternatively, if reference_dictionary is None, it is presumed
    # that references do not need to be replaced with values in the
    # formula
if s[0] == '=':
# get rid of the equals sign at the beginning of the formula
s = s[1:]
# send reference dictionary to storage
global_file.formulas = reference_dictionary
# I feel like I'm forgetting something else here
return parse(s)
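# A hedged usage sketch: evaluating a formula with no cell references, assuming
# translate.spreadsheet_replace maps '^' to '**' as noted in parse() below.
#
#     result = evaluate('=2^3+1', reference_dictionary={})   # -> 9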
def parse(s, function=None):
# returns evaluation of formula via recursive function;
# before this function is run, dependencies should be
# identified and evaluated
replace = {}
it = 0
level = 0
# replace references with cell values
s = s.lower()
# for formula in global_file.formulas:
# if formula in s:
# s = s.replace(formula, str(
# global_file.formulas[formula].return_value()))
# replace values with python equivalents
# ('^' with '**' for example)
s = translate.spreadsheet_replace(s)
# evaluate formula
for char in s:
if char == '(':
level += 1
if level == 1:
parent_start = it
if char == ')':
level -= 1
if level == 0:
parent_close = it
prefix = get_prefix(s, parent_start)
body = s[parent_start + 1: parent_close]
formula = '{}({})'.format(prefix, body)
                replace[formula] = str(parse(body, prefix))  # body is the argument string, prefix the function name
verbose('replacing {} with {}'.format(formula,
replace[formula]))
it += 1
# replace strings
for entry in replace:
s = s.replace(entry, replace[entry])
# depending on the presence of a function, either simply evaluate,
# or use a function from functions
if function:
# if function is in the replacement dictionary,
# replace it with that entry
if function in functions.function_replace:
function = functions.function_replace[function]
else:
            print('function %s was not in function dictionary' % function)
# function just stopped sounding like a word
# insert the formula in a python-readable format
body_strings = s.split(',') # this is used below
exec_string = '%s(body_strings)' % function
else:
# replace references with values and find result
s = s.lower()
        for reference in (global_file.formulas or {}):  # tolerate reference_dictionary=None
while reference.lower() in s:
replacement_cell = global_file.formulas[reference]
if replacement_cell.data_type == 'string' and \
not replacement_cell.script:
replacement = '\'%s\'' % replacement_cell.text
else:
replacement = replacement_cell.value
s = s.replace(reference.lower(), replacement)
exec_string = s
exec_string = eval_append(exec_string)
verbose(exec_string)
exec(exec_string)
return global_file.returned
def get_prefix(formula_string, start):
alpha = 'abcdefghijklmnopqrstuvwxyz'
number = '.0123456789'
prefix = ''
string_position = start - 1
while True:
character = formula_string[string_position]
if string_position >= 0:
if character in alpha or character in number:
prefix = character + prefix
else:
return prefix
else:
return prefix
string_position -= 1
def eval_append(s):
prefix = 'global_file.returned = '
return prefix + s
def verbose(s):
# if verbose setting, print s
if global_file.verbose:
print(s)
|
TryExceptElse/pysheetdata
|
eval/parser.py
|
Python
|
mit
| 4,789
|
# -*- coding: utf-8 -*-
import sys
PY2 = sys.version_info[0] == 2
if PY2:
long = long
unicode = unicode
basestring = basestring
from urllib import quote_plus, unquote_plus, quote, unquote
from urlparse import parse_qsl
else:
long = int
unicode = str
basestring = str
from urllib.parse import (quote_plus, unquote_plus,
parse_qsl, quote, unquote)
class Null(object):
    def __bool__(self):
        return False
    __nonzero__ = __bool__  # Python 2 uses __nonzero__ for truth testing
def __eq__(self, other):
return other is None
def decode(s, encodings=('utf8', 'ascii', 'latin1')):
for encoding in encodings:
try:
return s.decode(encoding)
except UnicodeDecodeError:
pass
return s.decode('utf8', 'ignore')
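# A hedged usage sketch of the fallback behaviour of decode():
#   decode(b'caf\xc3\xa9')  -> u'café'   (decoded as utf8)
#   decode(b'caf\xe9')      -> u'café'   (utf8 and ascii fail, latin1 succeeds)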
|
sdispater/eloquent
|
eloquent/utils/__init__.py
|
Python
|
mit
| 773
|
#! /usr/bin/env python2.5
# -*- coding: latin-1 -*-
import axiom_rules
import fact_groups
import instantiate
import pddl
import sas_tasks
import simplify
import sys
import PrecosatSolver
import copy
import os
import SASE_helper
import SolverAdapter
from SASE_helper import *
from SASE_base import *
import math
import signal
#DEBUGGING OPTIONS
PRINT_OUT_CLAUSES = False
PRINT_ALL_OP_AX = False
USE_CLIQUE_BIN_CONSTRAINT = True
def signal_handler(signum, frame):
raise Exception("Time out.")
################################################
## This basic encoding doesn't support axioms (i.e. STRIPS only)
## Two Types of vars: trans, op
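## (e.g. a trans var asserts "multi-valued variable mv changes from value pre to
##  value post at step t"; an op var asserts "operator op is executed at step t")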
################################################
class PlainSASEncoding(BaseSASEncoding):
def print_all_variables(self):
for var in range(self.var_cnt-1):
t = self.var_lnk[var]
if t[0] == "trans":
if len(t) == 5:
print var+1, "is var[%d:%d->%d@%d]" % (t[1],t[2],t[3],t[4])
elif t[0] == "op":
print var+1, "is op[%d:%s@%d]" %(t[1], self.task.operators[t[1]].name, t[2])
elif t[0] == "AUX":
print var+1, "is AUX for %dth bit of var set:" % ( t[1] ), t[2]
else:
pass
def init_encoding(self):
self.op_var = {} #Each key in the dictionary is a 2-tuple (op_index, time)
self.trans_var = {}
self.var_lnk = []
self.var_cnt = 1
def encoding_specific_processing(self):
self.useful_trans_cnt = len(self.trans_lst)
for mv in range(len(self.mv_ranges)):
for v in range(self.mv_ranges[mv]):
if (mv,v,v) not in self.trans_dict:
self.trans_dict[mv,v,v] = len(self.trans_lst)
self.trans_lst.append( (mv,v,v))
self.pres[mv][v].add(v)
self.posts[mv][v].add(v)
def make_op_var( self, op, time ):
assert type(op) == type(1)
assert (op,time) not in self.op_var
self.op_var[op,time] = self.var_cnt
self.var_lnk.append( ("op", op, time) )
self.var_cnt += 1
return self.var_cnt - 1
def make_trans_var(self, mv, pre, post, time):
assert (mv,pre,post,time) not in self.trans_var
self.trans_var[mv,pre,post,time] = self.var_cnt
self.var_lnk.append( ("trans", mv, pre, post, time) )
self.var_cnt += 1
return self.var_cnt - 1
#############################################################################
def add_clique_exclusion_constraints(self,s,lits):
if len(lits)<4:
for i,lit in enumerate(lits):
for lit2 in lits[i+1:]:
s.new_clause_add_lit( -1 * lit )
s.new_clause_add_lit( -1 * lit2 )
s.new_clause_push()
else:
num_aux_vars = int( math.ceil( math.log(len(lits),2) ) )
aux_vars = []
for i in range(num_aux_vars):
aux_vars.append(self.var_cnt)
self.var_lnk.append(("AUX",i,set(lits)))
self.var_cnt += 1
for order,lit in enumerate(lits):
binstr = dec2bin(order)
assert len(binstr) <= num_aux_vars
binstr = "0" * (num_aux_vars - len(binstr) ) + binstr
for i,bit in enumerate(binstr):
s.new_clause_add_lit( -1 * lit )
if bit == '0':
s.new_clause_add_lit( -1 * aux_vars[i])
else:
s.new_clause_add_lit( aux_vars[i])
s.new_clause_push()
# This type of additional clause will rule out all the solutions, why???????????????
#Other Direction;
#s.new_clause_add_lit( lit )
#for i,bit in enumerate( binstr ):
# if bit == '0':
# s.new_clause_add_lit( aux_vars[i])
# else:
# s.new_clause_add_lit( -1 * aux_vars[i] )
#s.new_clause_push()
#""" Do WE NEED THIS!!!!????
#for order in range(len(lits),2**num_aux_vars):
# binstr = dec2bin(order)
# binstr = "0" * (num_aux_vars - len(binstr) ) + binstr
# for i,bit in enumerate(binstr):
# if bit == '0':
# s.new_clause_add_lit( aux_vars[i])
# else:
# s.new_clause_add_lit( -1 * aux_vars[i])
# s.new_clause_push()
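        # The aux-var branch above is a logarithmic ("binary") at-most-one encoding:
        # each literal is tied to the binary code of its index through
        # ceil(log2(len(lits))) auxiliary variables, so any two literals disagree on
        # at least one code bit and cannot both be true. A small worked example for
        # lits = [l1, l2, l3, l4, l5] (3 aux vars a0,a1,a2; codes 000,001,010,...):
        #   l1 (code 000) -> clauses (-l1 -a0), (-l1 -a1), (-l1 -a2)
        #   l2 (code 001) -> clauses (-l2 -a0), (-l2 -a1), (-l2  a2)
        # giving roughly n*log2(n) binary clauses instead of n*(n-1)/2 pairwise ones.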
def add_trans_mutual_exclusions(self,s,mv,time):
if USE_CLIQUE_BIN_CONSTRAINT:
clique = set()
for pre in range(0,self.mv_ranges[mv]):
for post in range(0,self.mv_ranges[mv]):
if ( mv, pre, post, time ) in self.trans_var:
var = self.trans_var[ mv, pre, post, time ]
clique.add(var)
for val in range(self.mv_ranges[mv]):
if (mv,val,val,time) in self.trans_var:
var = self.trans_var[mv,val,val,time]
clique.add(var)
self.add_clique_exclusion_constraints( s, list(clique) )
#Special Case for V:-1->X:
#to-do: This condition might be flawed.
#Are we sure that -1->X and Y->X should be excluded or not?
for val in range(self.mv_ranges[mv]):
if (mv,-1,val,time) in self.trans_var:
tvar = self.trans_var[mv,-1,val,time]
ex_var = -1
if (mv,val,val,time) in self.trans_var:
ex_var = self.trans_var[ mv,val,val,time ]
for lit in list(clique):
if ex_var != lit:
s.new_clause_add_lit( -1 * tvar )
s.new_clause_add_lit( -1 * lit )
s.new_clause_push()
#Another possible choice:
#for val in range(self.mv_ranges[mv]):
# if (mv,-1,val,time) in self.trans_var:
# tvar = self.trans_var[mv,-1,val,time]
# excluded_var_set = set(clique)
# for pre in range(0,self.mv_ranges[mv]):
# if (mv,pre,val,time) in self.trans_var:
# excluded_var_set.remove( self.trans_var[mv,pre,val,time] )
# for lit in list(excluded_var_set):
# s.new_clause_add_lit( -1 * tvar )
# s.new_clause_add_lit( -1 * lit )
# s.new_clause_push()
else:
for pre in range(-1,self.mv_ranges[mv]):
for post in range(0,self.mv_ranges[mv]):
if ( mv, pre, post, time ) in self.trans_var:
var = self.trans_var[ mv, pre, post, time ]
for val in range(self.mv_ranges[mv]):
if (val,val) != ( pre,post ) and (mv, val, val, time ) in self.trans_var:
var2 = self.trans_var[ mv, val, val, time ]
s.new_clause_add_lit( -1 * var )
s.new_clause_add_lit( -1 * var2 )
s.new_clause_push()
for others in self.mv_trans[mv]:
assert len(others) == 2
if others[0]==others[1] or others == ( pre,post ):
continue
if (mv, others[0], others[1], time ) in self.trans_var:
var2 = self.trans_var[ mv, others[0], others[1], time ]
s.new_clause_add_lit( -1 * var )
s.new_clause_add_lit( -1 * var2 )
s.new_clause_push()
############################################################################
#### Encoding of each iteration; ####
############################################################################
def solve_decision( self, task, N ):
self.init_encoding()
std = sys.stdout
#s = minisat.Solver()
s = PrecosatSolver.Solver()
if self.status.dump != 0 :
#assert False
global PRINT_OUT_CLAUSES
PRINT_OUT_CLAUSES = True
s = SolverAdapter.Solver()
################ Constructing Variables ################
for trans,index in self.trans_dict.items():
for time in range(self.first_apr_trans[index],N):
if N - time < self.dist_to_N[index]:
continue
self.make_trans_var(trans[0], trans[1], trans[2], time )
print "Total Number of Trans Variables %d;" % ( self.var_cnt ),
reduced_cnt = 0
for time in range(N):
flag = False
for op_i,op in enumerate(self.task.operators):
if self.first_appearance[op_i] > time:
continue
flag = False
for mv,pre,post,cond in op.pre_post:
if N - time < self.dist_to_N[self.trans_dict[mv,pre,post]]:
flag = True
break
if not flag:
if op_i not in self.reduced_op_dict:
self.make_op_var( op_i, time)
else:
t = self.reduced_op_dict[op_i]
if len(t) == 1:
tran = self.trans_lst[t[0]]
chk_v = (tran[0],tran[1],tran[2],time)
if chk_v in self.trans_var:
reduced_cnt += 1
self.op_var[op_i,time] = self.trans_var[chk_v]
else:
self.make_op_var( op_i, time)
print " Op Variables ", self.var_cnt, 'reduced by %d' %( reduced_cnt )
################# Part I. Constraints Of Transitions #####################
#Constraint Type 1: Exclusiveness of Underlying Atomic Transitions
for time in range(N):
for mv in [mv for mv in range(len(self.mv_ranges))]:
self.add_trans_mutual_exclusions(s,mv,time)
print_clause_progress("I.a (Trans Mutex)",s)
#Constraint 2: Progressive relations of underlying Atomic transitions
# : value at N implies a disjunction of corresponding value changes at N+1
# : Axiom MVs are not included
############# This is forwarding approach
for time in range( 0, N-1 ):
for mv in range(self.num_mv):
for pre in range(-1,self.mv_ranges[mv]):
for post in range(self.mv_ranges[mv]):
if ( mv, pre, post, time) not in self.trans_var:
continue
clause = []
var = self.trans_var[ mv, pre, post, time ]
clause.append( var * -1)
for i in range(0, self.mv_ranges[mv]):
if ( mv, post, i, time + 1) in self.trans_var:
var2 = self.trans_var[ mv, post, i, time + 1]
clause.append( var2 )
if (mv,-1,i,time+1) in self.trans_var:
var2 = self.trans_var[ mv, -1, i, time + 1]
clause.append( var2 )
if len(clause)!=1:
for lit in clause:
s.new_clause_add_lit(lit)
s.new_clause_push()
print_clause_progress("I.b Transitions's progression", s)
# Constraint 2: in a backward way;
for time in range( 1, N ):
for mv in range(self.num_mv):
for pre in range(0,self.mv_ranges[mv]):
for post in range(self.mv_ranges[mv]):
if ( mv, pre, post, time) not in self.trans_var:
continue
clause = []
var = self.trans_var[ mv, pre, post, time ]
clause.append( var * -1)
for i in range(0, self.mv_ranges[mv]):
if ( mv, i, pre, time - 1) in self.trans_var:
var2 = self.trans_var[ mv, i, pre, time - 1]
clause.append( var2 )
if (mv,-1,pre,time-1) in self.trans_var:
var2 = self.trans_var[ mv, -1, pre, time - 1]
clause.append( var2 )
#if len(clause)!=1: I guess this is not necessary;
for lit in clause:
s.new_clause_add_lit(lit)
s.new_clause_push()
print_clause_progress("I.b Transitions's progression (Backward)", s)
#Constraint 6: Initial state
for mv,val in enumerate(task.init.values):
for possible_post in range(0,self.mv_ranges[mv]):
if ( mv, val, possible_post, 0 ) in self.trans_var:
var = self.trans_var[ mv, val, possible_post, 0 ]
s.new_clause_add_lit( var )
for vv in range(self.mv_ranges[mv]):
if(mv,-1,vv,0) in self.trans_var:
s.new_clause_add_lit( self.trans_var[mv,-1,vv,0] )
s.new_clause_push()
print_clause_progress("I.c Inital state", s )
#Constraint 7: Goal:
for mv,val in task.goal.pairs:
clause = []
debug_lst = []
for possible_pre in range(-1,self.mv_ranges[mv]):
if (mv, possible_pre, val, N-1) in self.trans_var:
var = self.trans_var[ mv, possible_pre, val, N-1 ]
clause.append( var )
else:
debug_lst.append((mv, possible_pre, val, N-1))
if len(clause)==0:
del s
return []
else:
for lit in clause:
s.new_clause_add_lit(lit)
s.new_clause_push()
print_clause_progress("I.d Goal",s)
#####################################################################
############ # Part II. Constraints with Op involved ###########
#####################################################################
#Constraint Type 0: At least one action needs to be true at each time step;
#This type of constraints is just not necessary here;
#for time in range(N):
# flag = False
# for op_i,op in enumerate(task.operators):
# if self.first_appearance[op_i] > time:
# continue
# s.new_clause_add_lit( self.op_var[ op_i, time] )
# flag = True
# if flag:
# s.new_clause_push()
#print_clause_progress("II.a (Action's Existence):", s)
#Constraint 4: Mapping from trans to actions (one transition implies a disj of actions)
for index in range(len(self.trans_conn)):
trans = self.trans_lst[index]
if trans[1] == trans[2]:
continue
if index > self.useful_trans_cnt:
continue
for time in range( self.first_apr_trans[index], N - self.dist_to_N[index] ):
lits = []
trans_var = self.trans_var[ trans[0], trans[1], trans[2], time ]
lits.append( trans_var * -1 )
for op in list(self.trans_conn[index]):
if (op,time) in self.op_var:
opv = self.op_var[op,time]
lits.append( opv )
#Remember, This condition check is necessary!!
#trans[1]!=trans[2]: make sure those non-change transition
# even they don't imply anything, will not be pruned.
if trans[1]!=trans[2] or len(lits)>1:
for lit in lits:
s.new_clause_add_lit( lit )
s.new_clause_push()
print_clause_progress("II.b Trans => Opers", s)
#Constraint 3: Mapping from actions to transitions (one action implies a conjunction of transitions)
# Any trans_var doesn't exists, there must be an exception!!!
for op_i,op in enumerate(task.operators):
for time in range(N):
if (op_i,time) not in self.op_var:
continue
op_var = self.op_var[ op_i, time ]
for mv, pre, post, cond in op.pre_post:
assert len(cond) == 0
chk_var = ( mv, pre, post, time )
assert chk_var in self.trans_var
trans_var = self.trans_var[chk_var]
s.new_clause_add_lit( -1 * op_var )
s.new_clause_add_lit( trans_var )
s.new_clause_push()
for mv, val in op.prevail:
chk_var = ( mv, val, val, time )
assert chk_var in self.trans_var
trans_var = self.trans_var[chk_var]
s.new_clause_add_lit( -1 * op_var )
s.new_clause_add_lit( trans_var )
s.new_clause_push()
print_clause_progress("II.c Opers => Trans",s)
#Constraint 4: Mutex Conditions;
#Two actions with same pre-post transition cannot be executed at the same time; (a violation)
if USE_CLIQUE_BIN_CONSTRAINT :
for time in range(0,N):
for trans,index in self.trans_dict.items():
if trans[1]==trans[2]:# or trans[1] == -1:
continue
if index in self.unnecessary_cliques: #and time > self.clique_first_required_time[index]:
continue
if len(self.trans_conn[index]) > 1:
clique = []
for op1 in self.trans_conn[index]:
if (op1,time) not in self.op_var:
continue
op_var = self.op_var[op1,time]
clique.append( op_var )
#if index in self.unnecessary_cliques and len(clique)==len(self.trans_conn[index]):
# continue
if len(clique)>1:
self.add_clique_exclusion_constraints(s,clique)
else:
for trans,index in self.trans_dict.items():
if trans[1]==trans[2]:
continue
if len(self.trans_conn[index]) > 1:
lst = list(self.trans_conn[index])
for i,op1 in enumerate(lst):
for op2 in lst[i+1:]:
if op1 == op2:
continue
for time in range(0,N):
if ((op1,time) not in self.op_var) or ( (op2,time) not in self.op_var):
continue
op1_var = self.op_var[ op1, time ]
op2_var = self.op_var[ op2, time ]
s.new_clause_add_lit( -1 * op1_var )
s.new_clause_add_lit( -1 * op2_var )
s.new_clause_push()
print_clause_progress("II.d Opers' mutex",s)
print "SASE Encoding finished. #Var:", self.var_cnt
#Conflict Effects! This constraint only applies to Rovers domain
#This is a hack!!
for time in range(0,N):
for item in self.conflict_eff_cliques:
clique = []
for op in item:
if (op,time) in self.op_var:
clique.append( self.op_var[op,time] )
self.add_clique_exclusion_constraints(s,clique)
######1. Implication Trans;
for index,implied_trans in self.trans_implication.items():
mt = self.trans_lst[index]
for time in range(N):
if(mt[0],mt[1],mt[2],time) in self.trans_var:
var = self.trans_var[mt[0],mt[1],mt[2],time]
for i2 in list(implied_trans):
it = self.trans_lst[i2]
mv,val,val2 = it
for pre in range(-1,self.mv_ranges[mv]):
if pre!= val and (mv,pre,val,time) in self.trans_var:
s.new_clause_add_lit( -1 * var)
var2 = self.trans_var[mv,pre,val,time]
s.new_clause_add_lit( -1 * var2)
s.new_clause_push()
#for post in range(-1,self.mv_ranges[mv]):
# if post!=val2 and (mv,val2,post,time) in self.trans_var:
# s.new_clause_add_lit( -1 * var)
# var2 = self.trans_var[mv,val2,post,time]
# s.new_clause_add_lit( -1 * var2)
# s.new_clause_push()
print_clause_progress("III.b Implied Transitions ", s)
self.encoding_post_operations( self.var_cnt, s )
if PRINT_OUT_CLAUSES == True:
if self.status.output == "":
self.print_all_variables()
s.print_out_all_clauses()
s.solver()
else:
s.dump_all_clauses( self.status.output )
sys.exit()
else:
signal.signal(signal.SIGALRM, signal_handler)
signal.alarm(self.status.time_limit)
rst = s.solve()
if rst == False:
del s
return []
else:
plan = self.decode( s, task, N )
del s
return plan
def decode(self, solver, task, N ):
print "Decoding for total time layer ", N
plan = [[] for item in range(N)]
for t in range( 0, N ):
for index,o_oper in enumerate(task.operators):
if (index,t) in self.op_var:
var = self.op_var[index, t]
#rst = solver.model.element(var-1).get_v()
rst = solver.get_assignment(var)
if rst == 1:
if self.first_appearance[index] > t:
print "Checking and Adding Action:", o_oper.name
print "However, graph-plan info says it can only be true starting from layer", self.first_appearance[index]
plan[t].append(index)
for item in plan:
print item,
return plan
### END OF ENCODING_CONSTANT class ###################################################
|
thierry1985/project-1022
|
translate/STRIPS_SASE/SASE_plain.py
|
Python
|
mit
| 20,076
|
# Mainly so that the app is displayed with its Chinese name in the admin interface
default_app_config = 'bespeak_meal.apps.Bespeak_meal_config'
|
zhengxinxing/bespeak_meal
|
__init__.py
|
Python
|
mit
| 120
|
"""
Restores the uplifted horizons while restricting slip along the fault to the
specified azimuth.
"""
import numpy as np
import matplotlib.pyplot as plt
from fault_kinematics.homogeneous_simple_shear import invert_slip
import data
import basic
def main():
azimuth = data.fault_strike + 90
# azimuth = 304 # Plate motion from Loveless & Meade
def func(*args, **kwargs):
return forced_direction_inversion(azimuth, *args, **kwargs)
slips, heaves, variances, planar_variances = basic.restore_horizons(func)
basic.plot_restored_locations(slips, heaves)
plt.show()
def forced_direction_inversion(azimuth, fault, xyz, alpha, **kwargs):
"""Forces the inversion to only consider slip along the given azimuth."""
azimuth = np.radians(90 - azimuth)
dx, dy = np.cos(azimuth), np.sin(azimuth)
direc = [[dx, dy], [dx, dy]]
return invert_slip(fault, xyz, alpha, direc=direc, **kwargs)
if __name__ == '__main__':
main()
|
joferkington/oost_paper_code
|
invert_slip_fixed_azimuth.py
|
Python
|
mit
| 966
|
from itertools import product
from demos.auction_model import demo
def test02():
sellers = [4]
buyers = [100]
results = demo(sellers, buyers, time_limit=False)
expected_results = {100: 4, 4: 100}
for k, v in results.items():
assert expected_results[k] == v, "Hmmm... That's not right {}={}".format(k, v)
def test03():
expected_results = {101: 5, 5: 101, 6: None, 7: None}
sellers = [k for k in expected_results if k < 100]
buyers = [k for k in expected_results if k >= 100]
results = demo(sellers, buyers)
for k, v in results.items():
assert expected_results[k] == v, "Hmmm... That's not right {}={}".format(k, v)
def test04():
expected_results = {0: 101, 101: 0, 102: None,
103: None} # result: 0 enters contract with price 334.97 (the highest price)
sellers = [k for k in expected_results if k < 100]
buyers = [k for k in expected_results if k >= 100]
results = demo(sellers, buyers)
for k, v in results.items():
assert expected_results[k] == v, "Hmmm... That's not right {}={}".format(k, v)
def test05():
expected_results = {101: 7, 102: 6, 103: 5, 5: 103, 6: 102, 7: 101}
sellers = [k for k in expected_results if k < 100]
buyers = [k for k in expected_results if k >= 100]
results = demo(sellers, buyers)
for k, v in results.items():
assert expected_results[k] == v, "Hmmm... That's not right {}={}".format(k, v)
def test06():
expected_results = {0: 102, 1: 108, 2: 105, 3: 107, 4: 100, 5: 106, 6: 112, 7: 111, 8: 103, 9: 109, 10: 104, 100: 4, 101: None, 102: 0, 103: 8, 104: 10, 105: 2, 106: 5, 107: 3, 108: 1, 109: 9, 110: None, 111: 7, 112: 6}
sellers = [k for k in expected_results if k < 100]
buyers = [k for k in expected_results if k >= 100]
error_sets = []
for s_init, b_init in list(product([True, False], repeat=2)):
if not s_init and not b_init:
continue # if neither seller or buyer initialise, obviously nothing will happen.
results = demo(sellers=sellers, buyers=buyers, seller_can_initialise=s_init, buyer_can_initialise=b_init)
errors = []
for k, v in results.items():
if not expected_results[k] == v: # , "Hmmm... That's not right {}={}".format(k, v)
errors.append((k, v))
if errors:
error_sets.append(errors)
if error_sets:
print("-" * 80)
for i in error_sets:
print(",".join(str(i) for i in sorted(i)), flush=True)
raise AssertionError("output does not reflect expected results.")
|
root-11/outscale
|
tests/auction_demo_tests.py
|
Python
|
mit
| 2,607
|
#!/usr/bin/env python3
##### ##### ##### ##### #### ####
# # # # # # # # # # #### #### # # #
##### #### ##### ##### ##### # # # # ####
# # # # # # # # # # # # #
# # # # # # # # #### # #### # ####
#finds the password of a desired rar or zip file using a brute-force algorithm
##will fail to find the password if the password has a character that isn't in
##the English alphabet or isn't a number (you can change the char. list though)
#now using itertools!
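#a hedged sketch of the candidate order produced below by itertools.product:
# length 1: a, A, b, B, ...  then length 2: aa, aA, ab, aB, ...  and so on,
# i.e. every shorter password is tried before any longer one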
#importing needed modules
import time,os,sys,shutil,itertools
#checking if the user has unrar/p7zip installed
for which in ["unrar","p7zip"]:
if not shutil.which(which):
print("ERROR:",which,"isn't installed.\nExiting...")
sys.exit(-1)
#defining the function
def rc(rf):
alphabet="aAbBcCdDeEfFgGhHiIjJkKlLmMnNoOpPqQrRsStTuUvVwWxXyYzZ1234567890"
start=time.time()
tryn=0
for a in range(1,len(alphabet)+1):
for b in itertools.product(alphabet,repeat=a):
k="".join(b)
if rf[-4:]==".rar":
print("Trying:",k)
kf=os.popen("unrar t -y -p%s %s 2>&1|grep 'All OK'"%(k,rf))
tryn+=1
for rkf in kf.readlines():
if rkf=="All OK\n":
print("Found password:",repr(k))
print("Tried combination count:",tryn)
print("It took",round(time.time()-start,3),"seconds")
print("Exiting...")
time.sleep(2)
sys.exit(1)
elif rf[-4:]==".zip" or rf[-3:]==".7z":
print("Trying:",k)
kf=os.popen("7za t -p%s %s 2>&1|grep 'Everything is Ok'"%(k,rf))
tryn+=1
for rkf in kf.readlines():
if rkf=="Everything is Ok\n":
print("Found password:",repr(k))
print("Tried combination count:",tryn)
print("It took",round(time.time()-start,3),"seconds")
print("Exiting...")
time.sleep(2)
sys.exit(1)
else:
print("ERROR: File isnt a RAR, ZIP or 7z file.\nExiting...")
#checking if the file exists/running the function
if len(sys.argv)==2:
if os.path.exists(sys.argv[1]):
rc(sys.argv[1])
else:
print("ERROR: File doesn't exist.\nExiting...")
else:
print("Usage:",os.path.basename(__file__),"[rar file]")
print("Example:",os.path.basename(__file__),"foobar.rar")
|
jonDel/pyrarcr
|
old/pyrarcr-0.2.py
|
Python
|
mit
| 2,210
|
import asyncio
import websockets
import duplex
rpc = duplex.RPC("json")
@asyncio.coroutine
def echo(ch):
obj, _ = yield from ch.recv()
yield from ch.send(obj)
rpc.register("echo", echo)
@asyncio.coroutine
def do_msgbox(ch):
text, _ = yield from ch.recv()
yield from ch.call("msgbox", text, async=True)
rpc.register("doMsgbox", do_msgbox)
@asyncio.coroutine
def server(conn, path):
peer = yield from rpc.accept(conn)
yield from peer.route()
start_server = websockets.serve(server, 'localhost', 8001)
asyncio.get_event_loop().run_until_complete(start_server)
asyncio.get_event_loop().run_forever()
|
progrium/duplex
|
python/demo/demo.py
|
Python
|
mit
| 626
|
import json
from app.api import bp
from app.easyCI.scheduler import GitlabCIScheduler
from flask import current_app, url_for, make_response, request
from werkzeug.local import LocalProxy
logger = LocalProxy(lambda: current_app.logger)
@bp.route('/dashboard/', methods=['GET'])
def dashboard():
url = url_for('api.dashboard')
scheduler = GitlabCIScheduler()
data = scheduler.get_pipelines()
if not data:
response = make_response()
response.headers['Location'] = url
response.status_code = 204
return response
info = []
for (i, pipeline) in enumerate(data):
if i > 10:
break
if i == 0:
pipeline_id = pipeline["id"]
pipeline_vars = scheduler.get_pipeline_vars(pipeline_id)
print(pipeline_id, pipeline_vars)
info.append({
"id" : pipeline["id"],
"status" : pipeline["status"],
"created_at": pipeline["created_at"],
"updated_at": pipeline["updated_at"]
})
    return json.dumps(info), 200, {'Content-Type': 'application/json'}
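# A hedged sketch of the response body produced above (values illustrative only):
#   [{"id": 1234, "status": "success",
#     "created_at": "2021-01-01T00:00:00.000Z", "updated_at": "2021-01-01T00:05:00.000Z"}]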
|
znick/anytask
|
easyci2/flask/app/api/showDashboard.py
|
Python
|
mit
| 1,107
|
"""Tests for Vizio config flow."""
from __future__ import annotations
from contextlib import asynccontextmanager
from datetime import timedelta
from typing import Any
from unittest.mock import call, patch
import pytest
from pytest import raises
from pyvizio.api.apps import AppConfig
from pyvizio.const import (
APPS,
DEVICE_CLASS_SPEAKER as VIZIO_DEVICE_CLASS_SPEAKER,
DEVICE_CLASS_TV as VIZIO_DEVICE_CLASS_TV,
INPUT_APPS,
MAX_VOLUME,
UNKNOWN_APP,
)
import voluptuous as vol
from homeassistant.components.media_player import (
ATTR_INPUT_SOURCE,
ATTR_MEDIA_VOLUME_LEVEL,
ATTR_MEDIA_VOLUME_MUTED,
ATTR_SOUND_MODE,
DOMAIN as MP_DOMAIN,
SERVICE_MEDIA_NEXT_TRACK,
SERVICE_MEDIA_PREVIOUS_TRACK,
SERVICE_SELECT_SOUND_MODE,
SERVICE_SELECT_SOURCE,
SERVICE_TURN_OFF,
SERVICE_TURN_ON,
SERVICE_VOLUME_DOWN,
SERVICE_VOLUME_MUTE,
SERVICE_VOLUME_SET,
SERVICE_VOLUME_UP,
MediaPlayerDeviceClass,
)
from homeassistant.components.media_player.const import ATTR_INPUT_SOURCE_LIST
from homeassistant.components.vizio import validate_apps
from homeassistant.components.vizio.const import (
CONF_ADDITIONAL_CONFIGS,
CONF_APPS,
CONF_VOLUME_STEP,
DEFAULT_VOLUME_STEP,
DOMAIN,
SERVICE_UPDATE_SETTING,
VIZIO_SCHEMA,
)
from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF, STATE_ON, STATE_UNAVAILABLE
from homeassistant.core import HomeAssistant
from homeassistant.util import dt as dt_util
from .const import (
ADDITIONAL_APP_CONFIG,
APP_LIST,
APP_NAME_LIST,
CURRENT_APP,
CURRENT_APP_CONFIG,
CURRENT_EQ,
CURRENT_INPUT,
CUSTOM_CONFIG,
ENTITY_ID,
EQ_LIST,
INPUT_LIST,
INPUT_LIST_WITH_APPS,
MOCK_SPEAKER_APPS_FAILURE,
MOCK_SPEAKER_CONFIG,
MOCK_TV_APPS_FAILURE,
MOCK_TV_WITH_ADDITIONAL_APPS_CONFIG,
MOCK_TV_WITH_EXCLUDE_CONFIG,
MOCK_TV_WITH_INCLUDE_CONFIG,
MOCK_USER_VALID_TV_CONFIG,
NAME,
UNIQUE_ID,
UNKNOWN_APP_CONFIG,
VOLUME_STEP,
)
from tests.common import MockConfigEntry, async_fire_time_changed
async def _add_config_entry_to_hass(
hass: HomeAssistant, config_entry: MockConfigEntry
) -> None:
config_entry.add_to_hass(hass)
assert await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
def _get_ha_power_state(vizio_power_state: bool | None) -> str:
"""Return HA power state given Vizio power state."""
if vizio_power_state:
return STATE_ON
if vizio_power_state is False:
return STATE_OFF
return STATE_UNAVAILABLE
def _assert_sources_and_volume(attr: dict[str, Any], vizio_device_class: str) -> None:
"""Assert source list, source, and volume level based on attr dict and device class."""
assert attr[ATTR_INPUT_SOURCE_LIST] == INPUT_LIST
assert attr[ATTR_INPUT_SOURCE] == CURRENT_INPUT
assert (
attr["volume_level"]
== float(int(MAX_VOLUME[vizio_device_class] / 2))
/ MAX_VOLUME[vizio_device_class]
)
def _get_attr_and_assert_base_attr(
hass: HomeAssistant, device_class: str, power_state: str
) -> dict[str, Any]:
"""Return entity attributes after asserting name, device class, and power state."""
attr = hass.states.get(ENTITY_ID).attributes
assert attr["friendly_name"] == NAME
assert attr["device_class"] == device_class
assert hass.states.get(ENTITY_ID).state == power_state
return attr
@asynccontextmanager
async def _cm_for_test_setup_without_apps(
all_settings: dict[str, Any], vizio_power_state: bool | None
) -> None:
"""Context manager to setup test for Vizio devices without including app specific patches."""
with patch(
"homeassistant.components.vizio.media_player.VizioAsync.get_all_settings",
return_value=all_settings,
), patch(
"homeassistant.components.vizio.media_player.VizioAsync.get_setting_options",
return_value=EQ_LIST,
), patch(
"homeassistant.components.vizio.media_player.VizioAsync.get_power_state",
return_value=vizio_power_state,
):
yield
async def _test_setup_tv(hass: HomeAssistant, vizio_power_state: bool | None) -> None:
"""Test Vizio TV entity setup."""
ha_power_state = _get_ha_power_state(vizio_power_state)
config_entry = MockConfigEntry(
domain=DOMAIN,
data=vol.Schema(VIZIO_SCHEMA)(MOCK_USER_VALID_TV_CONFIG),
unique_id=UNIQUE_ID,
)
async with _cm_for_test_setup_without_apps(
{"volume": int(MAX_VOLUME[VIZIO_DEVICE_CLASS_TV] / 2), "mute": "Off"},
vizio_power_state,
):
await _add_config_entry_to_hass(hass, config_entry)
attr = _get_attr_and_assert_base_attr(
hass, MediaPlayerDeviceClass.TV, ha_power_state
)
if ha_power_state == STATE_ON:
_assert_sources_and_volume(attr, VIZIO_DEVICE_CLASS_TV)
assert "sound_mode" not in attr
async def _test_setup_speaker(
hass: HomeAssistant, vizio_power_state: bool | None
) -> None:
"""Test Vizio Speaker entity setup."""
ha_power_state = _get_ha_power_state(vizio_power_state)
config_entry = MockConfigEntry(
domain=DOMAIN,
data=vol.Schema(VIZIO_SCHEMA)(MOCK_SPEAKER_CONFIG),
unique_id=UNIQUE_ID,
)
audio_settings = {
"volume": int(MAX_VOLUME[VIZIO_DEVICE_CLASS_SPEAKER] / 2),
"mute": "Off",
"eq": CURRENT_EQ,
}
async with _cm_for_test_setup_without_apps(
audio_settings,
vizio_power_state,
):
with patch(
"homeassistant.components.vizio.media_player.VizioAsync.get_current_app_config",
) as service_call:
await _add_config_entry_to_hass(hass, config_entry)
attr = _get_attr_and_assert_base_attr(
hass, MediaPlayerDeviceClass.SPEAKER, ha_power_state
)
if ha_power_state == STATE_ON:
_assert_sources_and_volume(attr, VIZIO_DEVICE_CLASS_SPEAKER)
assert not service_call.called
assert "sound_mode" in attr
@asynccontextmanager
async def _cm_for_test_setup_tv_with_apps(
hass: HomeAssistant, device_config: dict[str, Any], app_config: dict[str, Any]
) -> None:
"""Context manager to setup test for Vizio TV with support for apps."""
config_entry = MockConfigEntry(
domain=DOMAIN, data=vol.Schema(VIZIO_SCHEMA)(device_config), unique_id=UNIQUE_ID
)
async with _cm_for_test_setup_without_apps(
{"volume": int(MAX_VOLUME[VIZIO_DEVICE_CLASS_TV] / 2), "mute": "Off"},
True,
):
with patch(
"homeassistant.components.vizio.media_player.VizioAsync.get_current_app_config",
return_value=AppConfig(**app_config),
):
await _add_config_entry_to_hass(hass, config_entry)
attr = _get_attr_and_assert_base_attr(
hass, MediaPlayerDeviceClass.TV, STATE_ON
)
assert (
attr["volume_level"]
== float(int(MAX_VOLUME[VIZIO_DEVICE_CLASS_TV] / 2))
/ MAX_VOLUME[VIZIO_DEVICE_CLASS_TV]
)
yield
def _assert_source_list_with_apps(
list_to_test: list[str], attr: dict[str, Any]
) -> None:
"""Assert source list matches list_to_test after removing INPUT_APPS from list."""
for app_to_remove in INPUT_APPS:
if app_to_remove in list_to_test:
list_to_test.remove(app_to_remove)
assert attr[ATTR_INPUT_SOURCE_LIST] == list_to_test
async def _test_service(
hass: HomeAssistant,
domain: str,
vizio_func_name: str,
ha_service_name: str,
additional_service_data: dict[str, Any] | None,
*args,
**kwargs,
) -> None:
"""Test generic Vizio media player entity service."""
kwargs["log_api_exception"] = False
service_data = {ATTR_ENTITY_ID: ENTITY_ID}
if additional_service_data:
service_data.update(additional_service_data)
with patch(
f"homeassistant.components.vizio.media_player.VizioAsync.{vizio_func_name}"
) as service_call:
await hass.services.async_call(
domain,
ha_service_name,
service_data=service_data,
blocking=True,
)
assert service_call.called
if args or kwargs:
assert service_call.call_args == call(*args, **kwargs)
async def test_speaker_on(
hass: HomeAssistant,
vizio_connect: pytest.fixture,
vizio_update: pytest.fixture,
) -> None:
"""Test Vizio Speaker entity setup when on."""
await _test_setup_speaker(hass, True)
async def test_speaker_off(
hass: HomeAssistant,
vizio_connect: pytest.fixture,
vizio_update: pytest.fixture,
) -> None:
"""Test Vizio Speaker entity setup when off."""
await _test_setup_speaker(hass, False)
async def test_speaker_unavailable(
hass: HomeAssistant,
vizio_connect: pytest.fixture,
vizio_update: pytest.fixture,
) -> None:
"""Test Vizio Speaker entity setup when unavailable."""
await _test_setup_speaker(hass, None)
async def test_init_tv_on(
hass: HomeAssistant,
vizio_connect: pytest.fixture,
vizio_update: pytest.fixture,
) -> None:
"""Test Vizio TV entity setup when on."""
await _test_setup_tv(hass, True)
async def test_init_tv_off(
hass: HomeAssistant,
vizio_connect: pytest.fixture,
vizio_update: pytest.fixture,
) -> None:
"""Test Vizio TV entity setup when off."""
await _test_setup_tv(hass, False)
async def test_init_tv_unavailable(
hass: HomeAssistant,
vizio_connect: pytest.fixture,
vizio_update: pytest.fixture,
) -> None:
"""Test Vizio TV entity setup when unavailable."""
await _test_setup_tv(hass, None)
async def test_setup_unavailable_speaker(
hass: HomeAssistant, vizio_cant_connect: pytest.fixture
) -> None:
"""Test speaker entity sets up as unavailable."""
config_entry = MockConfigEntry(
domain=DOMAIN, data=MOCK_SPEAKER_CONFIG, unique_id=UNIQUE_ID
)
await _add_config_entry_to_hass(hass, config_entry)
assert len(hass.states.async_entity_ids(MP_DOMAIN)) == 1
assert hass.states.get("media_player.vizio").state == STATE_UNAVAILABLE
async def test_setup_unavailable_tv(
hass: HomeAssistant, vizio_cant_connect: pytest.fixture
) -> None:
"""Test TV entity sets up as unavailable."""
config_entry = MockConfigEntry(
domain=DOMAIN, data=MOCK_USER_VALID_TV_CONFIG, unique_id=UNIQUE_ID
)
await _add_config_entry_to_hass(hass, config_entry)
assert len(hass.states.async_entity_ids(MP_DOMAIN)) == 1
assert hass.states.get("media_player.vizio").state == STATE_UNAVAILABLE
async def test_services(
hass: HomeAssistant,
vizio_connect: pytest.fixture,
vizio_update: pytest.fixture,
) -> None:
"""Test all Vizio media player entity services."""
await _test_setup_tv(hass, True)
await _test_service(hass, MP_DOMAIN, "pow_on", SERVICE_TURN_ON, None)
await _test_service(hass, MP_DOMAIN, "pow_off", SERVICE_TURN_OFF, None)
await _test_service(
hass,
MP_DOMAIN,
"mute_on",
SERVICE_VOLUME_MUTE,
{ATTR_MEDIA_VOLUME_MUTED: True},
)
await _test_service(
hass,
MP_DOMAIN,
"mute_off",
SERVICE_VOLUME_MUTE,
{ATTR_MEDIA_VOLUME_MUTED: False},
)
await _test_service(
hass,
MP_DOMAIN,
"set_input",
SERVICE_SELECT_SOURCE,
{ATTR_INPUT_SOURCE: "USB"},
"USB",
)
await _test_service(
hass, MP_DOMAIN, "vol_up", SERVICE_VOLUME_UP, None, num=DEFAULT_VOLUME_STEP
)
await _test_service(
hass, MP_DOMAIN, "vol_down", SERVICE_VOLUME_DOWN, None, num=DEFAULT_VOLUME_STEP
)
await _test_service(
hass,
MP_DOMAIN,
"vol_up",
SERVICE_VOLUME_SET,
{ATTR_MEDIA_VOLUME_LEVEL: 1},
num=(100 - 15),
)
await _test_service(
hass,
MP_DOMAIN,
"vol_down",
SERVICE_VOLUME_SET,
{ATTR_MEDIA_VOLUME_LEVEL: 0},
num=(15 - 0),
)
await _test_service(hass, MP_DOMAIN, "ch_up", SERVICE_MEDIA_NEXT_TRACK, None)
await _test_service(hass, MP_DOMAIN, "ch_down", SERVICE_MEDIA_PREVIOUS_TRACK, None)
await _test_service(
hass,
MP_DOMAIN,
"set_setting",
SERVICE_SELECT_SOUND_MODE,
{ATTR_SOUND_MODE: "Music"},
"audio",
"eq",
"Music",
)
# Test that the update_setting service does config validation/transformation correctly
await _test_service(
hass,
DOMAIN,
"set_setting",
SERVICE_UPDATE_SETTING,
{"setting_type": "Audio", "setting_name": "AV Delay", "new_value": "0"},
"audio",
"av_delay",
0,
)
await _test_service(
hass,
DOMAIN,
"set_setting",
SERVICE_UPDATE_SETTING,
{"setting_type": "Audio", "setting_name": "EQ", "new_value": "Music"},
"audio",
"eq",
"Music",
)
async def test_options_update(
hass: HomeAssistant,
vizio_connect: pytest.fixture,
vizio_update: pytest.fixture,
) -> None:
"""Test when config entry update event fires."""
await _test_setup_speaker(hass, True)
config_entry = hass.config_entries.async_entries(DOMAIN)[0]
assert config_entry.options
new_options = config_entry.options.copy()
updated_options = {CONF_VOLUME_STEP: VOLUME_STEP}
new_options.update(updated_options)
hass.config_entries.async_update_entry(
entry=config_entry,
options=new_options,
)
assert config_entry.options == updated_options
await _test_service(
hass, MP_DOMAIN, "vol_up", SERVICE_VOLUME_UP, None, num=VOLUME_STEP
)
async def _test_update_availability_switch(
hass: HomeAssistant,
initial_power_state: bool | None,
final_power_state: bool | None,
caplog: pytest.fixture,
) -> None:
now = dt_util.utcnow()
future_interval = timedelta(minutes=1)
# Setup device as if time is right now
with patch("homeassistant.util.dt.utcnow", return_value=now):
await _test_setup_speaker(hass, initial_power_state)
# Clear captured logs so that only availability state changes are captured for
# future assertion
caplog.clear()
# Fast forward time to future twice to trigger update and assert vizio log message
for i in range(1, 3):
future = now + (future_interval * i)
with patch(
"homeassistant.components.vizio.media_player.VizioAsync.get_power_state",
return_value=final_power_state,
), patch("homeassistant.util.dt.utcnow", return_value=future), patch(
"homeassistant.util.utcnow", return_value=future
):
async_fire_time_changed(hass, future)
await hass.async_block_till_done()
if final_power_state is None:
assert hass.states.get(ENTITY_ID).state == STATE_UNAVAILABLE
else:
assert hass.states.get(ENTITY_ID).state != STATE_UNAVAILABLE
# Ensure connection status messages from vizio.media_player appear exactly once
# (on availability state change)
vizio_log_list = [
log
for log in caplog.records
if log.name == "homeassistant.components.vizio.media_player"
]
assert len(vizio_log_list) == 1
async def test_update_unavailable_to_available(
hass: HomeAssistant,
vizio_connect: pytest.fixture,
vizio_update: pytest.fixture,
caplog: pytest.fixture,
) -> None:
"""Test device becomes available after being unavailable."""
await _test_update_availability_switch(hass, None, True, caplog)
async def test_update_available_to_unavailable(
hass: HomeAssistant,
vizio_connect: pytest.fixture,
vizio_update: pytest.fixture,
caplog: pytest.fixture,
) -> None:
"""Test device becomes unavailable after being available."""
await _test_update_availability_switch(hass, True, None, caplog)
async def test_setup_with_apps(
hass: HomeAssistant,
vizio_connect: pytest.fixture,
vizio_update_with_apps: pytest.fixture,
caplog: pytest.fixture,
) -> None:
"""Test device setup with apps."""
async with _cm_for_test_setup_tv_with_apps(
hass, MOCK_USER_VALID_TV_CONFIG, CURRENT_APP_CONFIG
):
attr = hass.states.get(ENTITY_ID).attributes
_assert_source_list_with_apps(list(INPUT_LIST_WITH_APPS + APP_NAME_LIST), attr)
assert CURRENT_APP in attr[ATTR_INPUT_SOURCE_LIST]
assert attr[ATTR_INPUT_SOURCE] == CURRENT_APP
assert attr["app_name"] == CURRENT_APP
assert "app_id" not in attr
await _test_service(
hass,
MP_DOMAIN,
"launch_app",
SERVICE_SELECT_SOURCE,
{ATTR_INPUT_SOURCE: CURRENT_APP},
CURRENT_APP,
APP_LIST,
)
async def test_setup_with_apps_include(
hass: HomeAssistant,
vizio_connect: pytest.fixture,
vizio_update_with_apps: pytest.fixture,
caplog: pytest.fixture,
) -> None:
"""Test device setup with apps and apps["include"] in config."""
async with _cm_for_test_setup_tv_with_apps(
hass, MOCK_TV_WITH_INCLUDE_CONFIG, CURRENT_APP_CONFIG
):
attr = hass.states.get(ENTITY_ID).attributes
_assert_source_list_with_apps(list(INPUT_LIST_WITH_APPS + [CURRENT_APP]), attr)
assert CURRENT_APP in attr[ATTR_INPUT_SOURCE_LIST]
assert attr[ATTR_INPUT_SOURCE] == CURRENT_APP
assert attr["app_name"] == CURRENT_APP
assert "app_id" not in attr
async def test_setup_with_apps_exclude(
hass: HomeAssistant,
vizio_connect: pytest.fixture,
vizio_update_with_apps: pytest.fixture,
caplog: pytest.fixture,
) -> None:
"""Test device setup with apps and apps["exclude"] in config."""
async with _cm_for_test_setup_tv_with_apps(
hass, MOCK_TV_WITH_EXCLUDE_CONFIG, CURRENT_APP_CONFIG
):
attr = hass.states.get(ENTITY_ID).attributes
_assert_source_list_with_apps(list(INPUT_LIST_WITH_APPS + [CURRENT_APP]), attr)
assert CURRENT_APP in attr[ATTR_INPUT_SOURCE_LIST]
assert attr[ATTR_INPUT_SOURCE] == CURRENT_APP
assert attr["app_name"] == CURRENT_APP
assert "app_id" not in attr
async def test_setup_with_apps_additional_apps_config(
hass: HomeAssistant,
vizio_connect: pytest.fixture,
vizio_update_with_apps: pytest.fixture,
caplog: pytest.fixture,
) -> None:
"""Test device setup with apps and apps["additional_configs"] in config."""
async with _cm_for_test_setup_tv_with_apps(
hass,
MOCK_TV_WITH_ADDITIONAL_APPS_CONFIG,
ADDITIONAL_APP_CONFIG["config"],
):
attr = hass.states.get(ENTITY_ID).attributes
assert attr[ATTR_INPUT_SOURCE_LIST].count(CURRENT_APP) == 1
_assert_source_list_with_apps(
list(
INPUT_LIST_WITH_APPS
+ APP_NAME_LIST
+ [
app["name"]
for app in MOCK_TV_WITH_ADDITIONAL_APPS_CONFIG[CONF_APPS][
CONF_ADDITIONAL_CONFIGS
]
if app["name"] not in APP_NAME_LIST
]
),
attr,
)
assert ADDITIONAL_APP_CONFIG["name"] in attr[ATTR_INPUT_SOURCE_LIST]
assert attr[ATTR_INPUT_SOURCE] == ADDITIONAL_APP_CONFIG["name"]
assert attr["app_name"] == ADDITIONAL_APP_CONFIG["name"]
assert "app_id" not in attr
await _test_service(
hass,
MP_DOMAIN,
"launch_app",
SERVICE_SELECT_SOURCE,
{ATTR_INPUT_SOURCE: "Netflix"},
"Netflix",
APP_LIST,
)
await _test_service(
hass,
MP_DOMAIN,
"launch_app_config",
SERVICE_SELECT_SOURCE,
{ATTR_INPUT_SOURCE: CURRENT_APP},
**CUSTOM_CONFIG,
)
# Test that invalid app does nothing
with patch(
"homeassistant.components.vizio.media_player.VizioAsync.launch_app"
) as service_call1, patch(
"homeassistant.components.vizio.media_player.VizioAsync.launch_app_config"
) as service_call2:
await hass.services.async_call(
MP_DOMAIN,
SERVICE_SELECT_SOURCE,
service_data={ATTR_ENTITY_ID: ENTITY_ID, ATTR_INPUT_SOURCE: "_"},
blocking=True,
)
assert not service_call1.called
assert not service_call2.called
def test_invalid_apps_config(hass: HomeAssistant):
"""Test that schema validation fails on certain conditions."""
with raises(vol.Invalid):
vol.Schema(vol.All(VIZIO_SCHEMA, validate_apps))(MOCK_TV_APPS_FAILURE)
with raises(vol.Invalid):
vol.Schema(vol.All(VIZIO_SCHEMA, validate_apps))(MOCK_SPEAKER_APPS_FAILURE)
async def test_setup_with_unknown_app_config(
hass: HomeAssistant,
vizio_connect: pytest.fixture,
vizio_update_with_apps: pytest.fixture,
caplog: pytest.fixture,
) -> None:
"""Test device setup with apps where app config returned is unknown."""
async with _cm_for_test_setup_tv_with_apps(
hass, MOCK_USER_VALID_TV_CONFIG, UNKNOWN_APP_CONFIG
):
attr = hass.states.get(ENTITY_ID).attributes
_assert_source_list_with_apps(list(INPUT_LIST_WITH_APPS + APP_NAME_LIST), attr)
assert attr[ATTR_INPUT_SOURCE] == UNKNOWN_APP
assert attr["app_name"] == UNKNOWN_APP
assert attr["app_id"] == UNKNOWN_APP_CONFIG
async def test_setup_with_no_running_app(
hass: HomeAssistant,
vizio_connect: pytest.fixture,
vizio_update_with_apps: pytest.fixture,
caplog: pytest.fixture,
) -> None:
"""Test device setup with apps where no app is running."""
async with _cm_for_test_setup_tv_with_apps(
hass, MOCK_USER_VALID_TV_CONFIG, vars(AppConfig())
):
attr = hass.states.get(ENTITY_ID).attributes
_assert_source_list_with_apps(list(INPUT_LIST_WITH_APPS + APP_NAME_LIST), attr)
assert attr[ATTR_INPUT_SOURCE] == "CAST"
assert "app_id" not in attr
assert "app_name" not in attr
async def test_setup_tv_without_mute(
hass: HomeAssistant,
vizio_connect: pytest.fixture,
vizio_update: pytest.fixture,
) -> None:
"""Test Vizio TV entity setup when mute property isn't returned by Vizio API."""
config_entry = MockConfigEntry(
domain=DOMAIN,
data=vol.Schema(VIZIO_SCHEMA)(MOCK_USER_VALID_TV_CONFIG),
unique_id=UNIQUE_ID,
)
async with _cm_for_test_setup_without_apps(
{"volume": int(MAX_VOLUME[VIZIO_DEVICE_CLASS_TV] / 2)},
STATE_ON,
):
await _add_config_entry_to_hass(hass, config_entry)
attr = _get_attr_and_assert_base_attr(hass, MediaPlayerDeviceClass.TV, STATE_ON)
_assert_sources_and_volume(attr, VIZIO_DEVICE_CLASS_TV)
assert "sound_mode" not in attr
assert "is_volume_muted" not in attr
async def test_apps_update(
hass: HomeAssistant,
vizio_connect: pytest.fixture,
vizio_update_with_apps: pytest.fixture,
caplog: pytest.fixture,
) -> None:
"""Test device setup with apps where no app is running."""
with patch(
"homeassistant.components.vizio.gen_apps_list_from_url",
return_value=None,
):
async with _cm_for_test_setup_tv_with_apps(
hass, MOCK_USER_VALID_TV_CONFIG, vars(AppConfig())
):
# Check source list, remove TV inputs, and verify that the integration is
# using the default APPS list
sources = hass.states.get(ENTITY_ID).attributes[ATTR_INPUT_SOURCE_LIST]
apps = list(set(sources) - set(INPUT_LIST))
assert len(apps) == len(APPS)
with patch(
"homeassistant.components.vizio.gen_apps_list_from_url",
return_value=APP_LIST,
):
async_fire_time_changed(hass, dt_util.now() + timedelta(days=2))
await hass.async_block_till_done()
# Check source list, remove TV inputs, and verify that the integration is
# now using the APP_LIST list
sources = hass.states.get(ENTITY_ID).attributes[ATTR_INPUT_SOURCE_LIST]
apps = list(set(sources) - set(INPUT_LIST))
assert len(apps) == len(APP_LIST)
async def test_vizio_update_with_apps_on_input(
hass: HomeAssistant, vizio_connect, vizio_update_with_apps_on_input
) -> None:
"""Test a vizio TV with apps that is on a TV input."""
config_entry = MockConfigEntry(
domain=DOMAIN,
data=vol.Schema(VIZIO_SCHEMA)(MOCK_USER_VALID_TV_CONFIG),
unique_id=UNIQUE_ID,
)
await _add_config_entry_to_hass(hass, config_entry)
attr = _get_attr_and_assert_base_attr(hass, MediaPlayerDeviceClass.TV, STATE_ON)
# app ID should not be in the attributes
assert "app_id" not in attr
|
rohitranjan1991/home-assistant
|
tests/components/vizio/test_media_player.py
|
Python
|
mit
| 25,064
|
'''
Copyright 2015-2020 HENNGE K.K. (formerly known as HDE, Inc.)
Licensed under MIT.
'''
import json
def read_event(path):
with open(path) as event:
data = json.load(event)
return data
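# Editor sketch (not part of the original file): typical use is to load a saved Lambda
# test event from disk before invoking a handler, e.g.:
#     event = read_event("event.json")   # "event.json" is an assumed file name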
|
HDE/python-lambda-local
|
lambda_local/event.py
|
Python
|
mit
| 206
|
import numpy as np
import matplotlib.pyplot as plt
import pymc as pm
def main():
sample_size = 100000
expected_value = lambda_ = 4.5
N_samples = range(1, sample_size, 100)
for k in range(3):
samples = pm.rpoisson(lambda_, size=sample_size)
partial_average = [samples[:i].mean() for i in N_samples]
label = "average of $n$ samples; seq. %d" % k
plt.plot(N_samples, partial_average, lw=1.5, label=label)
plt.plot(N_samples, expected_value * np.ones_like(partial_average),
ls="--", label="true expected value", c="k")
plt.ylim(4.35, 4.65)
plt.title("Convergence of the average of \n random variables to its" +
"expected value")
plt.ylabel("average of $n$ samples")
plt.xlabel("# of samples, $n$")
plt.legend()
plt.show()
if __name__ == '__main__':
main()
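# Editor note (not in the original file): pm.rpoisson is the PyMC 2 sampler; the same
# draws can be produced with NumPy alone, which leaves the rest of the convergence plot
# unchanged:
#     samples = np.random.poisson(lambda_, size=sample_size)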
|
noelevans/sandpit
|
bayesian_methods_for_hackers/LoLN_convergence_examples_ch04.py
|
Python
|
mit
| 867
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.11 on 2017-01-09 19:17
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('productdb', '0024_auto_20161227_1015'),
]
operations = [
migrations.AlterField(
model_name='productcheck',
name='input_product_ids',
field=models.CharField(help_text='unordered Product IDs, separated by line breaks or semicolon', max_length=65536, verbose_name='Product ID list'),
),
]
|
hoelsner/product-database
|
app/productdb/migrations/0025_auto_20170109_2017.py
|
Python
|
mit
| 579
|
"""
This file includes commonly used utilities for this app.
"""
from datetime import datetime
today = datetime.now()
year = today.year
month = today.month
day = today.day
# The following are helper functions for building image upload paths. The first two are
# used for the front and back images of a product; the (commented-out) last two do the
# same for design products.
def front_image(instance, filename):
    # file will be uploaded to MEDIA_ROOT/product_imgs/owner_<id>/product_<id>/Y/m/d/front/<filename>
    # the date components are taken at upload time rather than at module import time
    now = datetime.now()
    return 'product_imgs/owner_{0}/product_{1}/{2}/{3}/{4}/front/{5}'.format(instance.owner.id, instance.slug, now.year, now.month, now.day, filename)
def back_image(instance, filename):
    # file will be uploaded to MEDIA_ROOT/product_imgs/owner_<id>/product_<id>/Y/m/d/back/<filename>
    now = datetime.now()
    return 'product_imgs/owner_{0}/product_{1}/{2}/{3}/{4}/back/{5}'.format(instance.owner.id, instance.slug, now.year, now.month, now.day, filename)
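# Editor sketch (not part of the original file): these helpers have the shape of Django
# ``upload_to`` callables. The model and field names below are illustrative assumptions,
# not taken from this project.
#
#     from django.db import models
#
#     class Product(models.Model):
#         slug = models.SlugField()
#         owner = models.ForeignKey('auth.User', on_delete=models.CASCADE)
#         front = models.ImageField(upload_to=front_image)
#         back = models.ImageField(upload_to=back_image)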
'''
def front_design_image(instance, filename):
# file will be uploaded to MEDIA_ROOT/product_imgs/designer_<id>/design_product_<id>/Y/m/d/front/<filename>
return 'product_imgs/designer_{0}/design_product_{1}/{2}/{3}/{4}/front/{5}'.format(instance.designer.id, instance.id, year, month, day, filename)
def back_design_image(instance, filename):
# file will be uploaded to MEDIA_ROOT/product_imgs/designer_<id>/design_product_<id>/Y/m/d/back/<filename>
return 'product_imgs/designer_{0}/design_product_{1}/{2}/{3}/{4}/back/{5}'.format(instance.designer.id, instance.id, year, month, day, filename)
'''
def fill_category_tree(model, deep=0, parent_id=0, tree=[]):
    '''
    NAME::
        fill_category_tree
    DESCRIPTION::
        Walks a model that has a ``parent`` field (typically a product category
        table) and builds a tree-shaped choices structure.
    PARAMETERS::
        :param model: the model to traverse; it must have a ``parent`` attribute
        :param deep: indentation level; parent/child nesting is shown by
            prefixing the name with that many dashes ('-')
        :param parent_id: id of the parent category to start from; 0 means start
            from the top level
        :param tree: the tree tuple being built
    RETURN::
        No return value is strictly needed, but the tree is returned in case a
        caller wants it.
    USAGE::
        Call it like this:
            choices = [()]
            fill_category_tree(Category, tree=choices)
        A list ([]) is used instead of a tuple (()) because only a mutable
        object is passed "by reference" and can be filled in place.
    '''
    if parent_id == 0:
        ts = model.objects.filter(parent=None)
        # tree[0] += ((None, 'Select a product type'),)
        for t in ts:
            tmp = [()]
            fill_category_tree(model, 4, t.id, tmp)
            tree[0] += ((t.id, '-' * deep + t.name,),)
            for tt in tmp[0]:
                tree[0] += (tt,)
    else:
        ts = model.objects.filter(parent_id=parent_id)
        for t in ts:
            tree[0] += ((t.id, '-' * deep + t.name,),)
            fill_category_tree(model, deep + 4, t.id, tree)
    return tree
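# Editor sketch (not part of the original file): the generated tree can feed a Django
# ChoiceField. The ``Category`` import path and the form name are assumptions.
#
#     from django import forms
#     from shop.models import Category
#
#     class ProductTypeForm(forms.Form):
#         choices = [()]
#         fill_category_tree(Category, tree=choices)
#         category = forms.ChoiceField(choices=choices[0], label='Product type')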
|
sunlaiqi/fundiy
|
src/shop/utils.py
|
Python
|
mit
| 2,973
|
import pytest
from freezegun import freeze_time
import demistomock as demisto
integration_params = {
'url': 'http://test.com',
'credentials': {'identifier': 'test', 'password': 'pass'},
'fetch_time': '3 days',
'proxy': 'false',
'unsecure': 'false',
}
@pytest.fixture(autouse=True)
def set_mocks(mocker):
mocker.patch.object(demisto, 'params', return_value=integration_params)
@freeze_time("2021-07-10T16:34:14.758295 UTC+1")
def test_fetch_incidents_first_time_fetch(mocker):
"""
Given
- fetch incidents command
- command args
When
- mock the integration parameters
Then
- Validate that the last_time is as the now time(not changed, not of the incident)
"""
mocker.patch.object(demisto, 'command', return_value='fetch-incidents')
from RedLock import fetch_incidents
mocker.patch('RedLock.req', return_value=[])
_, next_run = fetch_incidents()
assert next_run == 1625938454758
def test_redlock_list_scans(mocker):
"""
Given
- The response from the API call of redlock-list-scans command.
When
- calling redlock-list-scans
Then
- Validate that the readable output and the context entry of the command is as expected
"""
from RedLock import redlock_list_scans
list_scans_response = {
'data': [{
'id': '111111111',
'attributes': {
'name': ['test name'],
'type': ['test type'],
'user': ['test user'],
'scanTime': '2021-10-18T14:38:53.654174'
}
}]
}
expected_readable_output = '### Scans List:\n|ID|Name|Scan Time|Type|User|\n|---|---|---|---|---|\n| 111111111 |' \
' test name | 2021-10-18T14:38:53.654174 | test type | test user |\n'
expected_context_entry = {'Redlock.Scans(val.id == obj.id)': [{'id': '111111111',
'name': ['test name'],
'type': ['test type'],
'user': ['test user'],
'scanTime': '2021-10-18T14:38:53.654174'}]}
mocker.patch('RedLock.req', return_value=list_scans_response)
mocker.patch.object(demisto, 'results')
redlock_list_scans()
assert demisto.results.call_args[0][0].get('HumanReadable') == expected_readable_output
assert demisto.results.call_args[0][0].get('EntryContext') == expected_context_entry
def test_redlock_get_scan_status(mocker):
"""
Given
- The response from the API call of redlock-get-scan-status command.
When
- calling redlock-get-scan-status
Then
- Validate that the readable output and the context entry of the command is as expected
"""
from RedLock import redlock_get_scan_status
get_status_response = {
'data': {
'id': '111111111',
'attributes': {
'status': 'test'
}
}
}
expected_readable_output = '### Scan Status:\n|ID|Status|\n|---|---|\n| 111111111 | test |\n'
expected_context_entry = {'Redlock.Scans(val.id == obj.id)': {'id': '111111111',
'status': 'test'}}
mocker.patch('RedLock.req', return_value=get_status_response)
mocker.patch.object(demisto, 'results')
redlock_get_scan_status()
assert demisto.results.call_args[0][0].get('HumanReadable') == expected_readable_output
assert demisto.results.call_args[0][0].get('EntryContext') == expected_context_entry
def test_redlock_get_scan_results(mocker):
"""
Given
- The response from the API call of redlock-get-scan-result command.
When
- calling redlock-get-scan-result
Then
- Validate that the readable output and the context entry of the command is as expected
"""
from RedLock import redlock_get_scan_results
get_result_response = {
'data': [{
'id': '111111111',
'attributes': {
'name': 'test',
'policyId': '2222',
'desc': 'test',
'severity': 'high'
}}]
}
expected_readable_output = '### Scan Results:\n|Description|ID|Name|Policy ID|Severity|\n|---|---|---|---|---|\n|' \
' test | 111111111 | test | 2222 | high |\n'
expected_context_entry = {'Redlock.Scans(val.id == obj.id)': {'id': None,
'results': [
{'id': '111111111',
'attributes': {'name': 'test',
'policyId': '2222',
'desc': 'test',
'severity': 'high'}}]}}
mocker.patch('RedLock.req', return_value=get_result_response)
mocker.patch.object(demisto, 'results')
redlock_get_scan_results()
assert demisto.results.call_args[0][0].get('HumanReadable') == expected_readable_output
assert demisto.results.call_args[0][0].get('EntryContext') == expected_context_entry
|
VirusTotal/content
|
Packs/PrismaCloud/Integrations/RedLock/RedLock_test.py
|
Python
|
mit
| 5,638
|
""" Run this file to run bots as a standalone application, detached from the webapp """
from snoohelper.utils.teams import SlackTeamsController
TESTING = False
def main():
if not TESTING:
SlackTeamsController("teams.ini", 'snoohelper_master.db')
else:
SlackTeamsController("teams_test.ini", 'snoohelper_test.db')
if __name__ == "__main__":
main()
|
Santi871/SnooHelper
|
standalone_bot.py
|
Python
|
mit
| 381
|
f = open("io/data/file1")
print(f.read(5))
print(f.readline())
print(f.read())
|
aitjcize/micropython
|
tests/io/file1.py
|
Python
|
mit
| 79
|
from subprocess import check_call, call, Popen, PIPE
import os
import textwrap
import glob
os.putenv("DEBIAN_FRONTEND", "noninteractive")
#######
## Plumbing
#######
def get_output(cmd, **kwargs):
check = kwargs.pop("check", True)
kwargs["stdout"] = PIPE
p = Popen(cmd, **kwargs)
stdout, stderr = p.communicate()
if check and p.returncode:
raise ValueError("%r return code %s" % (cmd, p.returncode))
return stdout
def sh(cmd):
check_call(cmd, shell=True)
def shh(cmd):
get_output(cmd, shell=True)
#######
## Packages
#######
def add_apt_key(url):
sh("wget -O - %s | apt-key add -" % url)
def add_apt_repo(name, spec):
with file("/etc/apt/sources.list.d/%s.list" % name, "wb") as outf:
outf.write("deb %s\n" % spec)
sh("apt-get update")
def install(*packages):
sh("apt-get install -y --no-install-recommends %s" % " ".join(packages))
def get_packages():
return set(
l.split()[0]
for l in get_output("dpkg --get-selections", shell=True).splitlines()
if l
)
def has_package(*check_packages):
all_packages = get_packages()
return (set(check_packages) <= all_packages)
def setup_apt_cacher_ng(apt_cacher_ng_url):
proxy_config_file = "/etc/apt/apt.conf.d/90proxy"
proxy_url = apt_cacher_ng_url.rstrip("/")
if proxy_url in read(proxy_config_file):
print "Apt proxy already configured"
return
try:
import urllib
data = urllib.urlopen(apt_cacher_ng_url).read()
except:
print "Could not acquire apt proxy settings"
return
if "APT Reconfiguration required" in data: # Looks like a valid apt-cacher-ng page
write(proxy_config_file, """Acquire::http { Proxy "%s"; };""" % proxy_url)
print "Apt proxy activated"
else:
print "Not a proper apt proxy"
#######
## File damagement
#######
def has_file(path):
return os.path.exists(path)
def nuke(*specs):
for spec in specs:
for filename in glob.glob(spec):
if os.path.isfile(filename):
print "nuking: %s" % filename
os.unlink(filename)
def write(filename, content):
with file(filename, "wb") as out_f:
out_f.write(textwrap.dedent(content.strip("\n\r")))
def read(filename):
if os.path.isfile(filename):
with file(filename, "rb") as in_f:
return in_f.read()
return ""
#######
## Services
#######
def restart(service):
sh("service %s restart" % service)
#######
## Macros
#######
def configure_etckeeper():
if not has_package("etckeeper"):
install("etckeeper", "git-core")
write("/etc/etckeeper/etckeeper.conf", """
VCS="git"
GIT_COMMIT_OPTIONS=""
HIGHLEVEL_PACKAGE_MANAGER=apt
LOWLEVEL_PACKAGE_MANAGER=dpkg
""")
sh("etckeeper init")
sh("etckeeper commit initial")
print "etckeeper provisioned"
|
akx/requiem
|
requiem.py
|
Python
|
mit
| 2,960
|
# Python stubs generated by omniidl from /usr/local/share/idl/omniORB/COS/CosRelationships.idl
# DO NOT EDIT THIS FILE!
import omniORB, _omnipy
from omniORB import CORBA, PortableServer
_0_CORBA = CORBA
_omnipy.checkVersion(4,2, __file__, 1)
try:
property
except NameError:
def property(*args):
return None
# #include "corbaidl.idl"
import corbaidl_idl
_0_CORBA = omniORB.openModule("CORBA")
_0_CORBA__POA = omniORB.openModule("CORBA__POA")
# #include "boxes.idl"
import boxes_idl
_0_CORBA = omniORB.openModule("CORBA")
_0_CORBA__POA = omniORB.openModule("CORBA__POA")
# #include "ir.idl"
import ir_idl
_0_CORBA = omniORB.openModule("CORBA")
_0_CORBA__POA = omniORB.openModule("CORBA__POA")
# #include "CosObjectIdentity.idl"
import CosObjectIdentity_idl
_0_CosObjectIdentity = omniORB.openModule("CosObjectIdentity")
_0_CosObjectIdentity__POA = omniORB.openModule("CosObjectIdentity__POA")
#
# Start of module "CosRelationships"
#
__name__ = "CosRelationships"
_0_CosRelationships = omniORB.openModule("CosRelationships", r"/usr/local/share/idl/omniORB/COS/CosRelationships.idl")
_0_CosRelationships__POA = omniORB.openModule("CosRelationships__POA", r"/usr/local/share/idl/omniORB/COS/CosRelationships.idl")
# forward interface RoleFactory;
_0_CosRelationships._d_RoleFactory = (omniORB.tcInternal.tv_objref, "IDL:omg.org/CosRelationships/RoleFactory:1.0", "RoleFactory")
omniORB.typeMapping["IDL:omg.org/CosRelationships/RoleFactory:1.0"] = _0_CosRelationships._d_RoleFactory
# forward interface RelationshipFactory;
_0_CosRelationships._d_RelationshipFactory = (omniORB.tcInternal.tv_objref, "IDL:omg.org/CosRelationships/RelationshipFactory:1.0", "RelationshipFactory")
omniORB.typeMapping["IDL:omg.org/CosRelationships/RelationshipFactory:1.0"] = _0_CosRelationships._d_RelationshipFactory
# forward interface Relationship;
_0_CosRelationships._d_Relationship = (omniORB.tcInternal.tv_objref, "IDL:omg.org/CosRelationships/Relationship:1.0", "Relationship")
omniORB.typeMapping["IDL:omg.org/CosRelationships/Relationship:1.0"] = _0_CosRelationships._d_Relationship
# forward interface Role;
_0_CosRelationships._d_Role = (omniORB.tcInternal.tv_objref, "IDL:omg.org/CosRelationships/Role:1.0", "Role")
omniORB.typeMapping["IDL:omg.org/CosRelationships/Role:1.0"] = _0_CosRelationships._d_Role
# forward interface RelationshipIterator;
_0_CosRelationships._d_RelationshipIterator = (omniORB.tcInternal.tv_objref, "IDL:omg.org/CosRelationships/RelationshipIterator:1.0", "RelationshipIterator")
omniORB.typeMapping["IDL:omg.org/CosRelationships/RelationshipIterator:1.0"] = _0_CosRelationships._d_RelationshipIterator
# typedef ... RelatedObject
class RelatedObject:
_NP_RepositoryId = "IDL:omg.org/CosRelationships/RelatedObject:1.0"
def __init__(self, *args, **kw):
raise RuntimeError("Cannot construct objects of this type.")
_0_CosRelationships.RelatedObject = RelatedObject
_0_CosRelationships._d_RelatedObject = omniORB.typeMapping["IDL:omg.org/CORBA/Object:1.0"]
_0_CosRelationships._ad_RelatedObject = (omniORB.tcInternal.tv_alias, RelatedObject._NP_RepositoryId, "RelatedObject", omniORB.typeMapping["IDL:omg.org/CORBA/Object:1.0"])
_0_CosRelationships._tc_RelatedObject = omniORB.tcInternal.createTypeCode(_0_CosRelationships._ad_RelatedObject)
omniORB.registerType(RelatedObject._NP_RepositoryId, _0_CosRelationships._ad_RelatedObject, _0_CosRelationships._tc_RelatedObject)
del RelatedObject
# typedef ... Roles
class Roles:
_NP_RepositoryId = "IDL:omg.org/CosRelationships/Roles:1.0"
def __init__(self, *args, **kw):
raise RuntimeError("Cannot construct objects of this type.")
_0_CosRelationships.Roles = Roles
_0_CosRelationships._d_Roles = (omniORB.tcInternal.tv_sequence, omniORB.typeMapping["IDL:omg.org/CosRelationships/Role:1.0"], 0)
_0_CosRelationships._ad_Roles = (omniORB.tcInternal.tv_alias, Roles._NP_RepositoryId, "Roles", (omniORB.tcInternal.tv_sequence, omniORB.typeMapping["IDL:omg.org/CosRelationships/Role:1.0"], 0))
_0_CosRelationships._tc_Roles = omniORB.tcInternal.createTypeCode(_0_CosRelationships._ad_Roles)
omniORB.registerType(Roles._NP_RepositoryId, _0_CosRelationships._ad_Roles, _0_CosRelationships._tc_Roles)
del Roles
# typedef ... RoleName
class RoleName:
_NP_RepositoryId = "IDL:omg.org/CosRelationships/RoleName:1.0"
def __init__(self, *args, **kw):
raise RuntimeError("Cannot construct objects of this type.")
_0_CosRelationships.RoleName = RoleName
_0_CosRelationships._d_RoleName = (omniORB.tcInternal.tv_string,0)
_0_CosRelationships._ad_RoleName = (omniORB.tcInternal.tv_alias, RoleName._NP_RepositoryId, "RoleName", (omniORB.tcInternal.tv_string,0))
_0_CosRelationships._tc_RoleName = omniORB.tcInternal.createTypeCode(_0_CosRelationships._ad_RoleName)
omniORB.registerType(RoleName._NP_RepositoryId, _0_CosRelationships._ad_RoleName, _0_CosRelationships._tc_RoleName)
del RoleName
# typedef ... RoleNames
class RoleNames:
_NP_RepositoryId = "IDL:omg.org/CosRelationships/RoleNames:1.0"
def __init__(self, *args, **kw):
raise RuntimeError("Cannot construct objects of this type.")
_0_CosRelationships.RoleNames = RoleNames
_0_CosRelationships._d_RoleNames = (omniORB.tcInternal.tv_sequence, omniORB.typeMapping["IDL:omg.org/CosRelationships/RoleName:1.0"], 0)
_0_CosRelationships._ad_RoleNames = (omniORB.tcInternal.tv_alias, RoleNames._NP_RepositoryId, "RoleNames", (omniORB.tcInternal.tv_sequence, omniORB.typeMapping["IDL:omg.org/CosRelationships/RoleName:1.0"], 0))
_0_CosRelationships._tc_RoleNames = omniORB.tcInternal.createTypeCode(_0_CosRelationships._ad_RoleNames)
omniORB.registerType(RoleNames._NP_RepositoryId, _0_CosRelationships._ad_RoleNames, _0_CosRelationships._tc_RoleNames)
del RoleNames
# struct NamedRole
_0_CosRelationships.NamedRole = omniORB.newEmptyClass()
class NamedRole (omniORB.StructBase):
_NP_RepositoryId = "IDL:omg.org/CosRelationships/NamedRole:1.0"
def __init__(self, name, aRole):
self.name = name
self.aRole = aRole
_0_CosRelationships.NamedRole = NamedRole
_0_CosRelationships._d_NamedRole = (omniORB.tcInternal.tv_struct, NamedRole, NamedRole._NP_RepositoryId, "NamedRole", "name", omniORB.typeMapping["IDL:omg.org/CosRelationships/RoleName:1.0"], "aRole", omniORB.typeMapping["IDL:omg.org/CosRelationships/Role:1.0"])
_0_CosRelationships._tc_NamedRole = omniORB.tcInternal.createTypeCode(_0_CosRelationships._d_NamedRole)
omniORB.registerType(NamedRole._NP_RepositoryId, _0_CosRelationships._d_NamedRole, _0_CosRelationships._tc_NamedRole)
del NamedRole
# typedef ... NamedRoles
class NamedRoles:
_NP_RepositoryId = "IDL:omg.org/CosRelationships/NamedRoles:1.0"
def __init__(self, *args, **kw):
raise RuntimeError("Cannot construct objects of this type.")
_0_CosRelationships.NamedRoles = NamedRoles
_0_CosRelationships._d_NamedRoles = (omniORB.tcInternal.tv_sequence, omniORB.typeMapping["IDL:omg.org/CosRelationships/NamedRole:1.0"], 0)
_0_CosRelationships._ad_NamedRoles = (omniORB.tcInternal.tv_alias, NamedRoles._NP_RepositoryId, "NamedRoles", (omniORB.tcInternal.tv_sequence, omniORB.typeMapping["IDL:omg.org/CosRelationships/NamedRole:1.0"], 0))
_0_CosRelationships._tc_NamedRoles = omniORB.tcInternal.createTypeCode(_0_CosRelationships._ad_NamedRoles)
omniORB.registerType(NamedRoles._NP_RepositoryId, _0_CosRelationships._ad_NamedRoles, _0_CosRelationships._tc_NamedRoles)
del NamedRoles
# struct RelationshipHandle
_0_CosRelationships.RelationshipHandle = omniORB.newEmptyClass()
class RelationshipHandle (omniORB.StructBase):
_NP_RepositoryId = "IDL:omg.org/CosRelationships/RelationshipHandle:1.0"
def __init__(self, the_relationship, constant_random_id):
self.the_relationship = the_relationship
self.constant_random_id = constant_random_id
_0_CosRelationships.RelationshipHandle = RelationshipHandle
_0_CosRelationships._d_RelationshipHandle = (omniORB.tcInternal.tv_struct, RelationshipHandle, RelationshipHandle._NP_RepositoryId, "RelationshipHandle", "the_relationship", omniORB.typeMapping["IDL:omg.org/CosRelationships/Relationship:1.0"], "constant_random_id", omniORB.typeMapping["IDL:omg.org/CosObjectIdentity/ObjectIdentifier:1.0"])
_0_CosRelationships._tc_RelationshipHandle = omniORB.tcInternal.createTypeCode(_0_CosRelationships._d_RelationshipHandle)
omniORB.registerType(RelationshipHandle._NP_RepositoryId, _0_CosRelationships._d_RelationshipHandle, _0_CosRelationships._tc_RelationshipHandle)
del RelationshipHandle
# typedef ... RelationshipHandles
class RelationshipHandles:
_NP_RepositoryId = "IDL:omg.org/CosRelationships/RelationshipHandles:1.0"
def __init__(self, *args, **kw):
raise RuntimeError("Cannot construct objects of this type.")
_0_CosRelationships.RelationshipHandles = RelationshipHandles
_0_CosRelationships._d_RelationshipHandles = (omniORB.tcInternal.tv_sequence, omniORB.typeMapping["IDL:omg.org/CosRelationships/RelationshipHandle:1.0"], 0)
_0_CosRelationships._ad_RelationshipHandles = (omniORB.tcInternal.tv_alias, RelationshipHandles._NP_RepositoryId, "RelationshipHandles", (omniORB.tcInternal.tv_sequence, omniORB.typeMapping["IDL:omg.org/CosRelationships/RelationshipHandle:1.0"], 0))
_0_CosRelationships._tc_RelationshipHandles = omniORB.tcInternal.createTypeCode(_0_CosRelationships._ad_RelationshipHandles)
omniORB.registerType(RelationshipHandles._NP_RepositoryId, _0_CosRelationships._ad_RelationshipHandles, _0_CosRelationships._tc_RelationshipHandles)
del RelationshipHandles
# interface RelationshipFactory
_0_CosRelationships._d_RelationshipFactory = (omniORB.tcInternal.tv_objref, "IDL:omg.org/CosRelationships/RelationshipFactory:1.0", "RelationshipFactory")
omniORB.typeMapping["IDL:omg.org/CosRelationships/RelationshipFactory:1.0"] = _0_CosRelationships._d_RelationshipFactory
_0_CosRelationships.RelationshipFactory = omniORB.newEmptyClass()
class RelationshipFactory :
_NP_RepositoryId = _0_CosRelationships._d_RelationshipFactory[1]
def __init__(self, *args, **kw):
raise RuntimeError("Cannot construct objects of this type.")
_nil = CORBA.Object._nil
# struct NamedRoleType
_0_CosRelationships.RelationshipFactory.NamedRoleType = omniORB.newEmptyClass()
class NamedRoleType (omniORB.StructBase):
_NP_RepositoryId = "IDL:omg.org/CosRelationships/RelationshipFactory/NamedRoleType:1.0"
_NP_ClassName = "CosRelationships.RelationshipFactory.NamedRoleType"
def __init__(self, name, named_role_type):
self.name = name
self.named_role_type = named_role_type
_d_NamedRoleType = _0_CosRelationships.RelationshipFactory._d_NamedRoleType = (omniORB.tcInternal.tv_struct, NamedRoleType, NamedRoleType._NP_RepositoryId, "NamedRoleType", "name", omniORB.typeMapping["IDL:omg.org/CosRelationships/RoleName:1.0"], "named_role_type", omniORB.typeMapping["IDL:omg.org/CORBA/InterfaceDef:1.0"])
_tc_NamedRoleType = omniORB.tcInternal.createTypeCode(_d_NamedRoleType)
omniORB.registerType(NamedRoleType._NP_RepositoryId, _d_NamedRoleType, _tc_NamedRoleType)
# typedef ... NamedRoleTypes
class NamedRoleTypes:
_NP_RepositoryId = "IDL:omg.org/CosRelationships/RelationshipFactory/NamedRoleTypes:1.0"
def __init__(self, *args, **kw):
raise RuntimeError("Cannot construct objects of this type.")
_d_NamedRoleTypes = (omniORB.tcInternal.tv_sequence, omniORB.typeMapping["IDL:omg.org/CosRelationships/RelationshipFactory/NamedRoleType:1.0"], 0)
_ad_NamedRoleTypes = (omniORB.tcInternal.tv_alias, NamedRoleTypes._NP_RepositoryId, "NamedRoleTypes", (omniORB.tcInternal.tv_sequence, omniORB.typeMapping["IDL:omg.org/CosRelationships/RelationshipFactory/NamedRoleType:1.0"], 0))
_tc_NamedRoleTypes = omniORB.tcInternal.createTypeCode(_ad_NamedRoleTypes)
omniORB.registerType(NamedRoleTypes._NP_RepositoryId, _ad_NamedRoleTypes, _tc_NamedRoleTypes)
# exception RoleTypeError
_0_CosRelationships.RelationshipFactory.RoleTypeError = omniORB.newEmptyClass()
class RoleTypeError (CORBA.UserException):
_NP_RepositoryId = "IDL:omg.org/CosRelationships/RelationshipFactory/RoleTypeError:1.0"
_NP_ClassName = "CosRelationships.RelationshipFactory.RoleTypeError"
def __init__(self, culprits):
CORBA.UserException.__init__(self, culprits)
self.culprits = culprits
_d_RoleTypeError = (omniORB.tcInternal.tv_except, RoleTypeError, RoleTypeError._NP_RepositoryId, "RoleTypeError", "culprits", omniORB.typeMapping["IDL:omg.org/CosRelationships/NamedRoles:1.0"])
_tc_RoleTypeError = omniORB.tcInternal.createTypeCode(_d_RoleTypeError)
omniORB.registerType(RoleTypeError._NP_RepositoryId, _d_RoleTypeError, _tc_RoleTypeError)
# exception MaxCardinalityExceeded
_0_CosRelationships.RelationshipFactory.MaxCardinalityExceeded = omniORB.newEmptyClass()
class MaxCardinalityExceeded (CORBA.UserException):
_NP_RepositoryId = "IDL:omg.org/CosRelationships/RelationshipFactory/MaxCardinalityExceeded:1.0"
_NP_ClassName = "CosRelationships.RelationshipFactory.MaxCardinalityExceeded"
def __init__(self, culprits):
CORBA.UserException.__init__(self, culprits)
self.culprits = culprits
_d_MaxCardinalityExceeded = (omniORB.tcInternal.tv_except, MaxCardinalityExceeded, MaxCardinalityExceeded._NP_RepositoryId, "MaxCardinalityExceeded", "culprits", omniORB.typeMapping["IDL:omg.org/CosRelationships/NamedRoles:1.0"])
_tc_MaxCardinalityExceeded = omniORB.tcInternal.createTypeCode(_d_MaxCardinalityExceeded)
omniORB.registerType(MaxCardinalityExceeded._NP_RepositoryId, _d_MaxCardinalityExceeded, _tc_MaxCardinalityExceeded)
# exception DegreeError
_0_CosRelationships.RelationshipFactory.DegreeError = omniORB.newEmptyClass()
class DegreeError (CORBA.UserException):
_NP_RepositoryId = "IDL:omg.org/CosRelationships/RelationshipFactory/DegreeError:1.0"
_NP_ClassName = "CosRelationships.RelationshipFactory.DegreeError"
def __init__(self, required_degree):
CORBA.UserException.__init__(self, required_degree)
self.required_degree = required_degree
_d_DegreeError = (omniORB.tcInternal.tv_except, DegreeError, DegreeError._NP_RepositoryId, "DegreeError", "required_degree", omniORB.tcInternal.tv_ushort)
_tc_DegreeError = omniORB.tcInternal.createTypeCode(_d_DegreeError)
omniORB.registerType(DegreeError._NP_RepositoryId, _d_DegreeError, _tc_DegreeError)
# exception DuplicateRoleName
_0_CosRelationships.RelationshipFactory.DuplicateRoleName = omniORB.newEmptyClass()
class DuplicateRoleName (CORBA.UserException):
_NP_RepositoryId = "IDL:omg.org/CosRelationships/RelationshipFactory/DuplicateRoleName:1.0"
_NP_ClassName = "CosRelationships.RelationshipFactory.DuplicateRoleName"
def __init__(self, culprits):
CORBA.UserException.__init__(self, culprits)
self.culprits = culprits
_d_DuplicateRoleName = (omniORB.tcInternal.tv_except, DuplicateRoleName, DuplicateRoleName._NP_RepositoryId, "DuplicateRoleName", "culprits", omniORB.typeMapping["IDL:omg.org/CosRelationships/NamedRoles:1.0"])
_tc_DuplicateRoleName = omniORB.tcInternal.createTypeCode(_d_DuplicateRoleName)
omniORB.registerType(DuplicateRoleName._NP_RepositoryId, _d_DuplicateRoleName, _tc_DuplicateRoleName)
# exception UnknownRoleName
_0_CosRelationships.RelationshipFactory.UnknownRoleName = omniORB.newEmptyClass()
class UnknownRoleName (CORBA.UserException):
_NP_RepositoryId = "IDL:omg.org/CosRelationships/RelationshipFactory/UnknownRoleName:1.0"
_NP_ClassName = "CosRelationships.RelationshipFactory.UnknownRoleName"
def __init__(self, culprits):
CORBA.UserException.__init__(self, culprits)
self.culprits = culprits
_d_UnknownRoleName = (omniORB.tcInternal.tv_except, UnknownRoleName, UnknownRoleName._NP_RepositoryId, "UnknownRoleName", "culprits", omniORB.typeMapping["IDL:omg.org/CosRelationships/NamedRoles:1.0"])
_tc_UnknownRoleName = omniORB.tcInternal.createTypeCode(_d_UnknownRoleName)
omniORB.registerType(UnknownRoleName._NP_RepositoryId, _d_UnknownRoleName, _tc_UnknownRoleName)
_0_CosRelationships.RelationshipFactory = RelationshipFactory
_0_CosRelationships._tc_RelationshipFactory = omniORB.tcInternal.createTypeCode(_0_CosRelationships._d_RelationshipFactory)
omniORB.registerType(RelationshipFactory._NP_RepositoryId, _0_CosRelationships._d_RelationshipFactory, _0_CosRelationships._tc_RelationshipFactory)
# RelationshipFactory operations and attributes
RelationshipFactory._d__get_relationship_type = ((),(omniORB.typeMapping["IDL:omg.org/CORBA/InterfaceDef:1.0"],),None)
RelationshipFactory._d__get_degree = ((),(omniORB.tcInternal.tv_ushort,),None)
RelationshipFactory._d__get_named_role_types = ((),(omniORB.typeMapping["IDL:omg.org/CosRelationships/RelationshipFactory/NamedRoleTypes:1.0"],),None)
RelationshipFactory._d_create = ((omniORB.typeMapping["IDL:omg.org/CosRelationships/NamedRoles:1.0"], ), (omniORB.typeMapping["IDL:omg.org/CosRelationships/Relationship:1.0"], ), {_0_CosRelationships.RelationshipFactory.RoleTypeError._NP_RepositoryId: _0_CosRelationships.RelationshipFactory._d_RoleTypeError, _0_CosRelationships.RelationshipFactory.MaxCardinalityExceeded._NP_RepositoryId: _0_CosRelationships.RelationshipFactory._d_MaxCardinalityExceeded, _0_CosRelationships.RelationshipFactory.DegreeError._NP_RepositoryId: _0_CosRelationships.RelationshipFactory._d_DegreeError, _0_CosRelationships.RelationshipFactory.DuplicateRoleName._NP_RepositoryId: _0_CosRelationships.RelationshipFactory._d_DuplicateRoleName, _0_CosRelationships.RelationshipFactory.UnknownRoleName._NP_RepositoryId: _0_CosRelationships.RelationshipFactory._d_UnknownRoleName})
# RelationshipFactory object reference
class _objref_RelationshipFactory (CORBA.Object):
_NP_RepositoryId = RelationshipFactory._NP_RepositoryId
def __init__(self, obj):
CORBA.Object.__init__(self, obj)
def _get_relationship_type(self, *args):
return self._obj.invoke("_get_relationship_type", _0_CosRelationships.RelationshipFactory._d__get_relationship_type, args)
relationship_type = property(_get_relationship_type)
def _get_degree(self, *args):
return self._obj.invoke("_get_degree", _0_CosRelationships.RelationshipFactory._d__get_degree, args)
degree = property(_get_degree)
def _get_named_role_types(self, *args):
return self._obj.invoke("_get_named_role_types", _0_CosRelationships.RelationshipFactory._d__get_named_role_types, args)
named_role_types = property(_get_named_role_types)
def create(self, *args):
return self._obj.invoke("create", _0_CosRelationships.RelationshipFactory._d_create, args)
omniORB.registerObjref(RelationshipFactory._NP_RepositoryId, _objref_RelationshipFactory)
_0_CosRelationships._objref_RelationshipFactory = _objref_RelationshipFactory
del RelationshipFactory, _objref_RelationshipFactory
# RelationshipFactory skeleton
__name__ = "CosRelationships__POA"
class RelationshipFactory (PortableServer.Servant):
_NP_RepositoryId = _0_CosRelationships.RelationshipFactory._NP_RepositoryId
_omni_op_d = {"_get_relationship_type": _0_CosRelationships.RelationshipFactory._d__get_relationship_type, "_get_degree": _0_CosRelationships.RelationshipFactory._d__get_degree, "_get_named_role_types": _0_CosRelationships.RelationshipFactory._d__get_named_role_types, "create": _0_CosRelationships.RelationshipFactory._d_create}
RelationshipFactory._omni_skeleton = RelationshipFactory
_0_CosRelationships__POA.RelationshipFactory = RelationshipFactory
omniORB.registerSkeleton(RelationshipFactory._NP_RepositoryId, RelationshipFactory)
del RelationshipFactory
__name__ = "CosRelationships"
# interface Relationship
_0_CosRelationships._d_Relationship = (omniORB.tcInternal.tv_objref, "IDL:omg.org/CosRelationships/Relationship:1.0", "Relationship")
omniORB.typeMapping["IDL:omg.org/CosRelationships/Relationship:1.0"] = _0_CosRelationships._d_Relationship
_0_CosRelationships.Relationship = omniORB.newEmptyClass()
class Relationship (_0_CosObjectIdentity.IdentifiableObject):
_NP_RepositoryId = _0_CosRelationships._d_Relationship[1]
def __init__(self, *args, **kw):
raise RuntimeError("Cannot construct objects of this type.")
_nil = CORBA.Object._nil
# exception CannotUnlink
_0_CosRelationships.Relationship.CannotUnlink = omniORB.newEmptyClass()
class CannotUnlink (CORBA.UserException):
_NP_RepositoryId = "IDL:omg.org/CosRelationships/Relationship/CannotUnlink:1.0"
_NP_ClassName = "CosRelationships.Relationship.CannotUnlink"
def __init__(self, offending_roles):
CORBA.UserException.__init__(self, offending_roles)
self.offending_roles = offending_roles
_d_CannotUnlink = (omniORB.tcInternal.tv_except, CannotUnlink, CannotUnlink._NP_RepositoryId, "CannotUnlink", "offending_roles", omniORB.typeMapping["IDL:omg.org/CosRelationships/Roles:1.0"])
_tc_CannotUnlink = omniORB.tcInternal.createTypeCode(_d_CannotUnlink)
omniORB.registerType(CannotUnlink._NP_RepositoryId, _d_CannotUnlink, _tc_CannotUnlink)
_0_CosRelationships.Relationship = Relationship
_0_CosRelationships._tc_Relationship = omniORB.tcInternal.createTypeCode(_0_CosRelationships._d_Relationship)
omniORB.registerType(Relationship._NP_RepositoryId, _0_CosRelationships._d_Relationship, _0_CosRelationships._tc_Relationship)
# Relationship operations and attributes
Relationship._d__get_named_roles = ((),(omniORB.typeMapping["IDL:omg.org/CosRelationships/NamedRoles:1.0"],),None)
Relationship._d_destroy = ((), (), {_0_CosRelationships.Relationship.CannotUnlink._NP_RepositoryId: _0_CosRelationships.Relationship._d_CannotUnlink})
# Relationship object reference
class _objref_Relationship (_0_CosObjectIdentity._objref_IdentifiableObject):
_NP_RepositoryId = Relationship._NP_RepositoryId
def __init__(self, obj):
_0_CosObjectIdentity._objref_IdentifiableObject.__init__(self, obj)
def _get_named_roles(self, *args):
return self._obj.invoke("_get_named_roles", _0_CosRelationships.Relationship._d__get_named_roles, args)
named_roles = property(_get_named_roles)
def destroy(self, *args):
return self._obj.invoke("destroy", _0_CosRelationships.Relationship._d_destroy, args)
omniORB.registerObjref(Relationship._NP_RepositoryId, _objref_Relationship)
_0_CosRelationships._objref_Relationship = _objref_Relationship
del Relationship, _objref_Relationship
# Relationship skeleton
__name__ = "CosRelationships__POA"
class Relationship (_0_CosObjectIdentity__POA.IdentifiableObject):
_NP_RepositoryId = _0_CosRelationships.Relationship._NP_RepositoryId
_omni_op_d = {"_get_named_roles": _0_CosRelationships.Relationship._d__get_named_roles, "destroy": _0_CosRelationships.Relationship._d_destroy}
_omni_op_d.update(_0_CosObjectIdentity__POA.IdentifiableObject._omni_op_d)
Relationship._omni_skeleton = Relationship
_0_CosRelationships__POA.Relationship = Relationship
omniORB.registerSkeleton(Relationship._NP_RepositoryId, Relationship)
del Relationship
__name__ = "CosRelationships"
# interface Role
_0_CosRelationships._d_Role = (omniORB.tcInternal.tv_objref, "IDL:omg.org/CosRelationships/Role:1.0", "Role")
omniORB.typeMapping["IDL:omg.org/CosRelationships/Role:1.0"] = _0_CosRelationships._d_Role
_0_CosRelationships.Role = omniORB.newEmptyClass()
class Role :
_NP_RepositoryId = _0_CosRelationships._d_Role[1]
def __init__(self, *args, **kw):
raise RuntimeError("Cannot construct objects of this type.")
_nil = CORBA.Object._nil
# exception UnknownRoleName
_0_CosRelationships.Role.UnknownRoleName = omniORB.newEmptyClass()
class UnknownRoleName (CORBA.UserException):
_NP_RepositoryId = "IDL:omg.org/CosRelationships/Role/UnknownRoleName:1.0"
_NP_ClassName = "CosRelationships.Role.UnknownRoleName"
def __init__(self):
CORBA.UserException.__init__(self)
_d_UnknownRoleName = (omniORB.tcInternal.tv_except, UnknownRoleName, UnknownRoleName._NP_RepositoryId, "UnknownRoleName")
_tc_UnknownRoleName = omniORB.tcInternal.createTypeCode(_d_UnknownRoleName)
omniORB.registerType(UnknownRoleName._NP_RepositoryId, _d_UnknownRoleName, _tc_UnknownRoleName)
# exception UnknownRelationship
_0_CosRelationships.Role.UnknownRelationship = omniORB.newEmptyClass()
class UnknownRelationship (CORBA.UserException):
_NP_RepositoryId = "IDL:omg.org/CosRelationships/Role/UnknownRelationship:1.0"
_NP_ClassName = "CosRelationships.Role.UnknownRelationship"
def __init__(self):
CORBA.UserException.__init__(self)
_d_UnknownRelationship = (omniORB.tcInternal.tv_except, UnknownRelationship, UnknownRelationship._NP_RepositoryId, "UnknownRelationship")
_tc_UnknownRelationship = omniORB.tcInternal.createTypeCode(_d_UnknownRelationship)
omniORB.registerType(UnknownRelationship._NP_RepositoryId, _d_UnknownRelationship, _tc_UnknownRelationship)
# exception RelationshipTypeError
_0_CosRelationships.Role.RelationshipTypeError = omniORB.newEmptyClass()
class RelationshipTypeError (CORBA.UserException):
_NP_RepositoryId = "IDL:omg.org/CosRelationships/Role/RelationshipTypeError:1.0"
_NP_ClassName = "CosRelationships.Role.RelationshipTypeError"
def __init__(self):
CORBA.UserException.__init__(self)
_d_RelationshipTypeError = (omniORB.tcInternal.tv_except, RelationshipTypeError, RelationshipTypeError._NP_RepositoryId, "RelationshipTypeError")
_tc_RelationshipTypeError = omniORB.tcInternal.createTypeCode(_d_RelationshipTypeError)
omniORB.registerType(RelationshipTypeError._NP_RepositoryId, _d_RelationshipTypeError, _tc_RelationshipTypeError)
# exception CannotDestroyRelationship
_0_CosRelationships.Role.CannotDestroyRelationship = omniORB.newEmptyClass()
class CannotDestroyRelationship (CORBA.UserException):
_NP_RepositoryId = "IDL:omg.org/CosRelationships/Role/CannotDestroyRelationship:1.0"
_NP_ClassName = "CosRelationships.Role.CannotDestroyRelationship"
def __init__(self, offenders):
CORBA.UserException.__init__(self, offenders)
self.offenders = offenders
_d_CannotDestroyRelationship = (omniORB.tcInternal.tv_except, CannotDestroyRelationship, CannotDestroyRelationship._NP_RepositoryId, "CannotDestroyRelationship", "offenders", omniORB.typeMapping["IDL:omg.org/CosRelationships/RelationshipHandles:1.0"])
_tc_CannotDestroyRelationship = omniORB.tcInternal.createTypeCode(_d_CannotDestroyRelationship)
omniORB.registerType(CannotDestroyRelationship._NP_RepositoryId, _d_CannotDestroyRelationship, _tc_CannotDestroyRelationship)
# exception ParticipatingInRelationship
_0_CosRelationships.Role.ParticipatingInRelationship = omniORB.newEmptyClass()
class ParticipatingInRelationship (CORBA.UserException):
_NP_RepositoryId = "IDL:omg.org/CosRelationships/Role/ParticipatingInRelationship:1.0"
_NP_ClassName = "CosRelationships.Role.ParticipatingInRelationship"
def __init__(self, the_relationships):
CORBA.UserException.__init__(self, the_relationships)
self.the_relationships = the_relationships
_d_ParticipatingInRelationship = (omniORB.tcInternal.tv_except, ParticipatingInRelationship, ParticipatingInRelationship._NP_RepositoryId, "ParticipatingInRelationship", "the_relationships", omniORB.typeMapping["IDL:omg.org/CosRelationships/RelationshipHandles:1.0"])
_tc_ParticipatingInRelationship = omniORB.tcInternal.createTypeCode(_d_ParticipatingInRelationship)
omniORB.registerType(ParticipatingInRelationship._NP_RepositoryId, _d_ParticipatingInRelationship, _tc_ParticipatingInRelationship)
_0_CosRelationships.Role = Role
_0_CosRelationships._tc_Role = omniORB.tcInternal.createTypeCode(_0_CosRelationships._d_Role)
omniORB.registerType(Role._NP_RepositoryId, _0_CosRelationships._d_Role, _0_CosRelationships._tc_Role)
# Role operations and attributes
Role._d__get_related_object = ((),(omniORB.typeMapping["IDL:omg.org/CosRelationships/RelatedObject:1.0"],),None)
Role._d_get_other_related_object = ((omniORB.typeMapping["IDL:omg.org/CosRelationships/RelationshipHandle:1.0"], omniORB.typeMapping["IDL:omg.org/CosRelationships/RoleName:1.0"]), (omniORB.typeMapping["IDL:omg.org/CosRelationships/RelatedObject:1.0"], ), {_0_CosRelationships.Role.UnknownRoleName._NP_RepositoryId: _0_CosRelationships.Role._d_UnknownRoleName, _0_CosRelationships.Role.UnknownRelationship._NP_RepositoryId: _0_CosRelationships.Role._d_UnknownRelationship})
Role._d_get_other_role = ((omniORB.typeMapping["IDL:omg.org/CosRelationships/RelationshipHandle:1.0"], omniORB.typeMapping["IDL:omg.org/CosRelationships/RoleName:1.0"]), (omniORB.typeMapping["IDL:omg.org/CosRelationships/Role:1.0"], ), {_0_CosRelationships.Role.UnknownRoleName._NP_RepositoryId: _0_CosRelationships.Role._d_UnknownRoleName, _0_CosRelationships.Role.UnknownRelationship._NP_RepositoryId: _0_CosRelationships.Role._d_UnknownRelationship})
Role._d_get_relationships = ((omniORB.tcInternal.tv_ulong, ), (omniORB.typeMapping["IDL:omg.org/CosRelationships/RelationshipHandles:1.0"], omniORB.typeMapping["IDL:omg.org/CosRelationships/RelationshipIterator:1.0"]), None)
Role._d_destroy_relationships = ((), (), {_0_CosRelationships.Role.CannotDestroyRelationship._NP_RepositoryId: _0_CosRelationships.Role._d_CannotDestroyRelationship})
Role._d_destroy = ((), (), {_0_CosRelationships.Role.ParticipatingInRelationship._NP_RepositoryId: _0_CosRelationships.Role._d_ParticipatingInRelationship})
Role._d_check_minimum_cardinality = ((), (omniORB.tcInternal.tv_boolean, ), None)
Role._d_link = ((omniORB.typeMapping["IDL:omg.org/CosRelationships/RelationshipHandle:1.0"], omniORB.typeMapping["IDL:omg.org/CosRelationships/NamedRoles:1.0"]), (), {_0_CosRelationships.RelationshipFactory.MaxCardinalityExceeded._NP_RepositoryId: _0_CosRelationships.RelationshipFactory._d_MaxCardinalityExceeded, _0_CosRelationships.Role.RelationshipTypeError._NP_RepositoryId: _0_CosRelationships.Role._d_RelationshipTypeError})
Role._d_unlink = ((omniORB.typeMapping["IDL:omg.org/CosRelationships/RelationshipHandle:1.0"], ), (), {_0_CosRelationships.Role.UnknownRelationship._NP_RepositoryId: _0_CosRelationships.Role._d_UnknownRelationship})
# Role object reference
class _objref_Role (CORBA.Object):
_NP_RepositoryId = Role._NP_RepositoryId
def __init__(self, obj):
CORBA.Object.__init__(self, obj)
def _get_related_object(self, *args):
return self._obj.invoke("_get_related_object", _0_CosRelationships.Role._d__get_related_object, args)
related_object = property(_get_related_object)
def get_other_related_object(self, *args):
return self._obj.invoke("get_other_related_object", _0_CosRelationships.Role._d_get_other_related_object, args)
def get_other_role(self, *args):
return self._obj.invoke("get_other_role", _0_CosRelationships.Role._d_get_other_role, args)
def get_relationships(self, *args):
return self._obj.invoke("get_relationships", _0_CosRelationships.Role._d_get_relationships, args)
def destroy_relationships(self, *args):
return self._obj.invoke("destroy_relationships", _0_CosRelationships.Role._d_destroy_relationships, args)
def destroy(self, *args):
return self._obj.invoke("destroy", _0_CosRelationships.Role._d_destroy, args)
def check_minimum_cardinality(self, *args):
return self._obj.invoke("check_minimum_cardinality", _0_CosRelationships.Role._d_check_minimum_cardinality, args)
def link(self, *args):
return self._obj.invoke("link", _0_CosRelationships.Role._d_link, args)
def unlink(self, *args):
return self._obj.invoke("unlink", _0_CosRelationships.Role._d_unlink, args)
omniORB.registerObjref(Role._NP_RepositoryId, _objref_Role)
_0_CosRelationships._objref_Role = _objref_Role
del Role, _objref_Role
# Role skeleton
__name__ = "CosRelationships__POA"
class Role (PortableServer.Servant):
_NP_RepositoryId = _0_CosRelationships.Role._NP_RepositoryId
_omni_op_d = {"_get_related_object": _0_CosRelationships.Role._d__get_related_object, "get_other_related_object": _0_CosRelationships.Role._d_get_other_related_object, "get_other_role": _0_CosRelationships.Role._d_get_other_role, "get_relationships": _0_CosRelationships.Role._d_get_relationships, "destroy_relationships": _0_CosRelationships.Role._d_destroy_relationships, "destroy": _0_CosRelationships.Role._d_destroy, "check_minimum_cardinality": _0_CosRelationships.Role._d_check_minimum_cardinality, "link": _0_CosRelationships.Role._d_link, "unlink": _0_CosRelationships.Role._d_unlink}
Role._omni_skeleton = Role
_0_CosRelationships__POA.Role = Role
omniORB.registerSkeleton(Role._NP_RepositoryId, Role)
del Role
__name__ = "CosRelationships"
# interface RoleFactory
_0_CosRelationships._d_RoleFactory = (omniORB.tcInternal.tv_objref, "IDL:omg.org/CosRelationships/RoleFactory:1.0", "RoleFactory")
omniORB.typeMapping["IDL:omg.org/CosRelationships/RoleFactory:1.0"] = _0_CosRelationships._d_RoleFactory
_0_CosRelationships.RoleFactory = omniORB.newEmptyClass()
class RoleFactory :
_NP_RepositoryId = _0_CosRelationships._d_RoleFactory[1]
def __init__(self, *args, **kw):
raise RuntimeError("Cannot construct objects of this type.")
_nil = CORBA.Object._nil
# exception NilRelatedObject
_0_CosRelationships.RoleFactory.NilRelatedObject = omniORB.newEmptyClass()
class NilRelatedObject (CORBA.UserException):
_NP_RepositoryId = "IDL:omg.org/CosRelationships/RoleFactory/NilRelatedObject:1.0"
_NP_ClassName = "CosRelationships.RoleFactory.NilRelatedObject"
def __init__(self):
CORBA.UserException.__init__(self)
_d_NilRelatedObject = (omniORB.tcInternal.tv_except, NilRelatedObject, NilRelatedObject._NP_RepositoryId, "NilRelatedObject")
_tc_NilRelatedObject = omniORB.tcInternal.createTypeCode(_d_NilRelatedObject)
omniORB.registerType(NilRelatedObject._NP_RepositoryId, _d_NilRelatedObject, _tc_NilRelatedObject)
# exception RelatedObjectTypeError
_0_CosRelationships.RoleFactory.RelatedObjectTypeError = omniORB.newEmptyClass()
class RelatedObjectTypeError (CORBA.UserException):
_NP_RepositoryId = "IDL:omg.org/CosRelationships/RoleFactory/RelatedObjectTypeError:1.0"
_NP_ClassName = "CosRelationships.RoleFactory.RelatedObjectTypeError"
def __init__(self):
CORBA.UserException.__init__(self)
_d_RelatedObjectTypeError = (omniORB.tcInternal.tv_except, RelatedObjectTypeError, RelatedObjectTypeError._NP_RepositoryId, "RelatedObjectTypeError")
_tc_RelatedObjectTypeError = omniORB.tcInternal.createTypeCode(_d_RelatedObjectTypeError)
omniORB.registerType(RelatedObjectTypeError._NP_RepositoryId, _d_RelatedObjectTypeError, _tc_RelatedObjectTypeError)
# typedef ... InterfaceDefs
class InterfaceDefs:
_NP_RepositoryId = "IDL:omg.org/CosRelationships/RoleFactory/InterfaceDefs:1.0"
def __init__(self, *args, **kw):
raise RuntimeError("Cannot construct objects of this type.")
_d_InterfaceDefs = (omniORB.tcInternal.tv_sequence, omniORB.typeMapping["IDL:omg.org/CORBA/InterfaceDef:1.0"], 0)
_ad_InterfaceDefs = (omniORB.tcInternal.tv_alias, InterfaceDefs._NP_RepositoryId, "InterfaceDefs", (omniORB.tcInternal.tv_sequence, omniORB.typeMapping["IDL:omg.org/CORBA/InterfaceDef:1.0"], 0))
_tc_InterfaceDefs = omniORB.tcInternal.createTypeCode(_ad_InterfaceDefs)
omniORB.registerType(InterfaceDefs._NP_RepositoryId, _ad_InterfaceDefs, _tc_InterfaceDefs)
_0_CosRelationships.RoleFactory = RoleFactory
_0_CosRelationships._tc_RoleFactory = omniORB.tcInternal.createTypeCode(_0_CosRelationships._d_RoleFactory)
omniORB.registerType(RoleFactory._NP_RepositoryId, _0_CosRelationships._d_RoleFactory, _0_CosRelationships._tc_RoleFactory)
# RoleFactory operations and attributes
RoleFactory._d__get_role_type = ((),(omniORB.typeMapping["IDL:omg.org/CORBA/InterfaceDef:1.0"],),None)
RoleFactory._d__get_max_cardinality = ((),(omniORB.tcInternal.tv_ulong,),None)
RoleFactory._d__get_min_cardinality = ((),(omniORB.tcInternal.tv_ulong,),None)
RoleFactory._d__get_related_object_types = ((),(omniORB.typeMapping["IDL:omg.org/CosRelationships/RoleFactory/InterfaceDefs:1.0"],),None)
RoleFactory._d_create_role = ((omniORB.typeMapping["IDL:omg.org/CosRelationships/RelatedObject:1.0"], ), (omniORB.typeMapping["IDL:omg.org/CosRelationships/Role:1.0"], ), {_0_CosRelationships.RoleFactory.NilRelatedObject._NP_RepositoryId: _0_CosRelationships.RoleFactory._d_NilRelatedObject, _0_CosRelationships.RoleFactory.RelatedObjectTypeError._NP_RepositoryId: _0_CosRelationships.RoleFactory._d_RelatedObjectTypeError})
# RoleFactory object reference
class _objref_RoleFactory (CORBA.Object):
_NP_RepositoryId = RoleFactory._NP_RepositoryId
def __init__(self, obj):
CORBA.Object.__init__(self, obj)
def _get_role_type(self, *args):
return self._obj.invoke("_get_role_type", _0_CosRelationships.RoleFactory._d__get_role_type, args)
role_type = property(_get_role_type)
def _get_max_cardinality(self, *args):
return self._obj.invoke("_get_max_cardinality", _0_CosRelationships.RoleFactory._d__get_max_cardinality, args)
max_cardinality = property(_get_max_cardinality)
def _get_min_cardinality(self, *args):
return self._obj.invoke("_get_min_cardinality", _0_CosRelationships.RoleFactory._d__get_min_cardinality, args)
min_cardinality = property(_get_min_cardinality)
def _get_related_object_types(self, *args):
return self._obj.invoke("_get_related_object_types", _0_CosRelationships.RoleFactory._d__get_related_object_types, args)
related_object_types = property(_get_related_object_types)
def create_role(self, *args):
return self._obj.invoke("create_role", _0_CosRelationships.RoleFactory._d_create_role, args)
omniORB.registerObjref(RoleFactory._NP_RepositoryId, _objref_RoleFactory)
_0_CosRelationships._objref_RoleFactory = _objref_RoleFactory
del RoleFactory, _objref_RoleFactory
# RoleFactory skeleton
__name__ = "CosRelationships__POA"
class RoleFactory (PortableServer.Servant):
_NP_RepositoryId = _0_CosRelationships.RoleFactory._NP_RepositoryId
_omni_op_d = {"_get_role_type": _0_CosRelationships.RoleFactory._d__get_role_type, "_get_max_cardinality": _0_CosRelationships.RoleFactory._d__get_max_cardinality, "_get_min_cardinality": _0_CosRelationships.RoleFactory._d__get_min_cardinality, "_get_related_object_types": _0_CosRelationships.RoleFactory._d__get_related_object_types, "create_role": _0_CosRelationships.RoleFactory._d_create_role}
RoleFactory._omni_skeleton = RoleFactory
_0_CosRelationships__POA.RoleFactory = RoleFactory
omniORB.registerSkeleton(RoleFactory._NP_RepositoryId, RoleFactory)
del RoleFactory
__name__ = "CosRelationships"
# interface RelationshipIterator
_0_CosRelationships._d_RelationshipIterator = (omniORB.tcInternal.tv_objref, "IDL:omg.org/CosRelationships/RelationshipIterator:1.0", "RelationshipIterator")
omniORB.typeMapping["IDL:omg.org/CosRelationships/RelationshipIterator:1.0"] = _0_CosRelationships._d_RelationshipIterator
_0_CosRelationships.RelationshipIterator = omniORB.newEmptyClass()
class RelationshipIterator :
_NP_RepositoryId = _0_CosRelationships._d_RelationshipIterator[1]
def __init__(self, *args, **kw):
raise RuntimeError("Cannot construct objects of this type.")
_nil = CORBA.Object._nil
_0_CosRelationships.RelationshipIterator = RelationshipIterator
_0_CosRelationships._tc_RelationshipIterator = omniORB.tcInternal.createTypeCode(_0_CosRelationships._d_RelationshipIterator)
omniORB.registerType(RelationshipIterator._NP_RepositoryId, _0_CosRelationships._d_RelationshipIterator, _0_CosRelationships._tc_RelationshipIterator)
# RelationshipIterator operations and attributes
RelationshipIterator._d_next_one = ((), (omniORB.tcInternal.tv_boolean, omniORB.typeMapping["IDL:omg.org/CosRelationships/RelationshipHandle:1.0"]), None)
RelationshipIterator._d_next_n = ((omniORB.tcInternal.tv_ulong, ), (omniORB.tcInternal.tv_boolean, omniORB.typeMapping["IDL:omg.org/CosRelationships/RelationshipHandles:1.0"]), None)
RelationshipIterator._d_destroy = ((), (), None)
# RelationshipIterator object reference
class _objref_RelationshipIterator (CORBA.Object):
_NP_RepositoryId = RelationshipIterator._NP_RepositoryId
def __init__(self, obj):
CORBA.Object.__init__(self, obj)
def next_one(self, *args):
return self._obj.invoke("next_one", _0_CosRelationships.RelationshipIterator._d_next_one, args)
def next_n(self, *args):
return self._obj.invoke("next_n", _0_CosRelationships.RelationshipIterator._d_next_n, args)
def destroy(self, *args):
return self._obj.invoke("destroy", _0_CosRelationships.RelationshipIterator._d_destroy, args)
omniORB.registerObjref(RelationshipIterator._NP_RepositoryId, _objref_RelationshipIterator)
_0_CosRelationships._objref_RelationshipIterator = _objref_RelationshipIterator
del RelationshipIterator, _objref_RelationshipIterator
# RelationshipIterator skeleton
__name__ = "CosRelationships__POA"
class RelationshipIterator (PortableServer.Servant):
_NP_RepositoryId = _0_CosRelationships.RelationshipIterator._NP_RepositoryId
_omni_op_d = {"next_one": _0_CosRelationships.RelationshipIterator._d_next_one, "next_n": _0_CosRelationships.RelationshipIterator._d_next_n, "destroy": _0_CosRelationships.RelationshipIterator._d_destroy}
RelationshipIterator._omni_skeleton = RelationshipIterator
_0_CosRelationships__POA.RelationshipIterator = RelationshipIterator
omniORB.registerSkeleton(RelationshipIterator._NP_RepositoryId, RelationshipIterator)
del RelationshipIterator
__name__ = "CosRelationships"
#
# End of module "CosRelationships"
#
__name__ = "CosRelationships_idl"
_exported_modules = ( "CosRelationships", )
# The end.
|
amonmoce/corba_examples
|
omniORBpy-4.2.1/build/python/COS/CosRelationships_idl.py
|
Python
|
mit
| 42,484
|
from direction import Direction, Pivot
XMovement = {
Direction.left: -1,
Direction.up: 0,
Direction.right: 1,
Direction.down: 0,
Direction.up_left: -1,
Direction.up_right: 1,
Direction.down_left: -1,
Direction.down_right: 1
}
YMovement = {
Direction.left: 0,
Direction.up: -1,
Direction.right: 0,
Direction.down: 1,
Direction.up_left: -1,
Direction.up_right: -1,
Direction.down_left: 1,
Direction.down_right: 1
}
NewlineDirection = {
Direction.left: Direction.up,
Direction.up: Direction.right,
Direction.right: Direction.down,
Direction.down: Direction.left,
Direction.up_left: Direction.up_right,
Direction.up_right: Direction.down_right,
Direction.down_left: Direction.up_left,
Direction.down_right: Direction.down_left
}
NextDirection = {
Direction.left: Direction.up_left,
Direction.up: Direction.up_right,
Direction.right: Direction.down_right,
Direction.down: Direction.down_left,
Direction.up_left: Direction.up,
Direction.up_right: Direction.right,
Direction.down_left: Direction.left,
Direction.down_right: Direction.down
}
DirectionCharacters = {
Direction.left: "-",
Direction.up: "|",
Direction.right: "-",
Direction.down: "|",
Direction.up_left: "\\",
Direction.up_right: "/",
Direction.down_left: "/",
Direction.down_right: "\\"
}
PivotLookup = {
Pivot.left: {
Direction.left: Direction.down_left,
Direction.up: Direction.up_left,
Direction.right: Direction.up_right,
Direction.down: Direction.down_right,
Direction.up_left: Direction.left,
Direction.up_right: Direction.up,
Direction.down_left: Direction.down,
Direction.down_right: Direction.right
},
Pivot.right: {
Direction.left: Direction.up_left,
Direction.up: Direction.up_right,
Direction.right: Direction.down_right,
Direction.down: Direction.down_left,
Direction.up_left: Direction.up,
Direction.up_right: Direction.right,
Direction.down_left: Direction.left,
Direction.down_right: Direction.down
}
}
DirectionFromXYSigns = {
-1: {-1: Direction.up_left, 0: Direction.left, 1: Direction.down_left},
0: {-1: Direction.up, 0: Direction.right, 1: Direction.down},
1: {-1: Direction.up_right, 0: Direction.right, 1: Direction.down_right}
}
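# A minimal sketch (not from the original module): the tables above are plain
# lookup dictionaries. XMovement/YMovement give the per-step (dx, dy) offset of
# a cursor moving in a Direction, and PivotLookup[pivot][direction] rotates a
# Direction by 45 degrees. The helper name `step` below is an assumption made
# for illustration, not Charcoal API.
def step(x, y, direction):
    """Return the coordinate reached by moving one cell in `direction`."""
    return x + XMovement[direction], y + YMovement[direction]
# e.g. step(0, 0, Direction.up_right) -> (1, -1)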
|
somebody1234/Charcoal
|
directiondictionaries.py
|
Python
|
mit
| 2,433
|
from rich.console import Console
from rich.markdown import Markdown
from rich.syntax import Syntax
def process_syntax(code, lang, theme, line_numbers, code_width, word_wrap):
syntax = Syntax(
code,
lang,
theme=theme,
line_numbers=line_numbers,
code_width=code_width,
word_wrap=word_wrap,
)
return syntax
def display_markdown(code):
try:
markdown = Markdown(code)
console = Console()
console.print(markdown) # noqa
return True # Success
except Exception:
return False # Failure
def display_code(code):
try:
console = Console()
console.print(code) # noqa
return True # Success
except Exception:
return False # Failure
def fix_emoji_spacing(code):
try:
# Fix the display width of certain emojis that take up two spaces
double_width_emojis = [
"🗺️", "🖼️", "🗄️", "⏺️", "♻️", "🗂️", "🖥️", "🕹️", "🎞️"
]
for emoji in double_width_emojis:
if emoji in code:
code = code.replace(emoji, emoji + " ")
except Exception:
pass
return code
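# A minimal usage sketch (not from the original module). It chains the helpers
# above: normalize emoji widths, wrap the text in a rich Syntax object, then
# print it. All literal values (sample code, theme, widths) are arbitrary
# assumptions for illustration.
if __name__ == "__main__":
    sample = 'print("hello")  # 🗺️ demo'
    sample = fix_emoji_spacing(sample)
    highlighted = process_syntax(
        sample, "python", theme="monokai",
        line_numbers=True, code_width=60, word_wrap=True,
    )
    display_code(highlighted)
    display_markdown("# Demo\n\nRendered with *rich*.")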
|
seleniumbase/SeleniumBase
|
seleniumbase/console_scripts/rich_helper.py
|
Python
|
mit
| 1,219
|
""" Sorted input and output.
"""
from collections import deque
from operator import itemgetter
from .buffer import _ReaderBuffer
from .buffer import _WriterBuffer
__all__ = "SortReader", "SortWriter"
class _Sorter(object):
""" Abstract base class for SortReader and SortWriter.
"""
def __init__(self, key, group=None):
""" Initialize this object.
The key argument determines sort order and is either a single field
name, a sequence of names, or a key function that returns a key value.
The optional group argument is like the key argument but is used to
group records that are already partially sorted. Records will be sorted
within each group rather than as a single sequence. If the groups are
small relative to the total sequence length this can significantly
improve performance and memory usage.
"""
def keyfunc(key):
""" Create a key function. """
if not key or callable(key):
return key
if isinstance(key, str):
key = (key,)
return itemgetter(*key)
self._get_key = keyfunc(key)
self._get_group = keyfunc(group)
self._group = None
self._buffer = []
self._output = None # initialized by derived classes
return
def _queue(self, record):
""" Process each incoming record.
"""
if self._get_group:
group = self._get_group(record)
if group != self._group:
# This is a new group; process the previous group.
self._flush()
self._group = group
self._buffer.append(record)
return
def _flush(self):
""" Send sorted records to the output queue.
"""
if not self._buffer:
return
self._buffer.sort(key=self._get_key)
self._output = deque(self._buffer)
self._buffer = []
return
class SortReader(_Sorter, _ReaderBuffer):
""" Sort input from another reader.
"""
def __init__(self, reader, key, group=None):
""" Initialize this object.
"""
_Sorter.__init__(self, key, group)
_ReaderBuffer.__init__(self, reader)
return
def _uflow(self):
""" Handle an underflow condition.
This is called when the input reader is exhausted and there are no
records in the output queue.
"""
if not self._buffer:
# All data has been output.
raise StopIteration
self._flush()
return
class SortWriter(_Sorter, _WriterBuffer):
""" Sort output for another writer.
"""
def __init__(self, writer, key, group=None):
""" Initialize this object.
"""
_Sorter.__init__(self, key, group)
_WriterBuffer.__init__(self, writer)
return
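# A minimal sketch (not from the original module) of the key/group semantics
# implemented by keyfunc() above: a single field name, a sequence of names, or
# a callable all reduce to an itemgetter-style key. This only uses the standard
# library sorted(); it does not exercise SortReader/SortWriter themselves.
def _key_semantics_demo():
    """Sketch: show the key mapping implemented by keyfunc()."""
    records = [
        {"stid": "A", "time": 2},
        {"stid": "A", "time": 1},
        {"stid": "B", "time": 0},
    ]
    # key=("stid", "time") becomes itemgetter("stid", "time"), as in keyfunc().
    return sorted(records, key=itemgetter("stid", "time"))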
|
mdklatt/serial-python
|
src/serial/core/sort.py
|
Python
|
mit
| 3,047
|
# Make your image, region, and location changes then change the from-import
# to match.
from configurables_akeeton_desktop import *
import hashlib
import java.awt.Toolkit
import json
import os
import shutil
import tempfile  # needed for tempfile.mkdtemp() below
import time
Settings.ActionLogs = True
Settings.InfoLogs = True
Settings.DebugLogs = True
Settings.LogTime = True
Settings.AutoWaitTimeout = AUTO_WAIT_TIMEOUT_SECONDS
TEMP_DIR_PREFIX = time.strftime("MTGO-scry-bug_%Y-%m-%d_%H-%M-%S", time.gmtime())
TEMP_PATH = tempfile.mkdtemp(prefix=TEMP_DIR_PREFIX)
attempts = 0
def main():
global attempts
attempts += 1
ATTEMPT_NUM_PATH = get_attempt_number_path(attempts)
HITS_PATH = os.path.join(ATTEMPT_NUM_PATH, HITS_DIR)
MISSES_PATH = os.path.join(ATTEMPT_NUM_PATH, MISSES_DIR)
print "TEMP_PATH:", TEMP_PATH
print "ATTEMPT_NUM_PATH", ATTEMPT_NUM_PATH
print "HITS_PATH:", HITS_PATH
print "MISSES_PATH:", MISSES_PATH
os.mkdir(ATTEMPT_NUM_PATH)
os.mkdir(HITS_PATH)
os.mkdir(MISSES_PATH)
iterations = 0
hits = 0
card_hash_to_times_card_sent_to_bottom = ['card_hash_to_times_card_sent_to_bottom', ZeroValueDict()]
card_hash_to_times_card_sent_to_bottom_and_drawn = ['card_hash_to_times_card_sent_to_bottom_and_drawn', ZeroValueDict()]
card_hash_to_times_card_drawn = ['card_hash_to_times_card_drawn', ZeroValueDict()]
card_hash_to_capture = ['card_hash_to_capture', {}]
while True:
REGION_PLAY.wait("play.png")
time.sleep(0.5)
REGION_PLAY.click(LOCATION_PLAY)
time.sleep(1.0)
REGION_MULLIGAN_KEEP.wait("mulligan_keep.png")
for i in range(0, 7):
REGION_MULLIGAN_KEEP.wait("mulligan_highlighted_keep.png")
time.sleep(2.0) # I swear if I have to keep incrementing this value...
REGION_MULLIGAN_KEEP.click(LOCATION_MULLIGAN)
time.sleep(1.0)
REGION_TEMPORARY_ZONE.wait("temporary_zone.png")
time.sleep(0.5)
click(LOCATION_TEMPORARY_ZONE_CARD)
time.sleep(0.5)
REGION_PUT_ON_THE_BOTTOM_OF_YOUR_LIBRARY.click(LOCATION_PUT_ON_THE_BOTTOM_OF_YOUR_LIBRARY)
time.sleep(0.1)
REGION_CHAT_PUT_A_CARD_ON_THE_BOTTOM_OF_THE_LIBRARY.wait("chat_put_a_card_on_the_bottom_of_the_library.png")
time.sleep(0.1)
card_sent_to_bottom_capture = capture(REGION_CARD_PREVIEW_CAPTURE)
hover(LOCATION_FIRST_CARD_IN_HAND) # Update the preview with the drawn card.
time.sleep(0.5)
card_drawn_capture = capture(REGION_CARD_PREVIEW_CAPTURE)
copy_path = ""
card_sent_to_bottom_hash = hash_file(card_sent_to_bottom_capture)
card_drawn_hash = hash_file(card_drawn_capture)
card_hash_to_times_card_sent_to_bottom[1][card_sent_to_bottom_hash] += 1
card_hash_to_times_card_drawn[1][card_drawn_hash] += 1
if card_sent_to_bottom_hash == card_drawn_hash:
hits += 1
card_hash_to_times_card_sent_to_bottom_and_drawn[1][card_sent_to_bottom_hash] += 1
copy_path = HITS_PATH
else:
copy_path = MISSES_PATH
iterations += 1
print "{0}/{1}".format(hits, iterations)
card_sent_to_bottom_capture_dest_path = os.path.join(copy_path, str(iterations) + "_bottom.png")
card_drawn_capture_dest_path = os.path.join(copy_path, str(iterations) + "_drawn.png")
shutil.move(card_sent_to_bottom_capture, card_sent_to_bottom_capture_dest_path)
shutil.move(card_drawn_capture, card_drawn_capture_dest_path)
card_hash_to_capture[1][card_sent_to_bottom_hash] = card_sent_to_bottom_capture_dest_path
card_hash_to_capture[1][card_drawn_hash] = card_drawn_capture_dest_path
with open(os.path.join(ATTEMPT_NUM_PATH, 'stats.json'), 'w') as stats_file:
json.dump(card_hash_to_times_card_sent_to_bottom_and_drawn, stats_file, sort_keys=True, indent=4)
stats_file.write('\n')
json.dump(card_hash_to_times_card_sent_to_bottom, stats_file, sort_keys=True, indent=4)
stats_file.write('\n')
json.dump(card_hash_to_times_card_drawn, stats_file, sort_keys=True, indent=4)
stats_file.write('\n')
json.dump(card_hash_to_capture, stats_file, sort_keys=True, indent=4)
stats_file.write('\n')
stats_file.write('{0}/{1}'.format(hits, iterations))
click(LOCATION_X_CLOSE)
REGION_CONCEDE_MATCH_BUTTON.wait("concede_match.png")
time.sleep(0.1)
type('\n')
class ZeroValueDict(dict):
def __missing__(self, key):
return 0
def hash_file(file_path):
hasher = hashlib.md5()
with open(file_path, 'rb') as opened_file:
buf = opened_file.read()
hasher.update(buf)
return hasher.hexdigest()
def get_attempt_number_path(attempts):
return os.path.join(TEMP_PATH, 'attempt_{0}'.format(attempts))
if __name__ == '__main__':
while True:
try:
main()
except FindFailed as e:
for i in range(0, TIMES_TO_BEEP_ON_FIND_FAIlED):
java.awt.Toolkit.getDefaultToolkit().beep()
time.sleep(1.0)
print e
with open(os.path.join(get_attempt_number_path(attempts), 'error.log'), 'w') as errorlog:
errorlog.write(str(e))
raise e # Replace this with a way to reset MTGO to a starting state so we can try again.
|
akeeton/MTGO-scry-bug-test
|
MTGO-scry-bug-test.sikuli/MTGO-scry-bug-test.py
|
Python
|
mit
| 5,599
|
import logging
from logging.handlers import RotatingFileHandler
import os
from appdirs import user_cache_dir
def configure_logging():
cache_dir = user_cache_dir(appname='spoppy')
LOG_FILE_NAME = os.path.join(
cache_dir, 'spoppy.log'
)
LOG_LEVEL = getattr(
logging,
os.getenv('SPOPPY_LOG_LEVEL', '').upper(),
logging.INFO
)
if not os.path.isdir(cache_dir):
os.makedirs(cache_dir)
logger = logging.getLogger('spoppy')
logger.setLevel(LOG_LEVEL)
handler = RotatingFileHandler(
LOG_FILE_NAME,
maxBytes=1024 * 1024 * 10,
backupCount=10,
)
handler.setLevel(LOG_LEVEL)
formatter = logging.Formatter(
'%(asctime)s - %(name)s - %(levelname)s - %(message)s'
)
handler.setFormatter(formatter)
logger.addHandler(handler)
logger.debug('Spoppy logger set up')
requests_log = logging.getLogger('urllib3')
requests_log.setLevel(LOG_LEVEL)
requests_log.propagate = True
requests_log.addHandler(handler)
logger.debug('urllib3 logger set up')
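# A minimal usage sketch (not from the original module): call
# configure_logging() once at startup; the level can be overridden through the
# SPOPPY_LOG_LEVEL environment variable (e.g. "DEBUG"). The guard below exists
# only for illustration.
if __name__ == "__main__":
    configure_logging()
    logging.getLogger('spoppy').info('spoppy logging configured')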
|
sindrig/spoppy
|
spoppy/logging_utils.py
|
Python
|
mit
| 1,088
|
# -*- Mode: Python; coding: utf-8; indent-tabs-mode: nil; tab-width: 4 -*-
### BEGIN LICENSE
# Copyright (C) 2016 <Jamie McGowan> <jamiemcgowan.dev@gmail.com>
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
### END LICENSE
from locale import gettext as _
import logging
logger = logging.getLogger('remarkable')
from remarkable_lib.AboutDialog import AboutDialog
# See remarkable_lib.AboutDialog.py for more details about how this class works.
class AboutRemarkableDialog(AboutDialog):
__gtype_name__ = "AboutRemarkableDialog"
def finish_initializing(self, builder): # pylint: disable=E1002
"""Set up the about dialog"""
super(AboutRemarkableDialog, self).finish_initializing(builder)
# Code for other initialization actions should be added here.
|
byohay/Remarkable
|
remarkable/AboutRemarkableDialog.py
|
Python
|
mit
| 1,798
|
"""
SoftLayer.tests.CLI.modules.file_tests
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:license: MIT, see LICENSE for more details.
"""
from SoftLayer import exceptions
from SoftLayer import testing
import json
import mock
class FileTests(testing.TestCase):
def test_access_list(self):
result = self.run_command(['file', 'access-list', '1234'])
self.assert_no_fail(result)
def test_authorize_host_to_volume(self):
result = self.run_command(['file', 'access-authorize', '12345678',
'--hardware-id=100', '--virtual-id=10',
'--ip-address-id=192',
'--ip-address=192.3.2.1',
'--subnet-id=200'])
self.assert_no_fail(result)
def test_deauthorize_host_to_volume(self):
result = self.run_command(['file', 'access-revoke', '12345678',
'--hardware-id=100', '--virtual-id=10',
'--ip-address-id=192',
'--ip-address=192.3.2.1',
'--subnet-id=200'])
self.assert_no_fail(result)
def test_volume_list(self):
result = self.run_command(['file', 'volume-list'])
self.assert_no_fail(result)
self.assertEqual([
{
'bytes_used': None,
'capacity_gb': 10,
'datacenter': 'Dallas',
'id': 1,
'ip_addr': '127.0.0.1',
'storage_type': 'ENDURANCE',
'username': 'user',
'active_transactions': None,
'mount_addr': '127.0.0.1:/TEST',
'rep_partner_count': None
}],
json.loads(result.output))
@mock.patch('SoftLayer.FileStorageManager.list_file_volumes')
def test_volume_count(self, list_mock):
list_mock.return_value = [
{'serviceResource': {'datacenter': {'name': 'dal09'}}},
{'serviceResource': {'datacenter': {'name': 'ams01'}}},
{'serviceResource': {'datacenter': {'name': 'ams01'}}}
]
result = self.run_command(['file', 'volume-count'])
self.assert_no_fail(result)
self.assertEqual(
{
'ams01': 2,
'dal09': 1
},
json.loads(result.output))
def test_snapshot_list(self):
result = self.run_command(['file', 'snapshot-list', '1234'])
self.assert_no_fail(result)
self.assertEqual([
{
'id': 470,
'name': 'unit_testing_note',
'created': '2016-07-06T07:41:19-05:00',
'size_bytes': '42',
}],
json.loads(result.output))
def test_volume_cancel(self):
result = self.run_command([
'--really', 'file', 'volume-cancel', '1234'])
self.assert_no_fail(result)
self.assertEqual('File volume with id 1234 has been marked'
' for cancellation\n', result.output)
self.assert_called_with('SoftLayer_Billing_Item', 'cancelItem',
args=(False, True, None))
def test_volume_cancel_with_billing_item(self):
result = self.run_command([
'--really', 'file', 'volume-cancel', '1234'])
self.assert_no_fail(result)
self.assertEqual('File volume with id 1234 has been marked'
' for cancellation\n', result.output)
self.assert_called_with('SoftLayer_Network_Storage', 'getObject')
def test_volume_cancel_without_billing_item(self):
p_mock = self.set_mock('SoftLayer_Network_Storage', 'getObject')
p_mock.return_value = {
"accountId": 1234,
"capacityGb": 20,
"createDate": "2015-04-29T06:55:55-07:00",
"id": 11111,
"nasType": "NAS",
"username": "SL01SEV307608_1"
}
result = self.run_command([
'--really', 'file', 'volume-cancel', '1234'])
self.assertIsInstance(result.exception, exceptions.SoftLayerError)
def test_volume_detail(self):
result = self.run_command(['file', 'volume-detail', '1234'])
self.assert_no_fail(result)
self.assertEqual({
'Username': 'username',
'Used Space': '0B',
'Endurance Tier': 'READHEAVY_TIER',
'IOPs': 1000,
'Mount Address': '127.0.0.1:/TEST',
'Snapshot Capacity (GB)': '10',
'Snapshot Used (Bytes)': 1024,
'Capacity (GB)': '20GB',
'Target IP': '10.1.2.3',
'Data Center': 'dal05',
'Type': 'ENDURANCE',
'ID': 100,
'# of Active Transactions': '1',
'Ongoing Transaction': 'This is a buffer time in which the customer may cancel the server',
'Replicant Count': '1',
'Replication Status': 'Replicant Volume Provisioning '
'has completed.',
'Replicant Volumes': [[
{'Replicant ID': 'Volume Name', '1784': 'TEST_REP_1'},
{'Replicant ID': 'Target IP', '1784': '10.3.174.79'},
{'Replicant ID': 'Data Center', '1784': 'wdc01'},
{'Replicant ID': 'Schedule', '1784': 'REPLICATION_HOURLY'},
], [
{'Replicant ID': 'Volume Name', '1785': 'TEST_REP_2'},
{'Replicant ID': 'Target IP', '1785': '10.3.177.84'},
{'Replicant ID': 'Data Center', '1785': 'dal01'},
{'Replicant ID': 'Schedule', '1785': 'REPLICATION_DAILY'},
]],
'Original Volume Properties': [
{'Property': 'Original Volume Size',
'Value': '20'},
{'Property': 'Original Volume Name',
'Value': 'test-original-volume-name'},
{'Property': 'Original Snapshot Name',
'Value': 'test-original-snapshot-name'}
]
}, json.loads(result.output))
def test_volume_order_performance_iops_not_given(self):
result = self.run_command(['file', 'volume-order',
'--storage-type=performance', '--size=20',
'--location=dal05'])
self.assertEqual(2, result.exit_code)
def test_volume_order_performance_snapshot_error(self):
result = self.run_command(['file', 'volume-order',
'--storage-type=performance', '--size=20',
'--iops=100', '--location=dal05',
'--snapshot-size=10',
'--service-offering=performance'])
self.assertEqual(2, result.exit_code)
@mock.patch('SoftLayer.FileStorageManager.order_file_volume')
def test_volume_order_performance(self, order_mock):
order_mock.return_value = {
'placedOrder': {
'id': 478,
'items': [
{'description': 'Performance Storage'},
{'description': 'File Storage'},
{'description': '0.25 IOPS per GB'},
{'description': '20 GB Storage Space'},
{'description': '10 GB Storage Space (Snapshot Space)'}]
}
}
result = self.run_command(['file', 'volume-order',
'--storage-type=performance', '--size=20',
'--iops=100', '--location=dal05',
'--snapshot-size=10'])
self.assert_no_fail(result)
self.assertEqual(result.output,
'Order #478 placed successfully!\n'
' > Performance Storage\n > File Storage\n'
' > 0.25 IOPS per GB\n > 20 GB Storage Space\n'
' > 10 GB Storage Space (Snapshot Space)\n')
def test_volume_order_endurance_tier_not_given(self):
result = self.run_command(['file', 'volume-order',
'--storage-type=endurance', '--size=20',
'--location=dal05'])
self.assertEqual(2, result.exit_code)
@mock.patch('SoftLayer.FileStorageManager.order_file_volume')
def test_volume_order_endurance(self, order_mock):
order_mock.return_value = {
'placedOrder': {
'id': 478,
'items': [
{'description': 'Endurance Storage'},
{'description': 'File Storage'},
{'description': '0.25 IOPS per GB'},
{'description': '20 GB Storage Space'},
{'description': '10 GB Storage Space (Snapshot Space)'}]
}
}
result = self.run_command(['file', 'volume-order',
'--storage-type=endurance', '--size=20',
'--tier=0.25', '--location=dal05',
'--snapshot-size=10'])
self.assert_no_fail(result)
self.assertEqual(result.output,
'Order #478 placed successfully!\n'
' > Endurance Storage\n > File Storage\n'
' > 0.25 IOPS per GB\n > 20 GB Storage Space\n'
' > 10 GB Storage Space (Snapshot Space)\n')
@mock.patch('SoftLayer.FileStorageManager.order_file_volume')
def test_volume_order_order_not_placed(self, order_mock):
order_mock.return_value = {}
result = self.run_command(['file', 'volume-order',
'--storage-type=endurance', '--size=20',
'--tier=0.25', '--location=dal05'])
self.assert_no_fail(result)
self.assertEqual(result.output,
'Order could not be placed! Please verify '
'your options and try again.\n')
def test_volume_order_hourly_billing_not_available(self):
result = self.run_command(['file', 'volume-order',
'--storage-type=endurance', '--size=20',
'--tier=0.25', '--location=dal10',
'--billing=hourly',
'--service-offering=enterprise'])
self.assertEqual(2, result.exit_code)
@mock.patch('SoftLayer.FileStorageManager.order_file_volume')
def test_volume_order_hourly_billing(self, order_mock):
order_mock.return_value = {
'placedOrder': {
'id': 479,
'items': [
{'description': 'Storage as a Service'},
{'description': 'File Storage'},
{'description': '20 GB Storage Space'},
{'description': '0.25 IOPS per GB'},
{'description': '10 GB Storage Space (Snapshot Space)'}]
}
}
result = self.run_command(['file', 'volume-order',
'--storage-type=endurance', '--size=20',
'--tier=0.25', '--location=dal05',
'--service-offering=storage_as_a_service',
'--billing=hourly', '--snapshot-size=10'])
self.assert_no_fail(result)
self.assertEqual(result.output,
'Order #479 placed successfully!\n'
' > Storage as a Service\n'
' > File Storage\n'
' > 20 GB Storage Space\n'
' > 0.25 IOPS per GB\n'
' > 10 GB Storage Space (Snapshot Space)\n')
@mock.patch('SoftLayer.FileStorageManager.order_file_volume')
def test_volume_order_performance_manager_error(self, order_mock):
order_mock.side_effect = ValueError('failure!')
result = self.run_command(['file', 'volume-order',
'--storage-type=performance', '--size=20',
'--iops=100', '--location=dal05'])
self.assertEqual(2, result.exit_code)
self.assertEqual('Argument Error: failure!', result.exception.message)
@mock.patch('SoftLayer.FileStorageManager.order_file_volume')
def test_volume_order_endurance_manager_error(self, order_mock):
order_mock.side_effect = ValueError('failure!')
result = self.run_command(['file', 'volume-order',
'--storage-type=endurance', '--size=20',
'--tier=0.25', '--location=dal05'])
self.assertEqual(2, result.exit_code)
self.assertEqual('Argument Error: failure!', result.exception.message)
def test_enable_snapshots(self):
result = self.run_command(['file', 'snapshot-enable', '12345678',
'--schedule-type=HOURLY', '--minute=10',
'--retention-count=5'])
self.assert_no_fail(result)
def test_disable_snapshots(self):
result = self.run_command(['file', 'snapshot-disable', '12345678',
'--schedule-type=HOURLY'])
self.assert_no_fail(result)
def test_list_volume_schedules(self):
result = self.run_command([
'file', 'snapshot-schedule-list', '12345678'])
self.assert_no_fail(result)
self.assertEqual([
{
"week": None,
"maximum_snapshots": None,
"hour": None,
"day_of_week": None,
"day": None,
"replication": None,
"date_of_month": None,
"month_of_year": None,
"active": "",
"date_created": "",
"type": "WEEKLY",
"id": 978,
"minute": '30'
},
{
"week": None,
"maximum_snapshots": None,
"hour": None,
"day_of_week": None,
"day": None,
"replication": '*',
"date_of_month": None,
"month_of_year": None,
"active": "",
"date_created": "",
"type": "INTERVAL",
"id": 988,
"minute": '*'
}
], json.loads(result.output))
def test_create_snapshot(self):
result = self.run_command(['file', 'snapshot-create', '12345678'])
self.assert_no_fail(result)
self.assertEqual('New snapshot created with id: 449\n', result.output)
@mock.patch('SoftLayer.FileStorageManager.create_snapshot')
def test_create_snapshot_unsuccessful(self, snapshot_mock):
snapshot_mock.return_value = []
result = self.run_command(['file', 'snapshot-create', '8', '-n=note'])
self.assertEqual('Error occurred while creating snapshot.\n'
'Ensure volume is not failed over or in another '
'state which prevents taking snapshots.\n',
result.output)
def test_snapshot_restore(self):
result = self.run_command(['file', 'snapshot-restore', '12345678',
'--snapshot-id=87654321'])
self.assert_no_fail(result)
self.assertEqual(result.output, 'File volume 12345678 is being'
' restored using snapshot 87654321\n')
def test_delete_snapshot(self):
result = self.run_command(['file', 'snapshot-delete', '12345678'])
self.assert_no_fail(result)
@mock.patch('SoftLayer.FileStorageManager.order_snapshot_space')
def test_snapshot_order_order_not_placed(self, order_mock):
order_mock.return_value = {}
result = self.run_command(['file', 'snapshot-order', '1234',
'--capacity=10', '--tier=0.25'])
self.assert_no_fail(result)
self.assertEqual(result.output,
'Order could not be placed! Please verify '
'your options and try again.\n')
@mock.patch('SoftLayer.FileStorageManager.order_snapshot_space')
def test_snapshot_order_performance_manager_error(self, order_mock):
order_mock.side_effect = ValueError('failure!')
result = self.run_command(['file', 'snapshot-order', '1234',
'--capacity=10', '--tier=0.25'])
self.assertEqual(2, result.exit_code)
self.assertEqual('Argument Error: failure!', result.exception.message)
@mock.patch('SoftLayer.FileStorageManager.order_snapshot_space')
def test_snapshot_order(self, order_mock):
order_mock.return_value = {
'placedOrder': {
'id': 8702,
'items': [{'description':
'10 GB Storage Space (Snapshot Space)'}],
'status': 'PENDING_APPROVAL',
}
}
result = self.run_command(['file', 'snapshot-order', '1234',
'--capacity=10', '--tier=0.25'])
self.assert_no_fail(result)
self.assertEqual(result.output,
'Order #8702 placed successfully!\n'
' > 10 GB Storage Space (Snapshot Space)\n'
' > Order status: PENDING_APPROVAL\n')
def test_snapshot_cancel(self):
result = self.run_command(['--really',
'file', 'snapshot-cancel', '1234'])
self.assert_no_fail(result)
self.assertEqual('File volume with id 1234 has been marked'
' for snapshot cancellation\n', result.output)
self.assert_called_with('SoftLayer_Billing_Item', 'cancelItem',
args=(False, True, None))
def test_replicant_failover(self):
result = self.run_command(['file', 'replica-failover', '12345678',
'--replicant-id=5678', '--immediate'])
self.assert_no_fail(result)
self.assertEqual('Failover to replicant is now in progress.\n',
result.output)
@mock.patch('SoftLayer.FileStorageManager.failover_to_replicant')
def test_replicant_failover_unsuccessful(self, failover_mock):
failover_mock.return_value = False
result = self.run_command(['file', 'replica-failover', '12345678',
'--replicant-id=5678'])
self.assertEqual('Failover operation could not be initiated.\n',
result.output)
def test_replicant_failback(self):
result = self.run_command(['file', 'replica-failback', '12345678',
'--replicant-id=5678'])
self.assert_no_fail(result)
self.assertEqual('Failback from replicant is now in progress.\n',
result.output)
@mock.patch('SoftLayer.FileStorageManager.failback_from_replicant')
def test_replicant_failback_unsuccessful(self, failback_mock):
failback_mock.return_value = False
result = self.run_command(['file', 'replica-failback', '12345678',
'--replicant-id=5678'])
self.assertEqual('Failback operation could not be initiated.\n',
result.output)
@mock.patch('SoftLayer.FileStorageManager.order_replicant_volume')
def test_replicant_order_order_not_placed(self, order_mock):
order_mock.return_value = {}
result = self.run_command(['file', 'replica-order', '100',
'--snapshot-schedule=DAILY',
'--location=dal05'])
self.assert_no_fail(result)
self.assertEqual(result.output,
'Order could not be placed! Please verify '
'your options and try again.\n')
@mock.patch('SoftLayer.FileStorageManager.order_replicant_volume')
def test_replicant_order(self, order_mock):
order_mock.return_value = {
'placedOrder': {
'id': 77309,
'items': [
{'description': 'Endurance Storage'},
{'description': '2 IOPS per GB'},
{'description': 'File Storage'},
{'description': '20 GB Storage Space'},
{'description': '10 GB Storage Space (Snapshot Space)'},
{'description': '20 GB Storage Space Replicant of: TEST'},
],
}
}
result = self.run_command(['file', 'replica-order', '100',
'--snapshot-schedule=DAILY',
'--location=dal05', '--tier=2'])
self.assert_no_fail(result)
self.assertEqual(result.output,
'Order #77309 placed successfully!\n'
' > Endurance Storage\n'
' > 2 IOPS per GB\n'
' > File Storage\n'
' > 20 GB Storage Space\n'
' > 10 GB Storage Space (Snapshot Space)\n'
' > 20 GB Storage Space Replicant of: TEST\n')
def test_replication_locations(self):
result = self.run_command(['file', 'replica-locations', '1234'])
self.assert_no_fail(result)
self.assertEqual(
{
'12345': 'Dallas 05',
},
json.loads(result.output))
@mock.patch('SoftLayer.FileStorageManager.get_replication_locations')
def test_replication_locations_unsuccessful(self, locations_mock):
locations_mock.return_value = False
result = self.run_command(['file', 'replica-locations', '1234'])
self.assert_no_fail(result)
self.assertEqual('No data centers compatible for replication.\n',
result.output)
def test_replication_partners(self):
result = self.run_command(['file', 'replica-partners', '1234'])
self.assert_no_fail(result)
self.assertEqual([
{
'ID': 1784,
'Account ID': 3000,
'Capacity (GB)': 20,
'Host ID': None,
'Guest ID': None,
'Hardware ID': None,
'Username': 'TEST_REP_1',
},
{
'ID': 1785,
'Account ID': 3001,
'Host ID': None,
'Guest ID': None,
'Hardware ID': None,
'Capacity (GB)': 20,
'Username': 'TEST_REP_2',
}],
json.loads(result.output))
@mock.patch('SoftLayer.FileStorageManager.get_replication_partners')
def test_replication_partners_unsuccessful(self, partners_mock):
partners_mock.return_value = False
result = self.run_command(['file', 'replica-partners', '1234'])
self.assertEqual(
'There are no replication partners for the given volume.\n',
result.output)
@mock.patch('SoftLayer.FileStorageManager.order_duplicate_volume')
def test_duplicate_order_exception_caught(self, order_mock):
order_mock.side_effect = ValueError('order attempt failed, oh noooo!')
result = self.run_command(['file', 'volume-duplicate', '100'])
self.assertEqual(2, result.exit_code)
self.assertEqual('Argument Error: order attempt failed, oh noooo!',
result.exception.message)
@mock.patch('SoftLayer.FileStorageManager.order_duplicate_volume')
def test_duplicate_order_order_not_placed(self, order_mock):
order_mock.return_value = {}
result = self.run_command(['file', 'volume-duplicate', '100',
'--duplicate-iops=1400'])
self.assert_no_fail(result)
self.assertEqual(result.output,
'Order could not be placed! Please verify '
'your options and try again.\n')
@mock.patch('SoftLayer.FileStorageManager.order_duplicate_volume')
def test_duplicate_order(self, order_mock):
order_mock.return_value = {
'placedOrder': {
'id': 24602,
'items': [{'description': 'Storage as a Service'}]
}
}
result = self.run_command(['file', 'volume-duplicate', '100',
'--origin-snapshot-id=470',
'--duplicate-size=250',
'--duplicate-tier=2',
'--duplicate-snapshot-size=20'])
self.assert_no_fail(result)
self.assertEqual(result.output,
'Order #24602 placed successfully!\n'
' > Storage as a Service\n')
@mock.patch('SoftLayer.FileStorageManager.order_duplicate_volume')
def test_duplicate_order_hourly_billing(self, order_mock):
order_mock.return_value = {
'placedOrder': {
'id': 24602,
'items': [{'description': 'Storage as a Service'}]
}
}
result = self.run_command(['file', 'volume-duplicate', '100',
'--origin-snapshot-id=470',
'--duplicate-size=250',
'--duplicate-tier=2', '--billing=hourly',
'--duplicate-snapshot-size=20'])
order_mock.assert_called_with('100', origin_snapshot_id=470,
duplicate_size=250, duplicate_iops=None,
duplicate_tier_level=2,
duplicate_snapshot_size=20,
hourly_billing_flag=True)
self.assert_no_fail(result)
self.assertEqual(result.output,
'Order #24602 placed successfully!\n'
' > Storage as a Service\n')
@mock.patch('SoftLayer.FileStorageManager.order_modified_volume')
def test_modify_order_exception_caught(self, order_mock):
order_mock.side_effect = ValueError('order attempt failed, noooo!')
result = self.run_command(['file', 'volume-modify', '102', '--new-size=1000'])
self.assertEqual(2, result.exit_code)
self.assertEqual('Argument Error: order attempt failed, noooo!', result.exception.message)
@mock.patch('SoftLayer.FileStorageManager.order_modified_volume')
def test_modify_order_order_not_placed(self, order_mock):
order_mock.return_value = {}
result = self.run_command(['file', 'volume-modify', '102', '--new-iops=1400'])
self.assert_no_fail(result)
self.assertEqual('Order could not be placed! Please verify your options and try again.\n', result.output)
@mock.patch('SoftLayer.FileStorageManager.order_modified_volume')
def test_modify_order(self, order_mock):
order_mock.return_value = {'placedOrder': {'id': 24602, 'items': [{'description': 'Storage as a Service'},
{'description': '1000 GBs'},
{'description': '4 IOPS per GB'}]}}
result = self.run_command(['file', 'volume-modify', '102', '--new-size=1000', '--new-tier=4'])
order_mock.assert_called_with('102', new_size=1000, new_iops=None, new_tier_level=4)
self.assert_no_fail(result)
self.assertEqual('Order #24602 placed successfully!\n > Storage as a Service\n > 1000 GBs\n > 4 IOPS per GB\n',
result.output)
|
kyubifire/softlayer-python
|
tests/CLI/modules/file_tests.py
|
Python
|
mit
| 28,087
|
#! /usr/bin/env python
"""
Module with simplex (Nelder-Mead) optimization for defining the flux and
position of a companion using the Negative Fake Companion technique.
"""
from __future__ import print_function
import numpy as np
import matplotlib.pyplot as plt
from scipy.optimize import minimize
from .simplex_fmerit import chisquare
from ..var import frame_center
from ..conf import time_ini, timing, sep
__all__ = ['firstguess_from_coord',
'firstguess_simplex',
'firstguess']
def firstguess_from_coord(planet, center, cube, angs, PLSC, psf,
fwhm, annulus_width, aperture_radius, ncomp,
cube_ref=None, svd_mode='lapack', scaling=None,
fmerit='sum', collapse='median', f_range=None,
display=False, verbose=True, save=False, **kwargs):
"""
Determine a first guess for the flux of a companion at a given position
in the cube by doing a simple grid search evaluating the reduced chi2.
Parameters
----------
planet: numpy.array
The (x,y) position of the planet in the pca processed cube.
center: numpy.array
The (x,y) position of the cube center.
cube: numpy.array
The cube of fits images expressed as a numpy.array.
angs: numpy.array
The parallactic angle fits image expressed as a numpy.array.
PLSC: float
The platescale, in arcsec per pixel.
psf: numpy.array
The scaled psf expressed as a numpy.array.
fwhm : float
        The FWHM in pixels.
annulus_width: int, optional
The width in terms of the FWHM of the annulus on which the PCA is done.
aperture_radius: int, optional
The radius of the circular aperture in terms of the FWHM.
ncomp: int
The number of principal components.
cube_ref : array_like, 3d, optional
Reference library cube. For Reference Star Differential Imaging.
svd_mode : {'lapack', 'randsvd', 'eigen', 'arpack'}, str optional
Switch for different ways of computing the SVD and selected PCs.
scaling : {'temp-mean', 'temp-standard'} or None, optional
With None, no scaling is performed on the input data before SVD. With
"temp-mean" then temporal px-wise mean subtraction is done and with
"temp-standard" temporal mean centering plus scaling to unit variance
is done.
fmerit : {'sum', 'stddev'}, string optional
        Chooses the figure of merit to be used. stddev works better for
        close-in companions sitting on top of speckle noise.
collapse : {'median', 'mean', 'sum', 'trimmean', None}, str or None, optional
Sets the way of collapsing the frames for producing a final image. If
None then the cube of residuals is used when measuring the function of
merit (instead of a single final frame).
f_range: numpy.array, optional
The range of flux tested values. If None, 20 values between 0 and 5000
are tested.
display: boolean, optional
If True, the figure chi2 vs. flux is displayed.
verbose: boolean
If True, display intermediate info in the shell.
save: boolean, optional
If True, the figure chi2 vs. flux is saved.
kwargs: dict, optional
Additional parameters are passed to the matplotlib plot method.
Returns
-------
out : numpy.array
The radial coordinates and the flux of the companion.
"""
xy = planet-center
r0= np.sqrt(xy[0]**2+xy[1]**2)
theta0 = np.mod(np.arctan2(xy[1],xy[0])/np.pi*180,360)
if f_range is not None:
n = f_range.shape[0]
else:
n = 20
f_range = np.linspace(0,5000,n)
chi2r = []
if verbose:
print('Step | flux | chi2r')
counter = 0
for j, f_guess in enumerate(f_range):
chi2r.append(chisquare((r0,theta0,f_guess), cube, angs, PLSC, psf,
fwhm, annulus_width, aperture_radius,(r0,theta0),
ncomp, cube_ref, svd_mode, scaling, fmerit,
collapse))
if chi2r[j] > chi2r[j-1]: counter+=1
if counter == 4: break
if verbose:
print('{}/{} {:.3f} {:.3f}'.format(j+1,n,f_guess,chi2r[j]))
chi2r = np.array(chi2r)
f0 = f_range[chi2r.argmin()]
if display:
plt.figure(figsize=kwargs.pop('figsize',(8,4)))
plt.title(kwargs.pop('title',''))
plt.xlim(f_range[0], f_range[:chi2r.shape[0]].max())
plt.ylim(chi2r.min()*0.9, chi2r.max()*1.1)
plt.plot(f_range[:chi2r.shape[0]],chi2r,
linestyle = kwargs.pop('linestyle','-'),
color = kwargs.pop('color','gray'),
marker = kwargs.pop('marker','.'),
markerfacecolor='r', markeredgecolor='r', **kwargs)
plt.xlabel('flux')
plt.ylabel(r'$\chi^2_{r}$')
plt.grid('on')
if save:
plt.savefig('chi2rVSflux.pdf')
if display:
plt.show()
return (r0,theta0,f0)
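# Worked example (not from the original module): the first guess above converts
# the planet's (x, y) offset from the frame center into polar coordinates
# before the flux grid search,
#     r0     = sqrt(dx**2 + dy**2)
#     theta0 = arctan2(dy, dx) in degrees, wrapped to [0, 360).
# For an offset of (dx, dy) = (10, 10) px this gives r0 ~ 14.14 px and
# theta0 = 45 deg; the grid search then evaluates chisquare() for every flux in
# f_range at that fixed (r0, theta0).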
def firstguess_simplex(p, cube, angs, psf, plsc, ncomp, fwhm, annulus_width,
aperture_radius, cube_ref=None, svd_mode='lapack',
scaling=None, fmerit='sum', collapse='median', p_ini=None,
options=None, verbose=False, **kwargs):
"""
Determine the position of a companion using the negative fake companion
    technique and a standard minimization algorithm (default: Nelder-Mead).
Parameters
----------
p : np.array
Estimate of the candidate position.
cube: numpy.array
The cube of fits images expressed as a numpy.array.
angs: numpy.array
The parallactic angle fits image expressed as a numpy.array.
psf: numpy.array
The scaled psf expressed as a numpy.array.
plsc: float
The platescale, in arcsec per pixel.
ncomp: int
The number of principal components.
fwhm : float
The FHWM in pixels.
annulus_width: int, optional
The width in terms of the FWHM of the annulus on which the PCA is done.
aperture_radius: int, optional
The radius of the circular aperture in terms of the FWHM.
cube_ref : array_like, 3d, optional
Reference library cube. For Reference Star Differential Imaging.
svd_mode : {'lapack', 'randsvd', 'eigen', 'arpack'}, str optional
Switch for different ways of computing the SVD and selected PCs.
scaling : {'temp-mean', 'temp-standard'} or None, optional
With None, no scaling is performed on the input data before SVD. With
"temp-mean" then temporal px-wise mean subtraction is done and with
"temp-standard" temporal mean centering plus scaling to unit variance
is done.
fmerit : {'sum', 'stddev'}, string optional
        Chooses the figure of merit to be used. stddev works better for
        close-in companions sitting on top of speckle noise.
collapse : {'median', 'mean', 'sum', 'trimmean', None}, str or None, optional
Sets the way of collapsing the frames for producing a final image. If
None then the cube of residuals is used when measuring the function of
merit (instead of a single final frame).
p_ini : np.array
Position (r, theta) of the circular aperture center.
options: dict, optional
The scipy.optimize.minimize options.
verbose : boolean, optional
        If True, information is displayed in the shell.
Returns
-------
out : scipy.optimize.minimize solution object
The solution of the minimization algorithm.
"""
if verbose:
print('')
print('{} minimization is running...'.format(options.get('method','Nelder-Mead')))
if p_ini is None:
p_ini = p
solu = minimize(chisquare, p, args=(cube, angs, plsc, psf, fwhm, annulus_width,
aperture_radius, p_ini, ncomp, cube_ref,
svd_mode, scaling, fmerit, collapse),
method = options.pop('method','Nelder-Mead'),
options=options, **kwargs)
if verbose: print(solu)
return solu
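# A minimal sketch (not from the original module): firstguess_simplex above
# forwards the (r, theta, flux) vector to scipy.optimize.minimize with the
# Nelder-Mead simplex method. The quadratic merit function below is an
# assumption standing in for the real chi-square function of merit.
def _toy_merit(p, r_true=30.0, theta_true=120.0, f_true=400.0):
    """Quadratic bowl with its minimum at (r_true, theta_true, f_true)."""
    return (p[0] - r_true)**2 + (p[1] - theta_true)**2 + (p[2] - f_true)**2
def _demo_simplex():
    """Run a Nelder-Mead call analogous to the one in firstguess_simplex."""
    solu = minimize(_toy_merit, x0=(28.0, 118.0, 350.0), method='Nelder-Mead',
                    options={'maxiter': 500, 'maxfev': 1000})
    return solu.x  # approaches (30.0, 120.0, 400.0)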
def firstguess(cube, angs, psfn, ncomp, plsc, planets_xy_coord, fwhm=4,
annulus_width=3, aperture_radius=4, cube_ref=None,
svd_mode='lapack', scaling=None, fmerit='sum', collapse='median',
p_ini=None, f_range=None, simplex=True, simplex_options=None,
display=False, verbose=True, save=False, figure_options=None):
""" Determines a first guess for the position and the flux of a planet.
We process the cube without injecting any negative fake companion.
This leads to the visual detection of the planet(s). For each of them,
one can estimate the (x,y) coordinates in pixel for the position of the
star, as well as the planet(s).
From the (x,y) coordinates in pixels for the star and planet(s), we can
estimate a preliminary guess for the position and flux for each planet
    by using the method "firstguess_from_coord". The argument "f_range" allows
    one to indicate prior limits for the flux (optional, default: None).
    This step can be repeated to refine the preliminary guess for the flux.
    We can go a step further by using a simplex (Nelder-Mead) minimization to
    estimate the first guess based on the preliminary guess.
Parameters
----------
cube: numpy.array
The cube of fits images expressed as a numpy.array.
angs: numpy.array
The parallactic angle fits image expressed as a numpy.array.
psfn: numpy.array
The centered and normalized (flux in a 1*FWHM aperture must equal 1)
PSF 2d-array.
ncomp: int
The number of principal components.
plsc: float
The platescale, in arcsec per pixel.
    planets_xy_coord: array or list
The list of (x,y) positions of the planets.
fwhm : float, optional
        The FWHM in pixels.
annulus_width: int, optional
The width in terms of the FWHM of the annulus on which the PCA is done.
aperture_radius: int, optional
The radius of the circular aperture in terms of the FWHM.
cube_ref : array_like, 3d, optional
Reference library cube. For Reference Star Differential Imaging.
svd_mode : {'lapack', 'randsvd', 'eigen', 'arpack'}, str optional
Switch for different ways of computing the SVD and selected PCs.
scaling : {'temp-mean', 'temp-standard'} or None, optional
With None, no scaling is performed on the input data before SVD. With
"temp-mean" then temporal px-wise mean subtraction is done and with
"temp-standard" temporal mean centering plus scaling to unit variance
is done.
fmerit : {'sum', 'stddev'}, string optional
        Chooses the figure of merit to be used. stddev works better for
        close-in companions sitting on top of speckle noise.
collapse : {'median', 'mean', 'sum', 'trimmean', None}, str or None, optional
Sets the way of collapsing the frames for producing a final image. If
None then the cube of residuals is used when measuring the function of
merit (instead of a single final frame).
p_ini: numpy.array
Position (r, theta) of the circular aperture center.
f_range: numpy.array, optional
The range of flux tested values. If None, 20 values between 0 and 5000
are tested.
simplex: boolean, optional
If True, the Nelder-Mead minimization is performed after the flux grid
search.
simplex_options: dict, optional
The scipy.optimize.minimize options.
display: boolean, optional
If True, the figure chi2 vs. flux is displayed.
verbose: boolean
If True, display intermediate info in the shell.
save: boolean, optional
If True, the figure chi2 vs. flux is saved.
figure_options: dict, optional
Additional parameters are passed to the matplotlib plot method.
Returns
-------
    out : tuple of three numpy.array
        The radial separation, polar angle and flux estimated for each
        companion, i.e. (r_0, theta_0, f_0).
        WARNING: POLAR ANGLE IS NOT THE CONVENTIONAL NORTH-TO-EAST P.A.
"""
if verbose: start_time = time_ini()
if figure_options is None:
figure_options = {'color':'gray', 'marker':'.',
'title':r'$\chi^2_{r}$ vs flux'}
planets_xy_coord = np.array(planets_xy_coord)
n_planet = planets_xy_coord.shape[0]
center_xy_coord = np.array(frame_center(cube[0]))
if f_range is None:
f_range = np.linspace(0,5000,20)
if simplex_options is None:
simplex_options = {'xtol':1e-1, 'maxiter':500, 'maxfev':1000}
r_0 = np.zeros(n_planet)
theta_0 = np.zeros_like(r_0)
f_0 = np.zeros_like(r_0)
for index_planet in range(n_planet):
if verbose:
print('')
print(sep)
print(' Planet {} '.format(index_planet))
print(sep)
print('')
msg2 = 'Planet {}: flux estimation at the position [{},{}], running ...'
print(msg2.format(index_planet,planets_xy_coord[index_planet,0],
planets_xy_coord[index_planet,1]))
res_init = firstguess_from_coord(planets_xy_coord[index_planet],
center_xy_coord, cube, angs, plsc, psfn,
fwhm, annulus_width, aperture_radius,
ncomp, f_range=f_range,
cube_ref=cube_ref, svd_mode=svd_mode,
scaling=scaling, fmerit=fmerit,
collapse=collapse, display=display,
verbose=verbose, save=save,
**figure_options)
r_pre, theta_pre, f_pre = res_init
if verbose:
msg3 = 'Planet {}: preliminary guess: (r, theta, f)=({:.1f}, {:.1f}, {:.1f})'
print(msg3.format(index_planet,r_pre, theta_pre, f_pre))
if simplex:
if verbose:
msg4 = 'Planet {}: Simplex Nelder-Mead minimization, running ...'
print(msg4.format(index_planet))
res = firstguess_simplex((r_pre,theta_pre,f_pre), cube, angs, psfn,
plsc, ncomp, fwhm, annulus_width,
aperture_radius, cube_ref=cube_ref,
svd_mode=svd_mode, scaling=scaling,
fmerit=fmerit, collapse=collapse, p_ini=p_ini,
options=simplex_options, verbose=False)
r_0[index_planet], theta_0[index_planet], f_0[index_planet] = res.x
if verbose:
msg5 = 'Planet {}: Success: {}, nit: {}, nfev: {}, chi2r: {}'
print(msg5.format(index_planet,res.success,res.nit,res.nfev,
res.fun))
print('message: {}'.format(res.message))
else:
if verbose:
msg4bis = 'Planet {}: Simplex Nelder-Mead minimization skipped.'
print(msg4bis.format(index_planet))
r_0[index_planet] = r_pre
theta_0[index_planet] = theta_pre
f_0[index_planet] = f_pre
        if verbose:
            centy, centx = frame_center(cube[0])
            posy = r_0[index_planet] * np.sin(np.deg2rad(theta_0[index_planet])) + centy
            posx = r_0[index_planet] * np.cos(np.deg2rad(theta_0[index_planet])) + centx
            msg6 = 'Planet {}: simplex result: (r, theta, f)=({:.3f}, {:.3f}'
            msg6 += ', {:.3f}) at \n (X,Y)=({:.2f}, {:.2f})'
            print(msg6.format(index_planet, r_0[index_planet],
                              theta_0[index_planet], f_0[index_planet],
                              posx, posy))
if verbose:
print('\n', sep, '\nDONE !\n', sep)
timing(start_time)
return (r_0,theta_0,f_0)
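# Illustrative usage sketch (not part of the original module). `cube`, `angs`
# and `psfn` stand for an ADI cube, its parallactic angles and a normalized
# PSF loaded elsewhere; the numeric values are placeholders:
#
#     r_0, theta_0, f_0 = firstguess(cube, angs, psfn, ncomp=5, plsc=0.0272,
#                                    planets_xy_coord=[(63, 58)], fwhm=4.5,
#                                    f_range=np.linspace(0, 2000, 20),
#                                    simplex=True, verbose=True)
#
# The returned polar angle follows this module's internal convention (see the
# WARNING in the docstring), not the conventional North-to-East P.A.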
|
henry-ngo/VIP
|
vip_hci/negfc/simplex_optim.py
|
Python
|
mit
| 16,676
|
import unittest
import syzoj
import hashlib
from random import randint
class TestRegister(unittest.TestCase):
def md5_pass(self, password):
md5 = hashlib.md5()
md5.update(password)
return md5.hexdigest()
def test_register(self):
user = "tester_%d" % randint(1, int(1e9))
pw = self.md5_pass("123_%d" % randint(1, 100))
email = "84%d@qq.com" % randint(1, 10000)
print user, pw, email
self.assertEqual(syzoj.controller.register(user, pw, email), 1)
self.assertNotEqual(syzoj.controller.register(user, pw, email), 1)
def test_multiple_register(self):
rid = randint(1, 10000)
for i in range(1, 2):
pw = self.md5_pass("123_%d_%d" % (rid, i))
print i, pw
self.assertEqual(syzoj.controller.register("hello_%d_%d" % (rid, i), pw, "%d@qq.com" % i), 1)
if __name__ == "__main__":
unittest.main()
|
cdcq/jzyzj
|
test/test_controller.py
|
Python
|
mit
| 932
|
#!/usr/bin/python
#
# Request for symbolList. Currently RFA only supports refresh messages
# for symbolList. Hence, polling is required and symbolListRequest is called
# internally by getSymbolList.
#
# IMAGE/REFRESH:
# ({'MTYPE':'REFRESH','RIC':'0#BMD','SERVICE':'NIP'},
# {'ACTION':'ADD','MTYPE':'IMAGE','SERVICE':'NIP','RIC':'0#BMD','KEY':'FKLI'},
# {'ACTION':'ADD','MTYPE':'IMAGE','SERVICE':'NIP','RIC':'0#BMD','KEY':'FKLL'},
# {'ACTION':'ADD','MTYPE':'IMAGE','SERVICE':'NIP','RIC':'0#BMD','KEY':'FKLM'})
#
import pyrfa
p = pyrfa.Pyrfa()
p.createConfigDb("./pyrfa.cfg")
p.acquireSession("Session3")
p.createOMMConsumer()
p.login()
p.directoryRequest()
p.dictionaryRequest()
RIC = "0#BMD"
symbolList = p.getSymbolList(RIC)
print("\n=======\n" + RIC + "\n=======")
print(symbolList.replace(" ","\n"))
|
devcartel/pyrfa
|
examples/symbollist.py
|
Python
|
mit
| 814
|
def RGB01ToHex(rgb):
"""
Return an RGB color value as a hex color string.
"""
return '#%02x%02x%02x' % tuple([int(x * 255) for x in rgb])
def hexToRGB01(hexColor):
"""
Return a hex color string as an RGB tuple of floats in the range 0..1
"""
h = hexColor.lstrip('#')
return tuple([x / 255.0 for x in [int(h[i:i + 2], 16) for i in (0, 2, 4)]])
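# Minimal self-check (not part of the original module): round-trip a colour
# between the two representations.
if __name__ == "__main__":
    hex_color = RGB01ToHex((1.0, 0.5, 0.0))
    print(hex_color)              # #ff7f00
    print(hexToRGB01(hex_color))  # (1.0, 0.4980..., 0.0)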
|
bohdon/maya-pulse
|
src/pulse/scripts/pulse/colors.py
|
Python
|
mit
| 381
|
from _pydev_runfiles import pydev_runfiles_xml_rpc
import pickle
import zlib
import base64
import os
from pydevd_file_utils import canonical_normalized_path
import pytest
import sys
import time
try:
from pathlib import Path
except:
Path = None
#=========================================================================
# Load filters with tests we should skip
#=========================================================================
py_test_accept_filter = None
def _load_filters():
global py_test_accept_filter
if py_test_accept_filter is None:
py_test_accept_filter = os.environ.get('PYDEV_PYTEST_SKIP')
if py_test_accept_filter:
py_test_accept_filter = pickle.loads(
zlib.decompress(base64.b64decode(py_test_accept_filter)))
if Path is not None:
# Newer versions of pytest resolve symlinks, so, we
# may need to filter with a resolved path too.
new_dct = {}
for filename, value in py_test_accept_filter.items():
new_dct[canonical_normalized_path(str(Path(filename).resolve()))] = value
py_test_accept_filter.update(new_dct)
else:
py_test_accept_filter = {}
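# Note (comment added for clarity): PYDEV_PYTEST_SKIP is expected to contain
# base64(zlib(pickle(dict))), where the dict maps a test-file path to the list
# of accepted test names, e.g.
# {'/path/to/test_example.py': ['test_a', 'TestCase.test_b']}.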
def is_in_xdist_node():
main_pid = os.environ.get('PYDEV_MAIN_PID')
if main_pid and main_pid != str(os.getpid()):
return True
return False
connected = False
def connect_to_server_for_communication_to_xml_rpc_on_xdist():
global connected
if connected:
return
connected = True
if is_in_xdist_node():
port = os.environ.get('PYDEV_PYTEST_SERVER')
if not port:
sys.stderr.write(
'Error: no PYDEV_PYTEST_SERVER environment variable defined.\n')
else:
pydev_runfiles_xml_rpc.initialize_server(int(port), daemon=True)
PY2 = sys.version_info[0] <= 2
PY3 = not PY2
class State:
start_time = time.time()
buf_err = None
buf_out = None
def start_redirect():
if State.buf_out is not None:
return
from _pydevd_bundle import pydevd_io
State.buf_err = pydevd_io.start_redirect(keep_original_redirection=True, std='stderr')
State.buf_out = pydevd_io.start_redirect(keep_original_redirection=True, std='stdout')
def get_curr_output():
buf_out = State.buf_out
buf_err = State.buf_err
return buf_out.getvalue() if buf_out is not None else '', buf_err.getvalue() if buf_err is not None else ''
def pytest_unconfigure():
if is_in_xdist_node():
return
# Only report that it finished when on the main node (we don't want to report
# the finish on each separate node).
pydev_runfiles_xml_rpc.notifyTestRunFinished(
'Finished in: %.2f secs.' % (time.time() - State.start_time,))
def pytest_collection_modifyitems(session, config, items):
# A note: in xdist, this is not called on the main process, only in the
# secondary nodes, so, we'll actually make the filter and report it multiple
# times.
connect_to_server_for_communication_to_xml_rpc_on_xdist()
_load_filters()
if not py_test_accept_filter:
pydev_runfiles_xml_rpc.notifyTestsCollected(len(items))
return # Keep on going (nothing to filter)
new_items = []
for item in items:
f = canonical_normalized_path(str(item.parent.fspath))
name = item.name
if f not in py_test_accept_filter:
# print('Skip file: %s' % (f,))
continue # Skip the file
i = name.find('[')
name_without_parametrize = None
if i > 0:
name_without_parametrize = name[:i]
accept_tests = py_test_accept_filter[f]
if item.cls is not None:
class_name = item.cls.__name__
else:
class_name = None
for test in accept_tests:
if test == name:
# Direct match of the test (just go on with the default
# loading)
new_items.append(item)
break
if name_without_parametrize is not None and test == name_without_parametrize:
# This happens when parameterizing pytest tests on older versions
# of pytest where the test name doesn't include the fixture name
# in it.
new_items.append(item)
break
if class_name is not None:
if test == class_name + '.' + name:
new_items.append(item)
break
if name_without_parametrize is not None and test == class_name + '.' + name_without_parametrize:
new_items.append(item)
break
if class_name == test:
new_items.append(item)
break
else:
pass
# print('Skip test: %s.%s. Accept: %s' % (class_name, name, accept_tests))
# Modify the original list
items[:] = new_items
pydev_runfiles_xml_rpc.notifyTestsCollected(len(items))
try:
"""
pytest > 5.4 uses own version of TerminalWriter based on py.io.TerminalWriter
and assumes there is a specific method TerminalWriter._write_source
so try load pytest version first or fallback to default one
"""
from _pytest._io import TerminalWriter
except ImportError:
from py.io import TerminalWriter
def _get_error_contents_from_report(report):
if report.longrepr is not None:
try:
tw = TerminalWriter(stringio=True)
stringio = tw.stringio
except TypeError:
import io
stringio = io.StringIO()
tw = TerminalWriter(file=stringio)
tw.hasmarkup = False
report.toterminal(tw)
exc = stringio.getvalue()
s = exc.strip()
if s:
return s
return ''
def pytest_collectreport(report):
error_contents = _get_error_contents_from_report(report)
if error_contents:
report_test('fail', '<collect errors>', '<collect errors>', '', error_contents, 0.0)
def append_strings(s1, s2):
if s1.__class__ == s2.__class__:
return s1 + s2
if sys.version_info[0] == 2:
if not isinstance(s1, basestring):
s1 = str(s1)
if not isinstance(s2, basestring):
s2 = str(s2)
# Prefer bytes
if isinstance(s1, unicode):
s1 = s1.encode('utf-8')
if isinstance(s2, unicode):
s2 = s2.encode('utf-8')
return s1 + s2
else:
# Prefer str
if isinstance(s1, bytes):
s1 = s1.decode('utf-8', 'replace')
if isinstance(s2, bytes):
s2 = s2.decode('utf-8', 'replace')
return s1 + s2
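# Example of the behaviour above (comment added for illustration, Python 3
# semantics): append_strings('out: ', b'\xc3\xa9') returns 'out: é', while
# append_strings(b'a', b'b') returns b'ab' since both operands share a type.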
def pytest_runtest_logreport(report):
if is_in_xdist_node():
# When running with xdist, we don't want the report to be called from the node, only
# from the main process.
return
report_duration = report.duration
report_when = report.when
report_outcome = report.outcome
if hasattr(report, 'wasxfail'):
if report_outcome != 'skipped':
report_outcome = 'passed'
if report_outcome == 'passed':
# passed on setup/teardown: no need to report if in setup or teardown
# (only on the actual test if it passed).
if report_when in ('setup', 'teardown'):
return
status = 'ok'
elif report_outcome == 'skipped':
status = 'skip'
else:
            # pytest reports only passed, skipped and failed (there is no
            # separate 'error' outcome), so consider it an error if not on call.
if report_when in ('setup', 'teardown'):
status = 'error'
else:
# any error in the call (not in setup or teardown) is considered a
# regular failure.
status = 'fail'
# This will work if pytest is not capturing it, if it is, nothing will
# come from here...
captured_output, error_contents = getattr(report, 'pydev_captured_output', ''), getattr(report, 'pydev_error_contents', '')
for type_section, value in report.sections:
if value:
if type_section in ('err', 'stderr', 'Captured stderr call'):
error_contents = append_strings(error_contents, value)
else:
                captured_output = append_strings(captured_output, value)
filename = getattr(report, 'pydev_fspath_strpath', '<unable to get>')
test = report.location[2]
if report_outcome != 'skipped':
# On skipped, we'll have a traceback for the skip, which is not what we
# want.
exc = _get_error_contents_from_report(report)
if exc:
if error_contents:
error_contents = append_strings(error_contents, '----------------------------- Exceptions -----------------------------\n')
error_contents = append_strings(error_contents, exc)
report_test(status, filename, test, captured_output, error_contents, report_duration)
def report_test(status, filename, test, captured_output, error_contents, duration):
'''
@param filename: 'D:\\src\\mod1\\hello.py'
@param test: 'TestCase.testMet1'
@param status: fail, error, ok
'''
time_str = '%.2f' % (duration,)
pydev_runfiles_xml_rpc.notifyTest(
status, captured_output, error_contents, filename, test, time_str)
if not hasattr(pytest, 'hookimpl'):
raise AssertionError('Please upgrade pytest (the current version of pytest: %s is unsupported)' % (pytest.__version__,))
@pytest.hookimpl(hookwrapper=True)
def pytest_runtest_makereport(item, call):
outcome = yield
report = outcome.get_result()
report.pydev_fspath_strpath = item.fspath.strpath
report.pydev_captured_output, report.pydev_error_contents = get_curr_output()
@pytest.mark.tryfirst
def pytest_runtest_setup(item):
'''
Note: with xdist will be on a secondary process.
'''
# We have our own redirection: if xdist does its redirection, we'll have
# nothing in our contents (which is OK), but if it does, we'll get nothing
# from pytest but will get our own here.
start_redirect()
filename = item.fspath.strpath
test = item.location[2]
pydev_runfiles_xml_rpc.notifyStartTest(filename, test)
|
glenngillen/dotfiles
|
.vscode/extensions/ms-python.python-2021.5.842923320/pythonFiles/lib/python/debugpy/_vendored/pydevd/_pydev_runfiles/pydev_runfiles_pytest2.py
|
Python
|
mit
| 10,660
|
from __future__ import print_function
import time
import numpy as np
from mpi4py import MPI
from python_compat import range
comm = MPI.COMM_WORLD
def r_print(*args):
"""
print message on the root node (rank 0)
:param args:
:return:
"""
if comm.rank == 0:
print('ROOT:', end=' ')
for i in args:
print(i, end=' ')
# noinspection PyArgumentList
print()
def l_print(*args):
"""
print message on each node, synchronized
:param args:
:return:
"""
for rank in range(0, comm.size):
comm.Barrier()
if rank == comm.rank:
l_print_no_barrier(*args)
comm.Barrier()
def l_print_no_barrier(*args):
"""
print message on each node
:param args:
:return:
"""
print(comm.rank, ':', end=' ')
for i in args:
print(i, end=' ')
# noinspection PyArgumentList
print()
def get_chunks(num_items, num_steps):
"""
divide items into n=num_steps chunks
:param num_items:
:param num_steps:
:return: chunk sizes, chunk offsets
"""
chunk_sizes = np.zeros(num_steps, dtype=int)
chunk_sizes[:] = num_items // num_steps
chunk_sizes[:num_items % num_steps] += 1
chunk_offsets = np.roll(np.cumsum(chunk_sizes), 1)
chunk_offsets[0] = 0
return chunk_sizes, chunk_offsets
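# Worked example (comment added for illustration): get_chunks(10, 3) returns
# chunk_sizes == [4, 3, 3] and chunk_offsets == [0, 4, 7]; the remainder is
# spread over the first chunks.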
def barrier_sleep(mpi_comm=comm, tag=1747362612, sleep=0.1, use_yield=False):
"""
As suggested by Lisandro Dalcin at:
https://groups.google.com/forum/?fromgroups=#!topic/mpi4py/nArVuMXyyZI
"""
size = mpi_comm.Get_size()
if size == 1:
return
rank = mpi_comm.Get_rank()
mask = 1
while mask < size:
dst = (rank + mask) % size
src = (rank - mask + size) % size
req = mpi_comm.isend(None, dst, tag)
while not mpi_comm.Iprobe(src, tag):
if use_yield:
yield False
time.sleep(sleep)
mpi_comm.recv(None, src, tag)
req.Wait()
mask <<= 1
if use_yield:
yield True
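# Note (comment added for clarity, not in the original): because barrier_sleep
# contains `yield` statements it is a generator function, so calling it only
# creates a generator object. To actually run the barrier, exhaust it, e.g.:
#
#     for _ in barrier_sleep(comm, use_yield=False):
#         pass
#
# With use_yield=True the generator yields False on each polling attempt and
# True once every rank has been reached, so the caller can interleave other work.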
|
yishayv/lyacorr
|
mpi_helper.py
|
Python
|
mit
| 2,068
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))
with open(path.join(here, 'README.rst'), encoding='utf-8') as f:
readme = f.read()
requirements = [
'prov>=1.5.3',
]
test_requirements = [
'pydot>=1.2.0'
]
setup(
name='voprov',
version='0.0.2',
description='A library for IVOA Provenance Data Model supporting PROV-JSON, '
'PROV-XML and PROV-N',
long_description=readme,
author='Jean-Francois Sornay',
author_email='jeanfrancois.sornay@gmail.com',
url='https://github.com/sanguillon/voprov/',
packages=find_packages(),
include_package_data=True,
install_requires=requirements,
extras_require={
'dot': ['pydot>=1.2.0'],
},
license="MIT",
zip_safe=False,
keywords=[
'provenance', 'graph', 'model', 'VOPROV', 'provenance-dm', 'PROVENANCE-DM', 'PROV-JSON', 'JSON',
'PROV-XML', 'PROV-N'
],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Intended Audience :: Information Technology',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Natural Language :: French',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Topic :: Scientific/Engineering :: Information Analysis',
],
tests_require=test_requirements,
python_requires='>=2',
)
|
sanguillon/voprov
|
setup.py
|
Python
|
mit
| 1,855
|
import pandas as pd
from sklearn import model_selection
from sklearn.ensemble import RandomForestClassifier
url = "https://archive.ics.uci.edu/ml/machine-learning-databases/pima-indians-diabetes/pima-indians-diabetes.data"
names = ['preg', 'plas', 'pres', 'skin', 'test', 'mass', 'pedi', 'age', 'class']
df = pd.read_csv(url, names=names)
array = df.values
X = array[:,0:8]
y = array[:,8]
seed = 21
num_trees = 100
max_features = 3
kfold = model_selection.KFold(n_splits=10, random_state=seed)
model = RandomForestClassifier(n_estimators=num_trees, max_features=max_features)
results = model_selection.cross_val_score(model, X, y, cv=kfold)
print('results: ')
print(results)
print()
print('mean: ' + str(results.mean()))
|
sindresf/The-Playground
|
Python/Machine Learning/ScikitClassifiers/Classifiers/Random_Forrest_Classification.py
|
Python
|
mit
| 724
|
# -*- coding: utf-8 -*-
#
# scikit-learn documentation build configuration file, created by
# sphinx-quickstart on Fri Jan 8 09:13:42 2010.
#
# This file is execfile()d with the current directory set to its containing
# dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
from __future__ import print_function
import sys
import os
import warnings
# If extensions (or modules to document with autodoc) are in another
# directory, add these directories to sys.path here. If the directory
# is relative to the documentation root, use os.path.abspath to make it
# absolute, like shown here.
sys.path.insert(0, os.path.abspath('sphinxext'))
from github_link import make_linkcode_resolve
import sphinx_gallery
import chainladder as cl
# this is needed for some reason...
# see https://github.com/numpy/numpydoc/issues/69
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = ['sphinx.ext.autodoc', 'numpydoc', 'sphinx_gallery.gen_gallery',
'sphinx.ext.githubpages', 'nbsphinx', 'sphinx.ext.mathjax',
'sphinx.ext.autosummary', 'sphinx_gallery.load_style',
'IPython.sphinxext.ipython_console_highlighting']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# this is needed for some reason...
# see https://github.com/numpy/numpydoc/issues/69
numpydoc_class_members_toctree = False
# For maths, use mathjax by default and svg if NO_MATHJAX env variable is set
# (useful for viewing the doc offline)
if os.environ.get('NO_MATHJAX'):
extensions.append('sphinx.ext.imgmath')
imgmath_image_format = 'svg'
else:
extensions.append('sphinx.ext.mathjax')
mathjax_path = ('https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.0/'
'MathJax.js?config=TeX-AMS_SVG')
autodoc_default_flags = ['members', 'inherited-members']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['templates']
# generate autosummary even if no references
autosummary_generate = True
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'ChainLadder'
copyright = '2017, John Bogaardt'
author = 'John Bogaardt'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = cl.__version__
# The full version, including alpha/beta/rc tags.
release = cl.__version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
#language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ['_build', 'templates', 'includes', 'themes', '**.ipynb_checkpoints']
# The reST default role (used for this markup: `text`) to use for all
# documents.
default_role = 'any'
# If true, '()' will be appended to :func: etc. cross-reference text.
add_function_parentheses = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'scikit-learn'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
html_theme_options = {'oldversion': False, 'collapsiblesidebar': True,
'google_analytics': False, 'surveybanner': False,
'sprintbanner': False}
# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = ['themes']
# A shorter title for the navigation bar. Default is the same as html_title.
html_short_title = 'chainladder'
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static/images']
# -- Options for HTMLHelp output ------------------------------------------
# If false, no module index is generated.
html_domain_indices = False
# If false, no index is generated.
html_use_index = False
# Output file base name for HTML help builder.
htmlhelp_basename = 'chainladderdoc'
# -- Options for LaTeX output ------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
'preamble': r"""
\usepackage{amsmath}\usepackage{amsfonts}\usepackage{bm}
\usepackage{morefloats}\usepackage{enumitem} \setlistdepth{10}
"""
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass
# [howto/manual]).
latex_documents = [('index', 'user_guide.tex', 'scikit-learn user guide',
'scikit-learn developers', 'manual'), ]
# Documents to append as an appendix to all manuals.
# latex_appendices = []
# If false, no module index is generated.
latex_domain_indices = False
trim_doctests_flags = True
# intersphinx configuration
intersphinx_mapping = {
'python': ('https://docs.python.org/{.major}'.format(
sys.version_info), None),
'numpy': ('https://docs.scipy.org/doc/numpy/', None),
'scipy': ('https://docs.scipy.org/doc/scipy/reference', None),
'matplotlib': ('https://matplotlib.org/', None),
'pandas': ('https://pandas.pydata.org/pandas-docs/stable/', None),
'joblib': ('https://joblib.readthedocs.io/en/latest/', None),
}
sphinx_gallery_conf = {
'doc_module': 'chainladder',
'backreferences_dir': os.path.join('modules', 'generated'),
'reference_url': {'chainladder': None},
'capture_repr': ()
}
# The following dictionary contains the information used to create the
# thumbnails for the front page of the scikit-learn home page.
# key: first image in set
# values: (number of plot in set, height of thumbnail)
carousel_thumbs = {'sphx_glr_plot_asvanced_triangle_001.png': 600,
'sphx_glr_plot_ave_analysis_001.png': 372,
}
def make_carousel_thumbs(app, exception):
"""produces the final resized carousel images"""
if exception is not None:
return
print('Preparing carousel images')
image_dir = os.path.join(app.builder.outdir, '_images')
for glr_plot, max_width in carousel_thumbs.items():
image = os.path.join(image_dir, glr_plot)
if os.path.exists(image):
c_thumb = os.path.join(image_dir, glr_plot[:-4] + '_carousel.png')
sphinx_gallery.gen_rst.scale_image(image, c_thumb, max_width, 190)
# Config for sphinx_issues
issues_uri = 'https://github.com/casact/chainladder-python/issues/{issue}'
issues_github_path = 'chainladder-python/chainladder'
issues_user_uri = 'https://github.com/{user}'
def setup(app):
# to hide/show the prompt in code examples:
app.add_js_file('js/copybutton.js')
app.add_js_file('js/extra.js')
app.connect('build-finished', make_carousel_thumbs)
# The following is used by sphinx.ext.linkcode to provide links to github
linkcode_resolve = make_linkcode_resolve('chainladder',
u'https://github.com/casact/'
'chainladder-python/blob/{revision}/'
'{package}/{path}#L{lineno}')
warnings.filterwarnings("ignore", category=UserWarning,
module="matplotlib",
message='Matplotlib is currently using agg, which is a'
' non-GUI backend, so cannot show the figure.')
|
jbogaardt/chainladder-python
|
docs/conf.py
|
Python
|
mit
| 8,890
|
from python.equal import Equal
def count_steps_test():
equal_instance = Equal()
array_a = [2, 2, 3, 7]
array_b = [53, 361, 188, 665, 786, 898, 447, 562, 272, 123, 229, 629, 670,
848, 994, 54, 822, 46, 208, 17, 449, 302, 466, 832, 931, 778,
156, 39, 31, 777, 749, 436, 138, 289, 453, 276, 539, 901, 839,
811, 24, 420, 440, 46, 269, 786, 101, 443, 832, 661, 460, 281,
964, 278, 465, 247, 408, 622, 638, 440, 751, 739, 876, 889, 380,
330, 517, 919, 583, 356, 83, 959, 129, 875, 5, 750, 662, 106,
193, 494, 120, 653, 128, 84, 283, 593, 683, 44, 567, 321, 484,
318, 412, 712, 559, 792, 394, 77, 711, 977, 785, 146, 936, 914,
22, 942, 664, 36, 400, 857]
array_c = [520, 862, 10, 956, 498, 956, 991, 542, 523, 664, 378, 194, 76,
90, 753, 868, 837, 830, 932, 814, 616, 78, 103, 882, 452, 397,
899, 488, 149, 108, 723, 22, 323, 733, 330, 821, 41, 322, 715,
917, 986, 93, 111, 63, 535, 864, 931, 372, 47, 215, 539, 15, 294,
642, 897, 98, 391, 796, 939, 540, 257, 662, 562, 580, 747, 893,
401, 789, 215, 468, 58, 553, 561, 169, 616, 448, 385, 900, 173,
432, 115, 712]
array_d = [761, 706, 697, 212, 97, 845, 151, 637, 102, 165, 200, 34, 912,
445, 435, 53, 12, 255, 111, 565, 816, 632, 534, 617, 18, 786,
790, 802, 253, 502, 602, 15, 208, 651, 227, 305, 848, 730, 294,
303, 895, 846, 337, 159, 291, 125, 565, 655, 380, 28, 221, 549,
13, 107, 166, 31, 245, 308, 185, 498, 810, 139, 865, 370, 790,
444, 27, 639, 174, 321, 294, 421, 168, 631, 933, 811, 756, 498,
467, 137, 878, 40, 686, 891, 499, 204, 274, 744, 512, 460, 242,
674, 599, 108, 396, 742, 552, 423, 733, 79, 96, 27, 852, 264,
658, 785, 76, 415, 635, 895, 904, 514, 935, 942, 757, 434, 498,
32, 178, 10, 844, 772, 36, 795, 880, 432, 537, 785, 855, 270,
864, 951, 649, 716, 568, 308, 854, 996, 75, 489, 891, 331, 355,
178, 273, 113, 612, 771, 497, 142, 133, 341, 914, 521, 488, 147,
953, 26, 284, 160, 648, 500, 463, 298, 568, 31, 958, 422, 379,
385, 264, 622, 716, 619, 800, 341, 732, 764, 464, 581, 258, 949,
922, 173, 470, 411, 672, 423, 789, 956, 583, 789, 808, 46, 439,
376, 430, 749, 151]
array_e = [134, 415, 784, 202, 34, 584, 543, 119, 701, 7, 700, 959, 956,
975, 484, 426, 738, 508, 201, 527, 816, 136, 668, 624, 535, 108,
1, 965, 857, 152, 478, 344, 567, 262, 546, 953, 199, 90, 72, 900,
449, 773, 211, 758, 100, 696, 536, 838, 204, 738, 717, 21, 874,
385, 997, 761, 845, 998, 78, 703, 502, 557, 47, 421, 819, 945,
375, 370, 35, 799, 622, 837, 924, 834, 595, 24, 882, 483, 862,
438, 221, 931, 811, 448, 317, 809, 561, 162, 159, 640, 217, 662,
197, 616, 435, 368, 562, 162, 739, 949, 962, 713, 786, 238, 899,
733, 263, 781, 217, 477, 220, 790, 409, 383, 590, 726, 192, 152,
240, 352, 792, 458, 366, 341, 74, 801, 709, 988, 964, 800, 938,
278, 514, 76, 516, 413, 810, 131, 547, 379, 609, 119, 169, 370,
502, 112, 448, 695, 264, 688, 399, 408, 498, 765, 749, 925, 918,
458, 913, 234, 611]
array_f = [512, 125, 928, 381, 890, 90, 512, 789, 469, 473, 908, 990, 195,
763, 102, 643, 458, 366, 684, 857, 126, 534, 974, 875, 459, 892,
686, 373, 127, 297, 576, 991, 774, 856, 372, 664, 946, 237, 806,
767, 62, 714, 758, 258, 477, 860, 253, 287, 579, 289, 496]
assert equal_instance.count_mim_steps(array_a) == 2
assert equal_instance.count_mim_steps(array_b) == 10605
assert equal_instance.count_mim_steps(array_c) == 8198
assert equal_instance.count_mim_steps(array_d) == 18762
assert equal_instance.count_mim_steps(array_e) == 16931
assert equal_instance.count_mim_steps(array_f) == 5104
|
AppliedAlgorithmsGroup/leon-lee
|
test/python/equal_tests.py
|
Python
|
mit
| 4,152
|
from textwrap import dedent
from pprint import pformat
from collections import OrderedDict
import attr
from . import sentinel
from .ordering import Ordering
# adapted from https://stackoverflow.com/a/47663099/1615465
def no_default_vals_in_repr(cls):
"""Class decorator on top of attr.s that omits attributes from repr that
have their default value"""
defaults = OrderedDict()
for attribute in cls.__attrs_attrs__:
if isinstance(attribute.default, attr.Factory):
assert attribute.default.takes_self == False, 'not implemented'
defaults[attribute.name] = attribute.default.factory()
else:
defaults[attribute.name] = attribute.default
def repr_(self):
real_cls = self.__class__
qualname = getattr(real_cls, "__qualname__", None)
if qualname is not None:
class_name = qualname.rsplit(">.", 1)[-1]
else:
class_name = real_cls.__name__
attributes = defaults.keys()
return "{0}({1})".format(
class_name,
", ".join(
name + "=" + repr(getattr(self, name))
for name in attributes
if getattr(self, name) != defaults[name]))
cls.__repr__ = repr_
return cls
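# Illustrative effect of the decorator (comment added for clarity): with the
# Bundle class defined below, repr(Bundle('a', 'b')) shows only the attributes
# that differ from their defaults, e.g. "Bundle(source='a', target='b')",
# instead of also listing waypoints, flow_selection, flow_partition and
# default_partition.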
# SankeyDefinition
def _convert_bundles_to_dict(bundles):
if not isinstance(bundles, dict):
bundles = {k: v for k, v in enumerate(bundles)}
return bundles
def _convert_ordering(ordering):
if isinstance(ordering, Ordering):
return ordering
else:
return Ordering(ordering)
def _validate_bundles(instance, attribute, bundles):
# Check bundles
for k, b in bundles.items():
if not b.from_elsewhere:
if b.source not in instance.nodes:
raise ValueError('Unknown source "{}" in bundle {}'.format(
b.source, k))
if not isinstance(instance.nodes[b.source], ProcessGroup):
raise ValueError(
'Source of bundle {} is not a process group'.format(k))
if not b.to_elsewhere:
if b.target not in instance.nodes:
raise ValueError('Unknown target "{}" in bundle {}'.format(
b.target, k))
if not isinstance(instance.nodes[b.target], ProcessGroup):
raise ValueError(
'Target of bundle {} is not a process group'.format(k))
for u in b.waypoints:
if u not in instance.nodes:
raise ValueError('Unknown waypoint "{}" in bundle {}'.format(
u, k))
if not isinstance(instance.nodes[u], Waypoint):
raise ValueError(
'Waypoint "{}" of bundle {} is not a waypoint'.format(u,
k))
def _validate_ordering(instance, attribute, ordering):
for layer_bands in ordering.layers:
for band_nodes in layer_bands:
for u in band_nodes:
if u not in instance.nodes:
raise ValueError('Unknown node "{}" in ordering'.format(u))
@attr.s(slots=True, frozen=True)
class SankeyDefinition(object):
nodes = attr.ib()
bundles = attr.ib(converter=_convert_bundles_to_dict,
validator=_validate_bundles)
ordering = attr.ib(converter=_convert_ordering, validator=_validate_ordering)
flow_selection = attr.ib(default=None)
flow_partition = attr.ib(default=None)
time_partition = attr.ib(default=None)
    def copy(self):
        return self.__class__(self.nodes.copy(), self.bundles.copy(),
                              self.ordering, self.flow_selection,
                              self.flow_partition, self.time_partition)
def to_code(self):
nodes = "\n".join(
" %s: %s," % (repr(k), pformat(v)) for k, v in self.nodes.items()
)
ordering = "\n".join(
" %s," % repr([list(x) for x in layer]) for layer in self.ordering.layers
# convert to list just because it looks neater
)
bundles = "\n".join(
" %s," % pformat(bundle) for bundle in self.bundles.values()
)
if self.flow_selection is not None:
flow_selection = "flow_selection = %s\n\n" % pformat(self.flow_selection)
else:
flow_selection = ""
if self.flow_partition is not None:
flow_partition = "flow_partition = %s\n\n" % pformat(self.flow_partition)
else:
flow_partition = ""
if self.time_partition is not None:
time_partition = "time_partition = %s\n\n" % pformat(self.time_partition)
else:
time_partition = ""
code = dedent("""
from floweaver import (
ProcessGroup,
Waypoint,
Partition,
Group,
Elsewhere,
Bundle,
SankeyDefinition,
)
nodes = {
%s
}
ordering = [
%s
]
bundles = [
%s
]
%s%s%ssdd = SankeyDefinition(nodes, bundles, ordering%s%s%s)
""") % (
nodes,
ordering,
bundles,
flow_selection,
flow_partition,
time_partition,
(", flow_selection=flow_selection" if flow_selection else ""),
(", flow_partition=flow_partition" if flow_partition else ""),
(", time_partition=time_parititon" if time_partition else "")
)
return code
# ProcessGroup
def _validate_direction(instance, attribute, value):
if value not in 'LR':
raise ValueError('direction must be L or R')
@no_default_vals_in_repr
@attr.s(slots=True)
class ProcessGroup(object):
"""A ProcessGroup represents a group of processes from the underlying dataset.
The processes to include are defined by the `selection`. By default they
are all lumped into one node in the diagram, but by defining a `partition`
this can be controlled.
Attributes
----------
selection : list or string
If a list of strings, they are taken as process ids.
If a single string, it is taken as a Pandas query string run against the
process table.
partition : Partition, optional
Defines how to split the ProcessGroup into subgroups.
direction : 'R' or 'L'
Direction of flow, default 'R' (left-to-right).
title : string, optional
Label for the ProcessGroup. If not set, the ProcessGroup id will be used.
"""
selection = attr.ib(default=None)
partition = attr.ib(default=None)
direction = attr.ib(validator=_validate_direction, default='R')
title = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(str)))
# Waypoint
@no_default_vals_in_repr
@attr.s(slots=True)
class Waypoint(object):
"""A Waypoint represents a control point along a :class:`Bundle` of flows.
There are two reasons to define Waypoints: to control the routing of
:class:`Bundle` s of flows through the diagram, and to split flows according
to some attributes by setting a `partition`.
Attributes
----------
partition : Partition, optional
Defines how to split the Waypoint into subgroups.
direction : 'R' or 'L'
Direction of flow, default 'R' (left-to-right).
title : string, optional
Label for the Waypoint. If not set, the Waypoint id will be used.
"""
partition = attr.ib(default=None)
direction = attr.ib(validator=_validate_direction, default='R')
title = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(str)))
# Bundle
Elsewhere = sentinel.create('Elsewhere')
def _validate_flow_selection(instance, attribute, value):
if instance.source == instance.target and not value:
raise ValueError('flow_selection is required for bundle with same '
'source and target')
@no_default_vals_in_repr
@attr.s(frozen=True, slots=True)
class Bundle(object):
"""A Bundle represents a set of flows between two :class:`ProcessGroup`s.
Attributes
----------
source : string
The id of the :class:`ProcessGroup` at the start of the Bundle.
target : string
The id of the :class:`ProcessGroup` at the end of the Bundle.
waypoints : list of strings
Optional list of ids of :class:`Waypoint`s the Bundle should pass through.
flow_selection : string, optional
Query string to filter the flows included in this Bundle.
flow_partition : Partition, optional
Defines how to split the flows in the Bundle into sub-flows. Often you want
the same Partition for all the Bundles in the diagram, see
:attr:`SankeyDefinition.flow_partition`.
default_partition : Partition, optional
Defines the Partition applied to any Waypoints automatically added to route
the Bundle across layers of the diagram.
"""
source = attr.ib()
target = attr.ib()
waypoints = attr.ib(default=attr.Factory(tuple), converter=tuple)
flow_selection = attr.ib(default=None, validator=_validate_flow_selection)
flow_partition = attr.ib(default=None)
default_partition = attr.ib(default=None)
@property
def to_elsewhere(self):
"""True if the target of the Bundle is Elsewhere (outside the system
boundary)."""
return self.target is Elsewhere
@property
def from_elsewhere(self):
"""True if the source of the Bundle is Elsewhere (outside the system
boundary)."""
return self.source is Elsewhere
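# Illustrative sketch (not part of the original module): a minimal definition
# wiring two process groups together. The exact ordering format accepted by
# the Ordering converter is assumed here.
#
#     nodes = {'farm': ProcessGroup(['farm1', 'farm2']),
#              'eat': ProcessGroup(['consumer'])}
#     bundles = [Bundle('farm', 'eat')]
#     ordering = [['farm'], ['eat']]
#     sdd = SankeyDefinition(nodes, bundles, ordering)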
|
ricklupton/sankeyview
|
floweaver/sankey_definition.py
|
Python
|
mit
| 9,779
|
# imports/modules
import os
import random
import json
import collections
from PIL import Image
# Convert (r, g, b) into #rrggbb color
def getRGBstring( (r, g, b) ):
s = "#"
s = s + format(r, '02x')
s = s + format(g, '02x')
s = s + format(b, '02x')
return s
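# For example, getRGBstring((255, 0, 128)) returns "#ff0080" (comment added
# for illustration).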
def do_compute():
# Open the image
origImgFile = 'res/bryce.jpg'
origImg = Image.open(origImgFile)
# Process the image
# Save the processed information
output = { 'file': origImgFile,
'freq': freq }
f = open("res/freq.json",'w')
s = json.dumps(output, indent = 4)
f.write(s)
|
CS205IL-sp15/workbook
|
demo_colorFreq_start/py/compute.py
|
Python
|
mit
| 594
|
# Sample Python/Pygame Programs
# Simpson College Computer Science
# http://cs.simpson.edu
import pygame
# Define some colors
black = ( 0, 0, 0)
white = ( 255, 255, 255)
green = ( 0, 255, 0)
red = ( 255, 0, 0)
pygame.init()
# Set the height and width of the screen
size=[700,500]
screen=pygame.display.set_mode(size)
pygame.display.set_caption("My Game")
#Loop until the user clicks the close button.
done=False
# Used to manage how fast the screen updates
clock=pygame.time.Clock()
# -------- Main Program Loop -----------
while done==False:
for event in pygame.event.get(): # User did something
if event.type == pygame.QUIT: # If user clicked close
done=True # Flag that we are done so we exit this loop
# Set the screen background
screen.fill(black)
# Limit to 20 frames per second
clock.tick(20)
# Go ahead and update the screen with what we've drawn.
pygame.display.flip()
# Be IDLE friendly. If you forget this line, the program will 'hang'
# on exit.
pygame.quit ()
|
tapomayukh/projects_in_python
|
sandbox_tapo/src/refs/Python Examples_Pygame/Python Examples/pygame_base_template.py
|
Python
|
mit
| 1,067
|
# -*- coding: utf-8 -*-
#
# makeenv documentation build configuration file, created by
# sphinx-quickstart on Tue Nov 27 21:24:26 2012.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import os
# readthedocs does not build from inside the makeenv environment so we have to
# hack it around a bit here
if "READTHEDOCS" in os.environ:
import sys
import tempfile
# put us on the sys.path
MAKEENV_ROOT = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.insert(0, MAKEENV_ROOT)
# to silence a warning from the makefile
os.environ.setdefault("PIP_DOWNLOAD_CACHE", tempfile.mkdtemp())
# build the module doc
import subprocess
subprocess.check_output(("make", "-C", MAKEENV_ROOT, "sphinx-module-rst"),
stderr=subprocess.STDOUT)
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.intersphinx', 'sphinx.ext.todo', 'sphinx.ext.coverage', 'sphinx.ext.ifconfig', 'sphinx.ext.viewcode', 'sphinxcontrib.programoutput']
# Add any paths that contain templates here, relative to this directory.
#templates_path = ['.templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'makeenv'
copyright = u'2012, Arthur Noel'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = "0.1-dev"
# The full version, including alpha/beta/rc tags.
release = version
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['.build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'sphinxdoc'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
#html_static_path = ['.static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'makeenvdoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'makeenv.tex', u'makeenv Documentation',
u'Arthur Noel', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'makeenv', u'makeenv Documentation',
[u'Arthur Noel'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'makeenv', u'makeenv Documentation',
u'Arthur Noel', 'makeenv', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'http://docs.python.org/': None}
highlight_language = "bash"
|
0compute/makeenv
|
doc/conf.py
|
Python
|
mit
| 8,696
|
from .Commerce import Commerce
from .Transaction import Transaction
|
lexotero/python-redsys
|
redsys/__init__.py
|
Python
|
mit
| 68
|
"""sandbox URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.10/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url, include
from django.contrib import admin
from rest_framework import routers
from library import views as library_views
router = routers.DefaultRouter()
router.register('authors', library_views.AuthorViewSet)
router.register('books', library_views.BookViewSet)
urlpatterns = [
url(r'^', include('library.urls')),
url(r'^api/', include(router.urls)),
url(r'^admin/', admin.site.urls),
]
|
nshafer/django-hashid-field
|
sandbox/sandbox/urls.py
|
Python
|
mit
| 1,076
|
# -*- coding: utf-8 -*-
from django.db import models
from tweets.models import Tweet
class Tag(models.Model):
name = models.CharField(max_length=255, unique=True, db_index=True)
is_hashtag = models.BooleanField(default=False)
tweets = models.ManyToManyField(Tweet, related_name='tags')
class Meta:
db_table = 'tags'
|
kk6/onedraw
|
onedraw/tags/models.py
|
Python
|
mit
| 345
|
'''A module containing a class for storing Creature objects in a
SQLite database.'''
import csv
import sqlite3
__all__ = ['CreatureDB']
class CreatureDB(object):
'''Class for storing Creature objects in a SQLite database.'''
def __init__(self, name='creature.db', use_nominal_cr=False):
self.min_cr = 0.0
self.max_cr = float('inf')
# set flags
self.using_nominal_cr = use_nominal_cr
# initialize database
self.connection = sqlite3.connect(name)
self.connection.text_factory = str
self._create_table()
def _construct_table_columns(self):
'''Constructs a tuple that defines the columns in
the "creatures" table
:returns tuple that defines the columns in "creatures" table
'''
columns = ('id integer primary key autoincrement',
'name varchar(45)')
# set type of CR column depending on flag
if self.using_nominal_cr:
columns = columns + ('CR varchar(10)',)
else:
columns = columns + ('CR real',)
# add the remaining database fields to column tuple
main_entry_columns = (
'hp integer', 'HD integer',
'ac integer', 'touch_ac integer', 'flatfooted_ac integer',
'Fort integer', 'Ref integer', 'Will integer',
'Str integer', 'Dex integer', 'Con integer',
'Int integer', 'Wis integer', 'Cha integer',
'BAB integer', 'CMB integer', 'CMD integer'
)
columns = columns + main_entry_columns
return columns
def _construct_tuple_insert_values(self, creature):
'''Constructs a tuple of Creature values for insertion into
the "creatures" table
:returns tuple of values for insertion into "creatures" table
'''
values = (creature.name,)
# set value of CR column depending on flag
if self.using_nominal_cr:
values = values + ('CR ' + creature.cr,)
else:
values = values + (creature.cr,)
# add the remaining database fields to values tuple
main_entry_values = (
creature.hp,
creature.hd,
creature.ac['AC'],
creature.ac['touch'],
creature.ac['flat-footed'],
creature.saves['Fort'],
creature.saves['Ref'],
creature.saves['Will'],
creature.ability_scores['Str'],
creature.ability_scores['Dex'],
creature.ability_scores['Con'],
creature.ability_scores['Int'],
creature.ability_scores['Wis'],
creature.ability_scores['Cha'],
creature.bab,
creature.cmb,
creature.cmd
)
values = values + main_entry_values
return values
def _create_table(self):
        '''Creates the "creatures" table in the SQLite database for
        storing Creature objects if it does not already exist.
        '''
# create table
columns = self._construct_table_columns()
query = '''create table if not exists creatures
(
%s,%s,
%s,%s,
%s,%s,%s,
%s,%s,%s,
%s,%s,%s,%s,%s,%s,%s,
%s, %s, %s
)''' % columns
self.connection.execute(query)
def add_creature(self, creature):
'''Adds a Creature object as a row in the appropriate table
of the SQLite database
:param creature: a Creature object to be added to the database
'''
# check that creature CR is within desired range
creature_cr = float(creature.cr)
if creature_cr < self.min_cr or creature_cr > self.max_cr:
return
# ignore duplicate creatures
if self.is_creature_in_db(creature):
return
# insert creature into database
values = self._construct_tuple_insert_values(creature)
query = '''insert into creatures
(
name,CR,
hp,HD,
ac,touch_ac,flatfooted_ac,
Fort, Ref, Will,
Str,Dex,Con,Int,Wis,Cha,
BAB,CMB,CMD
)
values
(
?,?,
?,?,
?,?,?,
?,?,?,
?,?,?,?,?,?,
?,?,?
)'''
self.connection.execute(query, values)
def commit_and_close(self):
'''Commits any uncommitted changes to the SQLite database and
closes the connection
'''
self.connection.commit()
self.connection.close()
def export_as_csv(self, file_name='creature.csv'):
'''Exports the data in this object as a .csv file.
:param file_name: the name of the output csv file
'''
cursor = self.connection.cursor()
data = cursor.execute('select * from creatures')
# write data to output file
csv_file = open(file_name, 'w')
writer = csv.writer(csv_file)
writer.writerow([
'id',
'name', 'CR',
'hp', 'HD',
'ac', 'touch_ac', 'flatfooted_ac',
'Fort', 'Ref', 'Will',
'Str', 'Dex', 'Con', 'Int', 'Wis', 'Cha',
'BAB', 'CMB', 'CMD'
])
writer.writerows(data)
csv_file.close()
def is_creature_in_db(self, creature):
        ''' Determines whether or not a database entry exists for a
given creature
:returns True if entry exists, False otherwise
'''
# set value of CR column depending on flag
creature_cr = creature.cr
if self.using_nominal_cr:
creature_cr = 'CR ' + creature.cr
# query database for creature
values = (creature.name, creature_cr)
query = '''select * from creatures where name=? and cr=?'''
cursor = self.connection.cursor()
cursor.execute(query, values)
return cursor.fetchone() is not None
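# Illustrative smoke test (not part of the original module). It uses an
# in-memory SQLite database and a hypothetical stand-in object exposing the
# attributes CreatureDB expects from a Creature.
if __name__ == '__main__':
    class _FakeCreature(object):
        '''Hypothetical stand-in for a Creature object.'''
        name = 'Goblin'
        cr = 1.0
        hp = 6
        hd = 1
        ac = {'AC': 16, 'touch': 13, 'flat-footed': 14}
        saves = {'Fort': 3, 'Ref': 4, 'Will': -1}
        ability_scores = {'Str': 11, 'Dex': 15, 'Con': 12,
                          'Int': 10, 'Wis': 9, 'Cha': 6}
        bab = 1
        cmb = 0
        cmd = 12
    db = CreatureDB(name=':memory:')
    db.add_creature(_FakeCreature())
    print(db.is_creature_in_db(_FakeCreature()))  # expected: True
    db.commit_and_close()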
|
lot9s/pathfinder-rpg-utils
|
data-mining/bestiary/db/creatureDB.py
|
Python
|
mit
| 6,403
|
# Write a program that asks how much you earn per hour
# and the number of hours worked in the month. Compute and
# display the total salary for that month.
salarioXhoras = float(input('Quanto voce ganha por hora? '))
horas = float(input('Quantas horas trabalhadas? '))
salario = horas * salarioXhoras
print('Voce ganhará %.2f de salario esse mes' % (salario))
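# For example: 10.00 per hour * 160 hours worked = 1600.00 for the month.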
|
GlauberGoncalves/Python
|
Lista PythonBrasil/exer08.py
|
Python
|
mit
| 378
|
from django_group_by import GroupByMixin
from django.db.models.query import QuerySet
class BookQuerySet(QuerySet, GroupByMixin):
pass
|
kako-nawao/django-group-by
|
test_app/query.py
|
Python
|
mit
| 141
|
#!/usr/bin/python
# ZetCode PyGTK tutorial
#
# This example shows how to use
# the Alignment widget
#
# author: jan bodnar
# website: zetcode.com
# last edited: February 2009
import gtk
import gobject
class PyApp(gtk.Window):
def __init__(self):
super(PyApp, self).__init__()
self.set_title("Alignment")
self.set_size_request(260, 150)
self.set_position(gtk.WIN_POS_CENTER)
vbox = gtk.VBox(False, 5)
hbox = gtk.HBox(True, 3)
valign = gtk.Alignment(0, 1, 0, 0)
vbox.pack_start(valign)
ok = gtk.Button("OK")
ok.set_size_request(70, 30)
close = gtk.Button("Close")
hbox.add(ok)
hbox.add(close)
halign = gtk.Alignment(1, 0, 0, 0)
halign.add(hbox)
vbox.pack_start(halign, False, False, 3)
self.add(vbox)
self.connect("destroy", gtk.main_quit)
self.show_all()
PyApp()
gtk.main()
|
HPPTECH/hpp_IOSTressTest
|
Refer/Alignment.py
|
Python
|
mit
| 995
|
#!/usr/bin/env python
#
# Copyright (c) 2001 - 2016 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "test/QT/up-to-date.py rel_2.5.1:3735:9dc6cee5c168 2016/11/03 14:02:02 bdbaddog"
"""
Validate that a stripped-down real-world Qt configuration (thanks
to Leanid Nazdrynau) with a generated .h file is correctly
up-to-date after a build.
(This catches a bug that was introduced during a signature refactoring
ca. September 2005.)
"""
import os
import TestSCons
_obj = TestSCons._obj
test = TestSCons.TestSCons()
if not os.environ.get('QTDIR', None):
x ="External environment variable $QTDIR not set; skipping test(s).\n"
test.skip_test(x)
test.subdir('layer',
['layer', 'aclock'],
['layer', 'aclock', 'qt_bug'])
test.write('SConstruct', """\
import os
aa=os.getcwd()
env=Environment(tools=['default','expheaders','qt'],toolpath=[aa])
env["EXP_HEADER_ABS"]=os.path.join(os.getcwd(),'include')
if not os.access(env["EXP_HEADER_ABS"],os.F_OK):
os.mkdir (env["EXP_HEADER_ABS"])
Export('env')
env.SConscript('layer/aclock/qt_bug/SConscript')
""")
test.write('expheaders.py', """\
import SCons.Defaults
def ExpHeaderScanner(node, env, path):
return []
def generate(env):
HeaderAction=SCons.Action.Action([SCons.Defaults.Copy('$TARGET','$SOURCE'),SCons.Defaults.Chmod('$TARGET',0755)])
HeaderBuilder= SCons.Builder.Builder(action=HeaderAction)
env['BUILDERS']['ExportHeaders'] = HeaderBuilder
def exists(env):
return 0
""")
test.write(['layer', 'aclock', 'qt_bug', 'SConscript'], """\
import os
Import ("env")
env.ExportHeaders(os.path.join(env["EXP_HEADER_ABS"],'main.h'), 'main.h')
env.ExportHeaders(os.path.join(env["EXP_HEADER_ABS"],'migraform.h'), 'migraform.h')
env.Append(CPPPATH=env["EXP_HEADER_ABS"])
env.StaticLibrary('all',['main.ui','migraform.ui','my.cc'])
""")
test.write(['layer', 'aclock', 'qt_bug', 'main.ui'], """\
<!DOCTYPE UI><UI version="3.3" stdsetdef="1">
<class>Main</class>
<widget class="QWizard">
<property name="name">
<cstring>Main</cstring>
</property>
<property name="geometry">
<rect>
<x>0</x>
<y>0</y>
<width>600</width>
<height>385</height>
</rect>
</property>
</widget>
<includes>
<include location="local" impldecl="in implementation">migraform.h</include>
</includes>
</UI>
""")
test.write(['layer', 'aclock', 'qt_bug', 'migraform.ui'], """\
<!DOCTYPE UI><UI version="3.3" stdsetdef="1">
<class>MigrateForm</class>
<widget class="QWizard">
<property name="name">
<cstring>MigrateForm</cstring>
</property>
<property name="geometry">
<rect>
<x>0</x>
<y>0</y>
<width>600</width>
<height>385</height>
</rect>
</property>
</widget>
</UI>
""")
test.write(['layer', 'aclock', 'qt_bug', 'my.cc'], """\
#include <main.h>
""")
my_obj = 'layer/aclock/qt_bug/my'+_obj
test.run(arguments = my_obj, stderr=None)
expect = my_obj.replace( '/', os.sep )
test.up_to_date(options = '--debug=explain',
arguments = (expect),
stderr=None)
test.pass_test()
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
|
EmanueleCannizzaro/scons
|
test/QT/up-to-date.py
|
Python
|
mit
| 4,303
|
from concurrent.futures import ThreadPoolExecutor
import os
import re
import gzip
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
import logging
import mimetypes
from collections import defaultdict
from flask import url_for as flask_url_for
from flask import current_app, request
from boto.s3.connection import S3Connection
from boto.exception import S3CreateError, S3ResponseError
from boto.s3.key import Key
logger = logging.getLogger('flask_s3')
mimetypes.add_type('text/css', '.less')
def url_for(endpoint, **values):
"""
Generates a URL to the given endpoint.
If the endpoint is for a static resource then an Amazon S3 URL is
generated, otherwise the call is passed on to `flask.url_for`.
Because this function is set as a jinja environment variable when
`FlaskS3.init_app` is invoked, this function replaces
`flask.url_for` in templates automatically. It is unlikely that this
function will need to be directly called from within your
application code, unless you need to refer to static assets outside
of your templates.
"""
app = current_app
if 'S3_BUCKET_NAME' not in app.config:
raise ValueError("S3_BUCKET_NAME not found in app configuration.")
if app.debug and not app.config['USE_S3_DEBUG']:
return flask_url_for(endpoint, **values)
if endpoint == 'static' or endpoint.endswith('.static'):
scheme = app.config['S3_URL_SCHEME'] or 'https'
bucket_path = '%s.%s' % (app.config['S3_BUCKET_NAME'],
app.config['S3_BUCKET_DOMAIN'])
if app.config['S3_CDN_DOMAIN']:
bucket_path = '%s' % app.config['S3_CDN_DOMAIN']
if app.config['S3_PREFIX']:
bucket_path = "/".join((bucket_path, app.config['S3_PREFIX']))
urls = app.url_map.bind(bucket_path, url_scheme=scheme)
try:
mimetype = mimetypes.guess_type(values['filename'])[0]
except KeyError:
mimetype = None
if app.config['USE_GZIP']:
accept_encoding = request.headers.get('Accept-Encoding', '')
if (mimetype in app.config['S3_GZIP_CONTENT_TYPES'] and
'gzip' in accept_encoding.lower()):
values['filename'] += '.gz'
url = urls.build(endpoint, values=values, force_external=True)
if app.config['S3_URL_SCHEME'] is None:
url = re.sub(r'^https://', '//', url)
return url
return flask_url_for(endpoint, **values)
def _bp_static_url(blueprint):
""" builds the absolute url path for a blueprint's static folder """
u = u'%s%s' % (blueprint.url_prefix or '', blueprint.static_url_path or '')
return u
def _gather_files(app, hidden):
""" Gets all files in static folders and returns in dict."""
dirs = [(unicode(app.static_folder), app.static_url_path)]
if hasattr(app, 'blueprints'):
blueprints = app.blueprints.values()
bp_details = lambda x: (x.static_folder, _bp_static_url(x))
dirs.extend([bp_details(x) for x in blueprints if x.static_folder])
valid_files = defaultdict(list)
for static_folder, static_url_loc in dirs:
if not os.path.isdir(static_folder):
logger.warning("WARNING - [%s does not exist]" % static_folder)
else:
logger.debug("Checking static folder: %s" % static_folder)
for root, _, files in os.walk(static_folder):
files = [os.path.join(root, x) \
for x in files if hidden or x[0] != '.']
if files:
valid_files[(static_folder, static_url_loc)].extend(files)
return valid_files
def _path_to_relative_url(path):
""" Converts a folder and filename into a ralative url path """
return os.path.splitdrive(path)[1].replace('\\', '/')
def _static_folder_path(static_url, static_folder, static_asset, prefix=''):
"""
Returns a path to a file based on the static folder, and not on the
filesystem holding the file.
Returns a path relative to static_url for static_asset
"""
# first get the asset path relative to the static folder.
# static_asset is not simply a filename because it could be
# sub-directory then file etc.
if not static_asset.startswith(static_folder):
raise ValueError("%s startic asset must be under %s static folder" %
(static_asset, static_folder))
rel_asset = static_asset[len(static_folder):]
# Now bolt the static url path and the relative asset location together
key = u'%s/%s' % (static_url.rstrip('/'), rel_asset.lstrip('/'))
if prefix:
key = u'%s/%s' % (prefix, key)
return key
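# Illustration with assumed values: _static_folder_path('/static',
# '/app/static', '/app/static/css/site.css') returns u'/static/css/site.css'.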
def _write_files(app, static_url_loc, static_folder, files, bucket,
ex_keys=None):
""" Writes all the files inside a static folder to S3. """
with ThreadPoolExecutor(app.config['S3_UPLOAD_COCURRENCY']) as executor:
for file_path in files:
asset_loc = _path_to_relative_url(file_path)
key_name = _static_folder_path(static_url_loc, static_folder,
asset_loc, app.config['S3_PREFIX'])
mimetype = mimetypes.guess_type(key_name)[0]
is_gzippable = mimetype in app.config['S3_GZIP_CONTENT_TYPES']
headers = app.config['S3_HEADERS']
msg = "Uploading %s to %s as %s" % (file_path, bucket, key_name)
logger.debug(msg)
if ex_keys and key_name in ex_keys:
logger.debug("%s excluded from upload" % key_name)
else:
do_gzip = app.config['USE_GZIP'] and is_gzippable
# upload origin file
executor.submit(_upload_file, file_path, bucket, key_name, headers)
# upload gzipped file (if enabled)
if do_gzip:
gzip_key_name = "%s.gz" % key_name
executor.submit(_upload_file, file_path, bucket, gzip_key_name, headers, True)
def _upload_file(file_path, bucket, key_name, headers={}, do_gzip=False):
k = Key(bucket=bucket, name=key_name)
for header, value in headers.items():
if (header, value) != ('Content-Encoding', 'gzip'):
k.set_metadata(header, value)
mimetype = mimetypes.guess_type(file_path)[0]
if mimetype:
k.set_metadata('Content-Type', mimetype)
with open(file_path) as f:
content = f.read()
if do_gzip:
k.set_metadata('Content-Encoding', 'gzip')
gzipped = StringIO()
with gzip.GzipFile(fileobj=gzipped, mode='w') as _gzip:
_gzip.write(content)
content = gzipped.getvalue()
try:
k.set_contents_from_string(content)
except S3ResponseError:
if not do_gzip:
k.set_contents_from_filename(file_path)
else:
raise
k.make_public()
return k
def _upload_files(app, files_, bucket):
for (static_folder, static_url), names in files_.iteritems():
_write_files(app, static_url, static_folder, names, bucket)
def create_all(app, user=None, password=None, bucket_name=None,
location='', include_hidden=False, force_refresh=False):
"""
Uploads all of the static assets associated with a Flask application to
Amazon S3.
All static assets are identified on the local filesystem, including
any static assets associated with *registered* blueprints. In turn,
each asset is uploaded to the bucket described by `bucket_name`. If
the bucket does not exist then it is created.
Flask-S3 creates the same relative static asset folder structure on
S3 as can be found within your Flask application.
Many of the optional arguments to `create_all` can be specified
instead in your application's configuration using the Flask-S3
`configuration`_ variables.
:param app: a :class:`flask.Flask` application object.
:param user: an AWS Access Key ID. You can find this key in the
Security Credentials section of your AWS account.
:type user: `basestring` or None
:param password: an AWS Secret Access Key. You can find this key in
the Security Credentials section of your AWS
account.
:type password: `basestring` or None
:param bucket_name: the name of the bucket you wish to serve your
static assets from. **Note**: while a valid
character, it is recommended that you do not
include periods in bucket_name if you wish to
serve over HTTPS. See Amazon's `bucket
restrictions`_ for more details.
:type bucket_name: `basestring` or None
:param location: the AWS region to host the bucket in; an empty
string indicates the default region should be used,
which is the US Standard region. Possible location
values include: `'DEFAULT'`, `'EU'`, `'USWest'`,
`'APSoutheast'`
:type location: `basestring` or None
:param include_hidden: by default Flask-S3 will not upload hidden
files. Set this to true to force the upload of hidden files.
:type include_hidden: `bool`
.. _bucket restrictions: http://docs.amazonwebservices.com/AmazonS3\
/latest/dev/BucketRestrictions.html
"""
if user is None and 'AWS_ACCESS_KEY_ID' in app.config:
user = app.config['AWS_ACCESS_KEY_ID']
if password is None and 'AWS_SECRET_ACCESS_KEY' in app.config:
password = app.config['AWS_SECRET_ACCESS_KEY']
if bucket_name is None and 'S3_BUCKET_NAME' in app.config:
bucket_name = app.config['S3_BUCKET_NAME']
if not bucket_name:
raise ValueError("No bucket name provided.")
# build list of static files
all_files = _gather_files(app, include_hidden)
logger.debug("All valid files: %s" % all_files)
conn = S3Connection(user, password) # connect to s3
# get_or_create bucket
try:
try:
bucket = conn.create_bucket(bucket_name, location=location)
bucket.make_public(recursive=True)
except S3CreateError as e:
if e.error_code == u'BucketAlreadyOwnedByYou':
bucket = conn.get_bucket(bucket_name)
if force_refresh:
bucket.make_public(recursive=True)
else:
raise e
except S3CreateError as e:
raise e
_upload_files(app, all_files, bucket)
class FlaskS3(object):
"""
The FlaskS3 object allows your application to use Flask-S3.
When initialising a FlaskS3 object you may optionally provide your
:class:`flask.Flask` application object if it is ready. Otherwise,
you may provide it later by using the :meth:`init_app` method.
:param app: optional :class:`flask.Flask` application object
:type app: :class:`flask.Flask` or None
"""
def __init__(self, app=None):
if app is not None:
self.init_app(app)
def init_app(self, app):
"""
An alternative way to pass your :class:`flask.Flask` application
object to Flask-S3. :meth:`init_app` also takes care of some
default `settings`_.
:param app: the :class:`flask.Flask` application object.
"""
defaults = [('S3_URL_SCHEME', None),
('S3_USE_HTTPS', None),
('USE_S3', True),
('USE_GZIP', False),
('USE_S3_DEBUG', False),
('S3_BUCKET_DOMAIN', 's3.amazonaws.com'),
('S3_CDN_DOMAIN', ''),
('S3_USE_CACHE_CONTROL', False),
('S3_HEADERS', {}),
('S3_GZIP_CONTENT_TYPES', (
'text/css',
'application/javascript',
'application/x-javascript',
)),
('S3_PREFIX', None),
('S3_UPLOAD_COCURRENCY', 32)]
for k, v in defaults:
app.config.setdefault(k, v)
if app.config['USE_S3']:
app.jinja_env.globals['url_for'] = url_for
if app.config['S3_USE_CACHE_CONTROL'] and 'S3_CACHE_CONTROL' in app.config:
cache_control_header = app.config['S3_CACHE_CONTROL']
app.config['S3_HEADERS']['Cache-Control'] = cache_control_header
if app.config['S3_URL_SCHEME'] is None and \
app.config['S3_USE_HTTPS'] is not None:
scheme = 'https' if app.config['S3_USE_HTTPS'] else 'http'
app.config['S3_URL_SCHEME'] = scheme
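# --------------------------------------------------------------------------
# Minimal usage sketch (illustrative only, not part of the original module).
# The app, bucket name and credentials below are placeholders.
#
#   from flask import Flask
#   from flask_s3 import FlaskS3, create_all
#
#   app = Flask(__name__)
#   app.config['S3_BUCKET_NAME'] = 'my-bucket'   # required by url_for above
#   FlaskS3(app)                                 # or FlaskS3().init_app(app) later
#
#   # one-off upload of every static asset, typically run from a manage script:
#   create_all(app, user='AKIA...', password='secret', include_hidden=False)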
|
spoqa/flask-s3
|
flask_s3.py
|
Python
|
mit
| 12,801
|
import os
import unittest
import zope.testrunner
from zope import component
from sparc.testing.fixture import test_suite_mixin
from sparc.testing.testlayer import SPARC_INTEGRATION_LAYER
from sparc.db.splunk.testing import SPARC_DB_SPLUNK_INTEGRATION_LAYER
from zope import schema
from zope.interface import Interface
class ITestSchema(Interface):
date = schema.Date(title=u"date")
datetime = schema.Datetime(title=u"datetime")
decimal = schema.Decimal(title=u"decimal")
float = schema.Float(title=u"float")
int = schema.Int(title=u"int")
bool = schema.Bool(title=u"bool")
list = schema.Set(title=u"list", value_type=schema.Field(title=u"field"))
set = schema.Set(title=u"set", value_type=schema.Field(title=u"field"))
dict = schema.Dict(title=u"dict", key_type=schema.TextLine(title=u"key"),
value_type=schema.Text(title=u"value"))
ip = schema.DottedName(title=u"ip",min_dots=3,max_dots=3)
ascii = schema.ASCII(title=u"ascii")
class SparcCacheSplunkAreaTestCase(unittest.TestCase):
layer = SPARC_INTEGRATION_LAYER
sm = component.getSiteManager()
def test_ISplunkKVCollectionSchema_adapter_for_schemas(self):
from sparc.db.splunk import ISplunkKVCollectionSchema
schema = ISplunkKVCollectionSchema(ITestSchema)
self.assertIn('field.date', schema)
self.assertEquals(schema['field.date'], 'time')
self.assertIn('field.datetime', schema)
self.assertEquals(schema['field.datetime'], 'time')
self.assertIn('field.decimal', schema)
self.assertEquals(schema['field.decimal'], 'number')
self.assertIn('field.float', schema)
self.assertEquals(schema['field.float'], 'number')
self.assertIn('field.int', schema)
self.assertEquals(schema['field.int'], 'number')
self.assertIn('field.bool', schema)
self.assertEquals(schema['field.bool'], 'bool')
self.assertIn('field.list', schema)
self.assertEquals(schema['field.list'], 'array')
self.assertIn('field.set', schema)
self.assertEquals(schema['field.set'], 'array')
self.assertIn('field.dict', schema)
self.assertEquals(schema['field.dict'], 'array')
self.assertIn('field.ip', schema)
self.assertEquals(schema['field.ip'], 'cidr')
self.assertIn('field.ascii', schema)
self.assertEquals(schema['field.ascii'], 'string')
def test_bad_collection(self):
from sparc.db.splunk import ISplunkKVCollectionSchema
class ITestSchemaDict(Interface):
list = schema.List(title=u'bad',
value_type=schema.Dict(title=u'bad'))
sschema = ISplunkKVCollectionSchema(ITestSchemaDict)
self.assertNotIn('field.list', sschema)
class ITestSchemaCollection(Interface):
list = schema.List(title=u'bad',
value_type=schema.List(title=u'bad'))
sschema = ISplunkKVCollectionSchema(ITestSchemaCollection)
self.assertNotIn('field.list', sschema)
kv_names = {}
kv_names['test_collection'] = {}
kv_names['test_collection']['field.id'] = "string"
kv_names['test_collection']['field.name'] = "string"
SPARC_DB_SPLUNK_INTEGRATION_LAYER.kv_names.update(kv_names)
class SparcDBSplunkKVTestCase(unittest.TestCase):
layer = SPARC_DB_SPLUNK_INTEGRATION_LAYER
level = 2
sm = component.getSiteManager()
def test_current_kv_names(self):
from sparc.db.splunk.kvstore import current_kv_names
req = component.createObject(u'sparc.utils.requests.request')
req.req_kwargs['verify'] = False
req.gooble_warnings = True
self.assertIn('test_collection', \
current_kv_names(self.layer.sci,
self.layer.kv_username,
self.layer.kv_appname,
request=req))
def test_schema_adapter_for_named_collection(self):
# tests SplunkKVCollectionSchemaFromSplunkInstance
from sparc.db.splunk import ISplunkKVCollectionSchema
from sparc.utils.requests import IRequest
kv_id = self.layer.get_kv_id(u'test_collection')
schema = component.getMultiAdapter((self.layer.sci,
kv_id,
self.sm.getUtility(IRequest)),
ISplunkKVCollectionSchema)
for k in self.layer.kv_names['test_collection'].keys():
self.assertEquals(self.layer.kv_names['test_collection'][k], schema[k])
class test_suite(test_suite_mixin):
package = 'sparc.db.splunk'
module = 'kvstore'
def __new__(cls):
suite = super(test_suite, cls).__new__(cls)
suite.addTest(unittest.makeSuite(SparcCacheSplunkAreaTestCase))
suite.addTest(unittest.makeSuite(SparcDBSplunkKVTestCase))
return suite
if __name__ == '__main__':
zope.testrunner.run([
'--path', os.path.dirname(__file__),
'--tests-pattern', os.path.splitext(
os.path.basename(__file__))[0]
])
|
davisd50/sparc.db
|
sparc/db/splunk/tests/test_kvstore.py
|
Python
|
mit
| 5,425
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2017-07-12 02:22
from __future__ import unicode_literals
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('operation_finance', '0020_auto_20170711_1429'),
]
operations = [
migrations.AlterField(
model_name='invoice',
name='due_by',
field=models.DateField(default=datetime.datetime(2017, 8, 6, 2, 22, 37, 974278)),
),
]
|
michealcarrerweb/LHVent_app
|
operation_finance/migrations/0021_auto_20170712_0222.py
|
Python
|
mit
| 519
|
# -*- coding: utf-8 -*-
from setuptools import setup
setup(
name="linked list, stack, double linked list, queue, deque implementation",
description="This package implements a linked list",
version='0.1',
license='MIT',
author="Steven Than, Tatiana Weaver",
author_email="email@email.com",
py_modules=['linked_list', 'stack', 'dll', 'queue', 'deque'],
package_dir={'': 'src'},
install_requires=[],
extras_require={'test': ['pytest', 'pytest-cov', 'tox']},
)
|
tanyaweaver/data-structures
|
setup.py
|
Python
|
mit
| 496
|
import collections
CmdArgs = collections.namedtuple('CmdArgs', ['split_channels', 'merge', 'image', 'prefix', 'list', 'layer'])
|
tiagoshibata/exrsplit
|
tests/cmdargs.py
|
Python
|
mit
| 129
|
#! /usr/local/bin/python -u
# Given an array and a value, remove all instances of that value in place and return the new length.
# The order of elements can be changed. It doesn't matter what you leave beyond the new length.
class Solution:
# @param A a list of integers
# @param elem an integer, value need to be removed
# @return an integer
def removeElement(self, A, elem):
if not A:
return len(A)
curr_idx = 0
total_array_len = len(A)
while (curr_idx <= total_array_len - 1):
if A[curr_idx] == elem:
del A[curr_idx]
total_array_len -= 1
else:
curr_idx += 1
return total_array_len
if __name__ == '__main__':
print Solution().removeElement([1], 1)
|
textsaurabh/code_base
|
src/leetcode/script/remove_element_inplace.py
|
Python
|
mit
| 815
|
#!/usr/bin/env python
import unittest
import pentai.base.human_player as h_m
import pentai.base.rules as r_m
import pentai.base.game as g_m
import pentai.ai.priority_filter as pf_m
import pentai.ai.utility_calculator as uc_m
from pentai.ai.ab_state import *
def get_black_line_counts(ab_game_state):
return ab_game_state.get_utility_stats().lines[P1]
def get_white_line_counts(ab_game_state):
return ab_game_state.get_utility_stats().lines[P2]
class AlphaBetaBridgeTest(unittest.TestCase):
def setUp(self):
player1 = h_m.HumanPlayer("Blomp")
player2 = h_m.HumanPlayer("Kubba")
r = r_m.Rules(13, "standard")
my_game = g_m.Game(r, player1, player2)
self.gs = my_game.current_state
self.search_filter = pf_m.PriorityFilter()
self.util_calc = uc_m.UtilityCalculator()
self.s = ABState(search_filter=self.search_filter,
utility_calculator=self.util_calc)
self.bl = self.s.utility_stats.lines[P1]
self.wl = self.s.utility_stats.lines[P2]
self.s.set_state(self.gs)
def test_update_substrips_middle_of_board(self):
self.gs.set_occ((7,7), P1)
"""
self.assertEquals(self.bl, [20, 0, 0, 0, 0])
self.assertEquals(self.wl, [0, 0, 0, 0, 0])
def test_empty_board(self):
self.assertEquals(self.bl, [0, 0, 0, 0, 0])
self.assertEquals(self.wl, [0, 0, 0, 0, 0])
def test_update_substrips_SW_corner(self):
self.gs.set_occ((0,0), P1)
self.assertEquals(self.bl, [3, 0, 0, 0, 0])
self.assertEquals(self.wl, [0, 0, 0, 0, 0])
def test_update_substrips_near_SW_corner(self):
self.gs.set_occ((1,0), P1)
self.assertEquals(self.bl, [4, 0, 0, 0, 0])
self.assertEquals(self.wl, [0, 0, 0, 0, 0])
def test_update_substrips_NE_corner(self):
self.gs.set_occ((12,12), P1)
self.assertEquals(self.bl, [3, 0, 0, 0, 0])
self.assertEquals(self.wl, [0, 0, 0, 0, 0])
def test_update_substrips_remove_single_stone(self):
self.gs.set_occ((0,0), P1)
self.gs.set_occ((0,0), EMPTY)
self.assertEquals(self.bl, [0, 0, 0, 0, 0])
self.assertEquals(self.wl, [0, 0, 0, 0, 0])
def test_update_substrips_two_blacks_SW(self):
self.gs.set_occ((0,0), P1)
self.gs.set_occ((1,1), P1)
self.assertEquals(self.bl, [7, 1, 0, 0, 0])
self.assertEquals(self.wl, [0, 0, 0, 0, 0])
def test_update_substrips_2_opp_colour_pieces(self):
self.gs.set_occ((0,0), P1)
self.gs.set_occ((0,1), P2)
self.assertEquals(self.bl, [2, 0, 0, 0, 0])
self.assertEquals(self.wl, [3, 0, 0, 0, 0])
def test_update_substrips_2_pieces(self):
self.gs.set_occ((0,0), P1)
self.gs.set_occ((0,1), P1)
self.assertEquals(self.bl, [5, 1, 0, 0, 0])
self.assertEquals(self.wl, [0, 0, 0, 0, 0])
def test_update_substrips_5_in_a_row(self):
self.gs.set_occ((0,0), P1)
self.gs.set_occ((0,1), P1)
self.gs.set_occ((0,2), P1)
self.gs.set_occ((0,3), P1)
self.gs.set_occ((0,4), P1)
self.assertEquals(self.bl, [12, 1, 1, 1, 1])
self.assertEquals(self.wl, [0, 0, 0, 0, 0])
class LengthCountingTest(unittest.TestCase):
def setUp(self):
player1 = h_m.HumanPlayer("Blomp")
player2 = h_m.HumanPlayer("Kubba")
r = r_m.Rules(9, "standard")
my_game = g_m.Game(r, player1, player2)
self.gs = my_game.current_state
self.search_filter = pf_m.PriorityFilter()
self.util_calc = uc_m.UtilityCalculator()
self.s = ABState(search_filter=self.search_filter,
utility_calculator=self.util_calc)
self.bl = self.s.utility_stats.lines[P1]
self.wl = self.s.utility_stats.lines[P2]
self.s.set_state(self.gs)
def test_middle_for_black_diag_2_for_white(self):
self.gs.set_occ((4,4), P1)
self.gs.set_occ((2,2), P2)
self.assertEquals(self.bl, [17, 0, 0, 0, 0])
self.assertEquals(self.wl, [7, 0, 0, 0, 0])
def test_middle_for_black_left_1_for_white(self):
self.gs.set_occ((4,4), P1)
self.gs.set_occ((3,4), P2)
self.assertEquals(self.bl, [16, 0, 0, 0, 0])
self.assertEquals(self.wl, [5+4+4, 0, 0, 0, 0])
def test_middle_for_black_right_1_for_white(self):
self.gs.set_occ((4,4), P1)
self.gs.set_occ((5,4), P2)
self.assertEquals(self.bl, [16, 0, 0, 0, 0])
self.assertEquals(self.wl, [5+4+4, 0, 0, 0, 0])
def test_middle_for_black_up_1_for_white(self):
self.gs.set_occ((4,4), P1)
self.gs.set_occ((4,5), P2)
self.assertEquals(self.bl, [16, 0, 0, 0, 0])
self.assertEquals(self.wl, [5+4+4, 0, 0, 0, 0])
def test_middle_for_black_down_1_for_white(self):
self.gs.set_occ((4,4), P1)
self.gs.set_occ((4,3), P2)
self.assertEquals(self.bl, [16, 0, 0, 0, 0])
self.assertEquals(self.wl, [5+4+4, 0, 0, 0, 0])
###############
class MoreAlphaBetaBridgeTests(unittest.TestCase):
def setUp(self):
player1 = h_m.HumanPlayer("Blomp")
player2 = h_m.HumanPlayer("Kubba")
r = r_m.Rules(5, "standard")
my_game = g_m.Game(r, player1, player2)
self.gs = my_game.current_state
self.search_filter = pf_m.PriorityFilter()
self.util_calc = uc_m.UtilityCalculator()
self.s = ABState(search_filter=self.search_filter,
utility_calculator=self.util_calc)
self.bl = self.s.utility_stats.lines[P1]
self.wl = self.s.utility_stats.lines[P2]
self.s.set_state(self.gs)
def test_initial_state_black_to_move(self):
self.assertEquals(self.s.to_move_colour(), P1)
def test_create_state(self):
child = self.s.create_state((2,2))
self.assertEquals(child.to_move_colour(), P2)
self.assertEquals(child.terminal(), False)
board = child.board()
self.assertEquals(board.get_occ((2,2)), P1)
self.assertEquals(board.get_occ((3,3)), EMPTY)
self.assertEquals(board.get_occ((1,1)), EMPTY)
def test_length_counters_after_sw_corner(self):
g1 = self.s.create_state((0,0)) # B
self.assertEquals(get_black_line_counts(g1), [3, 0, 0, 0, 0])
def test_length_counters_after_nw_corner(self):
g1 = self.s.create_state((0,4)) # B
self.assertEquals(get_black_line_counts(g1), [3, 0, 0, 0, 0])
def test_length_counters_after_ne_corner(self):
g1 = self.s.create_state((4,4)) # B
self.assertEquals(get_black_line_counts(g1), [3, 0, 0, 0, 0])
def test_length_counters_after_se_corner(self):
g1 = self.s.create_state((4,0)) # B
self.assertEquals(get_black_line_counts(g1), [3, 0, 0, 0, 0])
def test_cannot_place_off_e_edge(self):
try:
g1 = self.s.create_state((-1,2)) # B
except IllegalMoveException:
return
self.assertFail()
def test_length_counters_after_two_moves(self):
g1 = self.s.create_state((0,0)) # B
g2 = g1.create_state((1,1)) # W
self.assertEquals(get_black_line_counts(g2), [2, 0, 0, 0, 0])
self.assertEquals(get_white_line_counts(g2), [2, 0, 0, 0, 0])
def test_length_counters_after_two_moves_b(self):
g1 = self.s.create_state((1,1)) # B
g2 = g1.create_state((2,2)) # W
self.assertEquals(get_black_line_counts(g2), [2, 0, 0, 0, 0])
# One across the other diagonal
self.assertEquals(get_white_line_counts(g2), [3, 0, 0, 0, 0])
def test_length_counters_after_five_moves(self):
# along the NE diagonal
g1 = self.s.create_state((1,1)) # B
g2 = g1.create_state((2,2)) # W
g3 = g2.create_state((3,3)) # B
g4 = g3.create_state((4,4)) # W
g5 = g4.create_state((0,0)) # B
self.assertEquals(get_black_line_counts(g5), [6, 0, 0, 0, 0])
self.assertEquals(get_white_line_counts(g5), [5, 0, 0, 0, 0])
def test_length_counters_after_five_moves_in_cnrs_and_middle(self):
# four in the corners and one in the middle
g1 = self.s.create_state((0,0)) # B
g2 = g1.create_state((0,4)) # W
g3 = g2.create_state((4,4)) # B
g4 = g3.create_state((4,0)) # W
g5 = g4.create_state((2,2)) # B
self.assertEquals(get_black_line_counts(g5), [2, 0, 1, 0, 0])
self.assertEquals(get_white_line_counts(g5), [0, 0, 0, 0, 0])
def test_make_a_capture(self):
g1 = self.s.create_state((0,1)) # B
g2 = g1.create_state((1,2)) # W
g3 = g2.create_state((1,3)) # B
g4 = g3.create_state((2,3)) # W
g5 = g4.create_state((3,4)) # B
self.assertEquals(g5.to_move_colour(), P2)
self.assertEquals(g5.terminal(), False)
board = g5.board()
self.assertEquals(board.get_occ((0,1)), P1)
self.assertEquals(board.get_occ((1,3)), P1)
self.assertEquals(board.get_occ((3,4)), P1)
self.assertEquals(board.get_occ((1,2)), EMPTY)
self.assertEquals(board.get_occ((2,3)), EMPTY)
class ThreatTest(unittest.TestCase):
def setUp(self):
player1 = h_m.HumanPlayer("Blomp")
player2 = h_m.HumanPlayer("Kubba")
r = r_m.Rules(5, "standard")
my_game = g_m.Game(r, player1, player2)
self.gs = my_game.current_state
self.search_filter = pf_m.PriorityFilter()
self.util_calc = uc_m.UtilityCalculator()
self.s = ABState(search_filter=self.search_filter,
utility_calculator=self.util_calc)
self.bl = self.s.utility_stats.lines[P1]
self.wl = self.s.utility_stats.lines[P2]
self.s.set_state(self.gs)
def test_add_one_take_for_white(self):
g1 = self.s.create_state((2,4)) # B
g2 = g1.create_state((1,4)) # W
g3 = g2.create_state((3,4)) # B
self.assertEquals(g3.get_takes(), [0, 0, 1])
def test_SW_valid(self):
g1 = self.s.create_state((1,1)) # B
g2 = g1.create_state((3,3)) # W
g3 = g2.create_state((2,2)) # B
self.assertEquals(g3.get_takes(), [0, 0, 1])
def test_NW_valid(self):
g1 = self.s.create_state((1,3)) # B
g2 = g1.create_state((3,1)) # W
g3 = g2.create_state((2,2)) # B
self.assertEquals(g3.get_takes(), [0, 0, 1])
def test_NE_valid(self):
g1 = self.s.create_state((3,3)) # B
g2 = g1.create_state((1,1)) # W
g3 = g2.create_state((2,2)) # B
self.assertEquals(g3.get_takes(), [0, 0, 1])
def test_SE_valid(self):
g1 = self.s.create_state((2,2)) # B
g2 = g1.create_state((1,3)) # W
g3 = g2.create_state((3,1)) # B
self.assertEquals(g3.get_takes(), [0, 0, 1])
##########################################
def test_SW_invalid(self):
g1 = self.s.create_state((0,0)) # B
g2 = g1.create_state((2,2)) # W
g3 = g2.create_state((1,1)) # B
self.assertEquals(g3.get_takes(), [0, 0, 0])
def test_NW_invalid(self):
g1 = self.s.create_state((0,4)) # B
g2 = g1.create_state((2,2)) # W
g3 = g2.create_state((1,3)) # B
self.assertEquals(g3.get_takes(), [0, 0, 0])
def test_NE_invalid(self):
g1 = self.s.create_state((4,4)) # B
g2 = g1.create_state((2,2)) # W
g3 = g2.create_state((3,3)) # B
self.assertEquals(g3.get_takes(), [0, 0, 0])
def test_SE_invalid(self):
g1 = self.s.create_state((4,0)) # B
g2 = g1.create_state((2,2)) # W
g3 = g2.create_state((3,1)) # B
self.assertEquals(g3.get_takes(), [0, 0, 0])
##########################################
def test_W_invalid(self):
g1 = self.s.create_state((0,2)) # B
g2 = g1.create_state((2,2)) # W
g3 = g2.create_state((1,2)) # B
self.assertEquals(g3.get_takes(), [0, 0, 0])
def test_E_invalid(self):
g1 = self.s.create_state((4,2)) # B
g2 = g1.create_state((2,2)) # W
g3 = g2.create_state((3,2)) # B
self.assertEquals(g3.get_takes(), [0, 0, 0])
def test_N_invalid(self):
g1 = self.s.create_state((2,4)) # B
g2 = g1.create_state((2,2)) # W
g3 = g2.create_state((2,3)) # B
self.assertEquals(g3.get_takes(), [0, 0, 0])
def test_S_invalid(self):
g1 = self.s.create_state((2,0)) # B
g2 = g1.create_state((2,2)) # W
g3 = g2.create_state((2,1)) # B
self.assertEquals(g3.get_takes(), [0, 0, 0])
##########################################
def test_SW_invalid_take2(self):
g1 = self.s.create_state((1,0)) # B
g2 = g1.create_state((3,2)) # W
g3 = g2.create_state((2,1)) # B
self.assertEquals(g3.get_takes(), [0, 0, 0])
def test_SW_invalid_threat2(self):
g1 = self.s.create_state((1,0)) # B
g2 = g1.create_state((3,4)) # W (irrel.)
g3 = g2.create_state((2,1)) # B
self.assertEquals(g3.get_threats(), [0, 0, 0])
##########################################
'''
def test_seen(self):
self.s.set_seen(set([(1,2)]))
moves = list(self.s.successors())
'''
"""
# TODO: lots of threat cases, or unify stuff
if __name__ == "__main__":
unittest.main()
|
cropleyb/pentai
|
pentai/ai/t_ab_state.py
|
Python
|
mit
| 13,516
|
# -*- coding: utf-8 -*-
"""
Created on Feb 09, 2018
@author: Tyranic-Moron
"""
from twisted.plugin import IPlugin
from pymoronbot.moduleinterface import IModule
from pymoronbot.modules.commandinterface import BotCommand, admin
from zope.interface import implementer
import re
from collections import OrderedDict
from pymoronbot.response import IRCResponse, ResponseType
@implementer(IPlugin, IModule)
class Admin(BotCommand):
def triggers(self):
return ['admin']
@admin("Only my admins may add new admins!")
def _add(self, message):
"""add <nick/full hostmask> - adds the specified user to the bot admins list.
You can list multiple users to add them all at once.
Nick alone will be converted to a glob hostmask, eg: *!user@host"""
if len(message.ParameterList) < 2:
return IRCResponse(ResponseType.Say,
u"You didn't give me a user to add!",
message.ReplyTo)
for admin in message.ParameterList[1:]:
if message.ReplyTo in self.bot.channels:
if admin in self.bot.channels[message.ReplyTo].Users:
user = self.bot.channels[message.ReplyTo].Users[admin]
admin = u'*!{}@{}'.format(user.User, user.Hostmask)
admins = self.bot.config.getWithDefault('admins', [])
admins.append(admin)
self.bot.config['admins'] = admins
self.bot.config.writeConfig()
return IRCResponse(ResponseType.Say,
u"Added specified users as bot admins!",
message.ReplyTo)
@admin("Only my admins may remove admins!")
def _del(self, message):
"""del <full hostmask> - removes the specified user from the bot admins list.
You can list multiple users to remove them all at once."""
if len(message.ParameterList) < 2:
return IRCResponse(ResponseType.Say,
u"You didn't give me a user to remove!",
message.ReplyTo)
deleted = []
skipped = []
admins = self.bot.config.getWithDefault('admins', [])
for admin in message.ParameterList[1:]:
if message.ReplyTo in self.bot.channels:
if admin in self.bot.channels[message.ReplyTo].Users:
user = self.bot.channels[message.ReplyTo].Users[admin]
admin = u'*!{}@{}'.format(user.User, user.Hostmask)
if admin not in admins:
skipped.append(admin)
continue
admins.remove(admin)
deleted.append(admin)
self.bot.config['admins'] = admins
self.bot.config.writeConfig()
return IRCResponse(ResponseType.Say,
u"Removed '{}' as admin(s), {} skipped"
.format(u', '.join(deleted), len(skipped)),
message.ReplyTo)
def _list(self, message):
"""list - lists all admins"""
owners = self.bot.config.getWithDefault('owners', [])
admins = self.bot.config.getWithDefault('admins', [])
return IRCResponse(ResponseType.Say,
u"Owners: {} | Admins: {}".format(u', '.join(owners),
u', '.join(admins)),
message.ReplyTo)
subCommands = OrderedDict([
(u'add', _add),
(u'del', _del),
(u'list', _list)])
def help(self, query):
"""
@type query: list[str]
@rtype str
"""
if len(query) > 1:
subCommand = query[1].lower()
if subCommand in self.subCommands:
return u'{1}admin {0}'.format(re.sub(r"\s+", u" ", self.subCommands[subCommand].__doc__),
self.bot.commandChar)
else:
return self._unrecognizedSubcommand(subCommand)
else:
return self._helpText()
def _helpText(self):
return u"{1}admin ({0}) - manages users with bot admin permissions. " \
u"Use '{1}help admin <subcommand> for subcommand help.".format(u'/'.join(self.subCommands.keys()),
self.bot.commandChar)
def _unrecognizedSubcommand(self, subCommand):
return u"unrecognized subcommand '{}', " \
u"available subcommands for admin are: {}".format(subCommand, u', '.join(self.subCommands.keys()))
def execute(self, message):
if len(message.ParameterList) > 0:
subCommand = message.ParameterList[0].lower()
if subCommand not in self.subCommands:
return IRCResponse(ResponseType.Say,
self._unrecognizedSubcommand(subCommand),
message.ReplyTo)
return self.subCommands[subCommand](self, message)
else:
return IRCResponse(ResponseType.Say,
self._helpText(),
message.ReplyTo)
adminCommand = Admin()
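# Illustrative IRC usage (assuming the bot's command character is '!'):
#   !admin add SomeNick          -> stores *!user@host for SomeNick as an admin
#   !admin del *!user@host       -> removes that hostmask from the admins list
#   !admin list                  -> shows configured owners and admins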
|
MatthewCox/PyMoronBot
|
pymoronbot/modules/admin/Admin.py
|
Python
|
mit
| 5,227
|
DATA_DIR = '/media/d/ssd2/dstl/'
|
danzelmo/dstl-competition
|
global_vars.py
|
Python
|
mit
| 32
|
import pygame, math
pygame.init()
window = pygame.display.set_mode((600, 600))
pygame.display.set_caption("Fractal Tree")
screen = pygame.display.get_surface()
def drawTree(x1, y1, angle, depth):
if depth:
x2 = x1 + int(math.cos(math.radians(angle)) * depth * 10.0)
y2 = y1 + int(math.sin(math.radians(angle)) * depth * 10.0)
pygame.draw.line(screen, (255,255,255), (x1, y1), (x2, y2), 2)
drawTree(x2, y2, angle - 20, depth - 1)
drawTree(x2, y2, angle + 20, depth - 1)
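# Each call draws one line segment of length depth*10 pixels at the given
# angle, then recurses twice with the angle rotated by -20 and +20 degrees
# and depth reduced by one, so the tree terminates when depth reaches 0.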
def input(event):
if event.type == pygame.QUIT:
exit(0)
drawTree(300, 550, -90, 9)
pygame.display.flip()
while True:
input(pygame.event.wait())
|
kantel/Virtuelle-Wunderkammer
|
sources/treepy/tree.py
|
Python
|
mit
| 680
|
import time
from os import system
import bot as cleanBot
def pp(message, mtype='INFO'):
mtype = mtype.upper()
print '[%s] [%s] %s' % (time.strftime('%H:%M:%S', time.gmtime()), mtype, message)
def ppi(channel, message, username):
print '[%s %s] <%s> %s' % (time.strftime('%H:%M:%S', time.gmtime()), channel, username.lower(), message)
def pbot(message, channel=''):
if channel:
msg = '[%s %s] <%s> %s' % (time.strftime('%H:%M:%S', time.gmtime()), channel, 'BOT', message)
else:
msg = '[%s] <%s> %s' % (time.strftime('%H:%M:%S', time.gmtime()), 'BOT', message)
print msg
def pbutton(message_buffer):
#system('clear')
if cleanBot.Bot().botOn == True:
print '\n\n'
print '\n'.join([' {0:<12s} {1:>6s}'.format(message['username'][:12].title(), message['button'].lower()) for message in message_buffer])
else:
print '\n\n'
print 'CHAT ENABLED ACTIONS ARE OFF'
print '\n'.join([' {0:<12s} {1:>6s}'.format(message['username'][:12].title(), message['button'].lower()) for message in message_buffer])
|
paulperrone/twitch-IRC
|
lib/misc.py
|
Python
|
mit
| 1,101
|
from kaleidoscope.event import DataEvent
from kaleidoscope.options.option_query import OptionQuery
class OptionChainIterator(object):
def __init__(self, data):
self.data = data
# get all quote dates that can be iterated
self.dates = sorted(data['quote_date'].unique())
# turn list of dates into an iterable
self.dates = iter(self.dates)
def __iter__(self):
return self
def __next__(self):
try:
df = self.data
quote_date = next(self.dates)
# return a data event containing the daily quote for option chains
option_chains = df.loc[df['quote_date'] == quote_date]
# create the data event and return it
return DataEvent(quote_date, OptionQuery(option_chains))
except StopIteration:
raise
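# --------------------------------------------------------------------------
# Illustrative usage sketch (not part of the original module). It assumes the
# option chain data is a pandas DataFrame with a 'quote_date' column, which is
# what the iterator expects.
#
#   import pandas as pd
#
#   chains = pd.DataFrame({
#       'quote_date': ['2016-01-04', '2016-01-04', '2016-01-05'],
#       'symbol': ['VXX160115C00030000', 'VXX160115P00030000', 'VXX160115C00030000'],
#   })
#   for event in OptionChainIterator(chains):
#       print(event)  # one DataEvent per quote date, wrapping an OptionQuery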
|
michaelchu/kaleidoscope
|
kaleidoscope/options/iterator/option_chain.py
|
Python
|
mit
| 849
|
''' While x is less than or equal to 10 it will print x and add 1 to it,
then print the new value and so on until the condition is false,
which happens once x reaches 11 '''
condition = input('Enter number: ')
x = int(condition)
print (' ')
print ('While loop:')
while x <= 10:
print (x)
x += 1
# will stop adding 1 when it reaches 11
while x > 10:
print('True')
print (x)
print('Number is higher than 10')
break # otherwise it will print True forever, like this:
# uncomment to run and watch:
'''
while True:
print('infinite')
'''
print (' ')
print ('For Loop: ')
exampleList = [1,6,7,3,6,9,0]
print (' ')
print ('See code for reference')
print (' ')
for thing in exampleList:
print (thing)
print (' ')
print ('For x in range loop:')
print (' ')
for x in range (1,11): # range(1, 11) generates 1 through 10; it is not the list above
print (x)
|
lastralab/Statistics
|
Specialization/Personal/Loops.py
|
Python
|
mit
| 855
|
import balanced
balanced.configure('ak-test-2eKlj1ZDfAcZSARMf3NMhBHywDej0avSY')
debit = balanced.Debit.fetch('/debits/WD5EW7vbyXlTsudIGF5AkrEA')
debit.description = 'New description for debit'
debit.meta = {
'facebook.id': '1234567890',
'anykey': 'valuegoeshere',
}
debit.save()
|
balanced/balanced-python
|
scenarios/debit_update/executable.py
|
Python
|
mit
| 284
|
import re
def getText(data):
res = []
resString = ""
pattern = re.compile(r"\(?M{0,4}(CM|CD|D?C{0,3})(XC|XL|L?X{0,3})(IX|IV|V?I{0,3})\)(.*?)\. *\n", re.I)
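# The pattern above appears to match roman-numeral section markers followed by
# text ending in a period and a newline, e.g. "(IV) Some heading text.\n"
# (case-insensitive); the later checkPattern strips the "(IV) " marker itself.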
iteratable = pattern.finditer(data)
newPattern = re.compile(r"\(?M{0,4}CM|CD|D?C{0,3}XC|XL|L?X{0,3}IX|IV|V?I{0,3}\)", re.I)
checkPattern = re.compile(r"\(?M{0,4}(CM|CD|D?C{0,3})(XC|XL|L?X{0,3})(IX|IV|V?I{0,3})\) *", re.I)
resString = ""
for _ in iteratable:
resString = str(_.group())
if(newPattern.match(resString) == None):
for a in checkPattern.finditer(resString):
resString = resString.replace(a.group(), "")
res.append(resString)
else:
print "notCool"
continue
return res
|
Utkarshdevd/summer14python
|
getText.py
|
Python
|
mit
| 665
|
#!/usr/bin/env python
# Takes apart large IATI XML files and outputs one file per reporting org.
# Copyright 2013 Mark Brough.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License v3.0 as
# published by the Free Software Foundation, either version 3 of the License,
# or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
from lxml import etree
import unicodecsv
import sys
import os
# FIXME: if there are multiple countries/countries+regions, then don't
# output to the same file.
def segment_file(prefix, filename, output_directory):
print "Segmenting file", filename
doc=etree.parse(os.path.join(filename))
extorgs = set(doc.xpath("//iati-activity/reporting-org/@ref"))
print "Found orgs", list(extorgs)
out = {}
iatiactivities = doc.xpath('//iati-activities')[0]
for org in extorgs:
out[org] = {
'title': prefix.upper() + " Activity file " + org,
'data': etree.Element('iati-activities')
}
for attribute, attribute_value in iatiactivities.items():
out[org]['data'].set(attribute, attribute_value)
activities = doc.xpath('//iati-activity')
for activity in activities:
if (activity.xpath("reporting-org/@ref")) and (activity.xpath("reporting-org/@ref")[0] != ""):
org = activity.xpath("reporting-org/@ref")[0]
out[org]['orgname'] = activity.xpath("reporting-org/text()")[0] if activity.xpath("reporting-org/text()") else ""
out[org]['orgtype'] = activity.xpath("reporting-org/@type")[0] if activity.xpath("reporting-org/@type") else ""
out[org]['data'].append(activity)
# Create metadata file...
fieldnames = ['org', 'orgname', 'orgtype', 'official', 'filename', 'url',
'package_name', 'package_title']
metadata_file = open(output_directory + 'metadata.csv', 'w')
metadata = unicodecsv.DictWriter(metadata_file, fieldnames)
metadata.writeheader()
for org, data in out.items():
print "Writing data for", org
# Check not empty
if data['data'].xpath('//iati-activity'):
d = etree.ElementTree(data['data'])
d.write(output_directory+prefix+"-"+org+".xml",
pretty_print=True,
xml_declaration=True,
encoding="UTF-8")
metadata.writerow({
'org':org,
'orgname':data['orgname'],
'orgtype':data['orgtype'],
'filename':prefix+"-"+org+'.xml',
'package_name': prefix+"-"+org,
'package_title': data['title']})
print "Finished writing data, find the files in", output_directory
metadata_file.close()
if __name__ == '__main__':
arguments = sys.argv
arguments.pop(0)
prefix = arguments[0]
arguments.pop(0)
filenames = arguments
output_directory = 'data/'
if not filenames:
print "No filenames"
else:
for filename in filenames:
segment_file(prefix, filename, output_directory)
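# Example invocation (illustrative): python segment_ro.py mydonor big-file.xml
# writes data/mydonor-<org>.xml for each reporting org plus data/metadata.csv.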
|
markbrough/iati-country-tester
|
segment_ro.py
|
Python
|
mit
| 3,383
|