text stringlengths 4 1.02M | meta dict |
|---|---|
from testbase import TestBase
from influxparawrite import InfluxParaWrite
class test_307(TestBase):
    """Cluster-resilience scenario: full shutdown, partial restart, write, full restart."""

    def run(self):
        nodes = (1, 2, 3)
        # Seed one metric per node, then verify each node sees all three.
        for node in nodes:
            self.env.sendSingleMetric(node, self.name, node)
        for node in nodes:
            if self.env.countMetrics(node, self.name) != 3:
                return ["FAIL", "node %d wrong count" % node]
        # Take the whole cluster down in the order 3 -> 2 -> 1.
        for node in reversed(nodes):
            self.env.killInflux(node)
        # Restore only nodes 1 and 2, write through node 2, then bring 3 back.
        self.env.startInflux(1)
        self.env.startInflux(2)
        ipw = InfluxParaWrite(self.env)
        ipw.start(2, 3, '', self.name)
        self.env.startInflux(3)
        # Every node must converge on the seeded 3 metrics plus the new writes.
        expected = ipw.count + 3
        for node in nodes:
            observed = self.env.countMetrics(node, self.name)
            if observed != expected:
                return ["FAIL", "node %d wrong count 2: %s != %s" % (node, observed, expected)]
        return ["PASS", ""]

    def desc(self):
        return 'Shut down nodes 3 -> 1 . Bring back up 2, then 1, and test, writing to 2'
| {
"content_hash": "5ff38c7c9efa058dce9cb3b3542ed7a7",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 89,
"avg_line_length": 43.5,
"alnum_prop": 0.581473968897904,
"repo_name": "hpcloud-mon/monasca-perf",
"id": "b535dd4ae1b4bcf4a0af94c3890f50146dc16320",
"size": "1479",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "influx_test/test_307.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "359971"
},
{
"name": "Shell",
"bytes": "3914"
}
],
"symlink_target": ""
} |
from quadtree import QuadTree
from strtree import STRtree | {
"content_hash": "57462b5ab419cd56f2dcbb6341ea2618",
"timestamp": "",
"source": "github",
"line_count": 2,
"max_line_length": 29,
"avg_line_length": 28.5,
"alnum_prop": 0.8771929824561403,
"repo_name": "geoscript/geoscript-py",
"id": "58b5576e998c592175a800020d5513a699e3fc2a",
"size": "57",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "geoscript/index/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "656"
},
{
"name": "Java",
"bytes": "8264"
},
{
"name": "Python",
"bytes": "262087"
},
{
"name": "Shell",
"bytes": "2173"
}
],
"symlink_target": ""
} |
import re
from gym import error
import glob
# Matches weight-save paths such as:
#   checkpoints/KerasDDPG-v0-InvertedPendulum-v0-20170701190920_actor.h5
# Captured groups: (agent-vN, env-vN, timestamp, extension); an optional
# "_suffix" (e.g. "_actor") before the extension is allowed but not captured.
weight_save_re = re.compile(r'^(?:\w+\/)+?(\w+-v\d+)-(\w+-v\d+)-(\d+)(?:_\w+)?\.(\w+)$')


def get_fields(weight_save_name):
    """Split a weight-save path into ``(agent_name, env_name, timestamp)``.

    Raises gym's ``error.Error`` when the path does not match the expected
    naming scheme.
    """
    match = weight_save_re.search(weight_save_name)
    if not match:
        # Bug fix: the message previously interpolated the *builtin* ``id``
        # function (rendering "<built-in function id>") instead of the
        # offending file name.
        raise error.Error('Attempted to read a malformed weight save: {}. '
                          '(Currently all weight saves must be of the form {}.)'
                          .format(weight_save_name, weight_save_re.pattern))
    return match.group(1), match.group(2), int(match.group(3))
def get_latest_save(file_folder, agent_name, env_name, version_number):
    """Return the fields of the newest matching weight save.

    Scans ``file_folder`` for ``*.h5`` files and returns the
    ``(agent_name, env_name, timestamp)`` tuple of the most recent save for
    the given agent/environment pair, or ``[]`` when none matches.  The
    information can be used to generate the loading path.
    """
    latest = []
    for candidate in glob.glob("%s%s" % (file_folder, "*.h5")):
        fields = get_fields(candidate)
        if fields[0] != agent_name or fields[1] != env_name:
            continue
        # Keep whichever save carries the greatest timestamp seen so far.
        if latest == [] or fields[2] > latest[2]:
            latest = fields
    return latest
| {
"content_hash": "6d1c02fec8bb8df3b9642f7ce2b194fc",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 158,
"avg_line_length": 42,
"alnum_prop": 0.6948853615520282,
"repo_name": "benelot/bullet-gym",
"id": "1ccedce5ccd0f5715cdfa97c6c6277426bf85885",
"size": "1135",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pybulletgym/envs/kerasrl_utils.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "718069"
},
{
"name": "Shell",
"bytes": "190"
}
],
"symlink_target": ""
} |
from builtins import str
import logging
from bq_data_access.v1.gnab_data import GNABFeatureProvider
from bq_data_access.v1.utils import DurationLogged
from django.conf import settings
# Feature-type tag identifying SeqPeek data requests.
SEQPEEK_FEATURE_TYPE = 'SEQPEEK'
class SeqPeekDataProvider(GNABFeatureProvider):
    """BigQuery-backed provider of MAF mutation rows for SeqPeek.

    Builds a query over the configured MAF table restricted to one gene and
    to samples belonging to the requested cohorts, then unpacks the raw
    BigQuery response rows into dicts.
    """

    def __init__(self, feature_id, **kwargs):
        super(SeqPeekDataProvider, self).__init__(feature_id, **kwargs)

    @classmethod
    def process_data_point(cls, data_point):
        # SeqPeek consumes the raw value as a string.
        return str(data_point['value'])

    def build_query(self, project_name, dataset_name, table_name, feature_def, cohort_dataset, cohort_table, cohort_id_array, project_id_array):
        """Return the BigQuery SQL string for this feature request.

        Selects mutation rows for ``feature_def.gene`` whose tumor sample
        barcode appears in the given cohort table rows.
        """
        cohort_project_name = settings.GCLOUD_PROJECT_ID
        # Generate the 'IN' statement string: (%s, %s, ..., %s)
        cohort_id_stmt = ', '.join([str(cohort_id) for cohort_id in cohort_id_array])
        project_id_stmt = ''
        if project_id_array is not None:
            project_id_stmt = ', '.join([str(project_id) for project_id in project_id_array])
        query_template = \
            ("SELECT ParticipantBarcode, Tumor_SampleBarcode, Tumor_AliquotBarcode, "
             " Hugo_symbol, "
             " UniProt_AApos, "
             " variant_classification, "
             " HGNC_UniProt_ID_Supplied_By_UniProt as uniprot_id "
             "FROM [{project_name}:{dataset_name}.{table_name}] "
             "WHERE Hugo_Symbol='{gene}' "
             "AND Tumor_SampleBarcode IN ( "
             " SELECT sample_barcode "
             " FROM [{cohort_project_name}:{cohort_dataset}.{cohort_table}] "
             " WHERE cohort_id IN ({cohort_id_list})"
             " AND (project_id IS NULL")
        # Rows with no project_id always qualify; project filtering is only
        # appended when a project list was supplied.
        query_template += (" OR project_id IN ({project_id_list})))" if project_id_array is not None else "))")
        query = query_template.format(dataset_name=dataset_name, project_name=project_name, table_name=table_name,
                                      gene=feature_def.gene, cohort_project_name=cohort_project_name,
                                      cohort_dataset=cohort_dataset, cohort_table=cohort_table,
                                      cohort_id_list=cohort_id_stmt, project_id_list=project_id_stmt)
        logging.debug("BQ_QUERY_SEQPEEK: " + query)
        return query

    @DurationLogged('SEQPEEK_GNAB', 'UNPACK')
    def unpack_query_response(self, query_result_array):
        """Convert raw BigQuery row objects into a list of flat dicts.

        Column order must match the SELECT list in build_query.
        """
        result = []
        # NOTE(review): skip_count is logged below but never incremented, so
        # the "skipped" figure is always 0 — confirm whether skipping logic
        # was removed intentionally.
        skip_count = 0
        for row in query_result_array:
            result.append({
                'case_id': row['f'][0]['v'],
                'sample_id': row['f'][1]['v'],
                'aliquot_id': row['f'][2]['v'],
                'hugo_symbol': row['f'][3]['v'],
                'uniprot_aapos': row['f'][4]['v'],
                'variant_classification': row['f'][5]['v'],
                'uniprot_id': row['f'][6]['v'],
            })
        logging.debug("Query result is {qrows} rows, skipped {skipped} rows".format(qrows=len(query_result_array),
                                                                                   skipped=skip_count))
        return result
| {
"content_hash": "cdda9f5c5b0707e17c87a8d36c2264fe",
"timestamp": "",
"source": "github",
"line_count": 71,
"max_line_length": 144,
"avg_line_length": 43.80281690140845,
"alnum_prop": 0.5569131832797428,
"repo_name": "isb-cgc/ISB-CGC-Webapp",
"id": "b2b1e97a87e484398e4dad2933948f730ad9263d",
"size": "3717",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "bq_data_access/v1/seqpeek_maf_data.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "689478"
},
{
"name": "Dockerfile",
"bytes": "3046"
},
{
"name": "HTML",
"bytes": "908979"
},
{
"name": "JavaScript",
"bytes": "5400633"
},
{
"name": "Python",
"bytes": "1348714"
},
{
"name": "Sass",
"bytes": "156499"
},
{
"name": "Shell",
"bytes": "47211"
}
],
"symlink_target": ""
} |
import mock
import six
from heat.common import exception as exc
from heat.common import template_format
from heat.engine.resources.openstack.heat import software_component as sc
from heat.engine import stack
from heat.engine import template
from heat.tests import common
from heat.tests import utils
class SoftwareComponentTest(common.HeatTestCase):
    """Tests for the OS::Heat::SoftwareComponent resource.

    A single-resource stack is built from an inline template and the
    resource's RPC client is replaced with a MagicMock, so no engine
    service is required.
    """

    def setUp(self):
        super(SoftwareComponentTest, self).setUp()
        self.ctx = utils.dummy_context()
        tpl = '''
        heat_template_version: 2013-05-23
        resources:
          mysql_component:
            type: OS::Heat::SoftwareComponent
            properties:
              configs:
                - actions: [CREATE]
                  config: |
                    #!/bin/bash
                    echo "Create MySQL"
                  tool: script
                - actions: [UPDATE]
                  config: |
                    #!/bin/bash
                    echo "Update MySQL"
                  tool: script
              inputs:
                - name: mysql_port
              outputs:
                - name: root_password
        '''
        self.template = template_format.parse(tpl)
        self.stack = stack.Stack(
            self.ctx, 'software_component_test_stack',
            template.Template(self.template))
        self.component = self.stack['mysql_component']
        # Swap in a mock RPC client so engine calls can be asserted on.
        self.rpc_client = mock.MagicMock()
        self.component._rpc_client = self.rpc_client

    def test_resource_mapping(self):
        # The plugin must expose exactly one mapping: SoftwareComponent.
        mapping = sc.resource_mapping()
        self.assertEqual(1, len(mapping))
        self.assertEqual(sc.SoftwareComponent,
                         mapping['OS::Heat::SoftwareComponent'])
        self.assertIsInstance(self.component, sc.SoftwareComponent)

    def test_handle_create(self):
        # handle_create() should wrap the configs list under the 'config'
        # key, use the 'component' group, and record the returned id.
        config_id = 'c8a19429-7fde-47ea-a42f-40045488226c'
        value = {'id': config_id}
        self.rpc_client.create_software_config.return_value = value
        props = dict(self.component.properties)
        self.component.handle_create()
        self.rpc_client.create_software_config.assert_called_with(
            self.ctx,
            group='component',
            name=None,
            inputs=props['inputs'],
            outputs=props['outputs'],
            config={'configs': props['configs']},
            options=None)
        self.assertEqual(config_id, self.component.resource_id)

    def test_handle_delete(self):
        # Delete is a no-op without a resource id, succeeds on a clean RPC
        # call, and swallows NotFound (config already gone).
        # NOTE(review): this assigns resource_id on the *test case*, not on
        # self.component — presumably self.component.resource_id was meant;
        # confirm a fresh component indeed has no resource_id.
        self.resource_id = None
        self.assertIsNone(self.component.handle_delete())
        config_id = 'c8a19429-7fde-47ea-a42f-40045488226c'
        self.component.resource_id = config_id
        self.rpc_client.delete_software_config.return_value = None
        self.assertIsNone(self.component.handle_delete())
        self.rpc_client.delete_software_config.side_effect = exc.NotFound
        self.assertIsNone(self.component.handle_delete())

    def test_resolve_attribute(self):
        # Unknown attributes and a missing resource id resolve to None.
        self.assertIsNone(self.component._resolve_attribute('others'))
        self.component.resource_id = None
        self.assertIsNone(self.component._resolve_attribute('configs'))
        self.component.resource_id = 'c8a19429-7fde-47ea-a42f-40045488226c'
        configs = self.template['resources']['mysql_component'
                                             ]['properties']['configs']
        # configs list is stored in 'config' property of SoftwareConfig
        value = {'config': {'configs': configs}}
        self.rpc_client.show_software_config.return_value = value
        self.assertEqual(configs, self.component._resolve_attribute('configs'))
        # A vanished software config resolves to None rather than raising.
        self.rpc_client.show_software_config.side_effect = exc.NotFound
        self.assertIsNone(self.component._resolve_attribute('configs'))
class SoftwareComponentValidationTest(common.HeatTestCase):
    """Scenario-driven validation tests for OS::Heat::SoftwareComponent.

    Each scenario supplies a resource snippet plus the expected validation
    outcome: ``err``/``err_msg`` are None when the snippet is valid,
    otherwise the expected exception type and message fragment.
    """

    scenarios = [
        (
            'component_full',
            dict(snippet='''
                 component:
                   type: OS::Heat::SoftwareComponent
                   properties:
                     configs:
                       - actions: [CREATE]
                         config: |
                           #!/bin/bash
                           echo CREATE $foo
                         tool: script
                     inputs:
                       - name: foo
                     outputs:
                       - name: bar
                     options:
                       opt1: blah
                 ''',
                 err=None,
                 err_msg=None)
        ),
        (
            'no_input_output_options',
            dict(snippet='''
                 component:
                   type: OS::Heat::SoftwareComponent
                   properties:
                     configs:
                       - actions: [CREATE]
                         config: |
                           #!/bin/bash
                           echo CREATE $foo
                         tool: script
                 ''',
                 err=None,
                 err_msg=None)
        ),
        (
            'wrong_property_config',
            dict(snippet='''
                 component:
                   type: OS::Heat::SoftwareComponent
                   properties:
                     config: #!/bin/bash
                     configs:
                       - actions: [CREATE]
                         config: |
                           #!/bin/bash
                           echo CREATE $foo
                         tool: script
                 ''',
                 err=exc.StackValidationFailed,
                 err_msg='Unknown Property config')
        ),
        (
            'missing_configs',
            dict(snippet='''
                 component:
                   type: OS::Heat::SoftwareComponent
                   properties:
                     inputs:
                       - name: foo
                 ''',
                 err=exc.StackValidationFailed,
                 err_msg='Property configs not assigned')
        ),
        (
            'empty_configs',
            dict(snippet='''
                 component:
                   type: OS::Heat::SoftwareComponent
                   properties:
                     configs:
                 ''',
                 err=exc.StackValidationFailed,
                 err_msg='resources.component.properties.configs: '
                         'length (0) is out of range (min: 1, max: None)')
        ),
        (
            'invalid_configs',
            dict(snippet='''
                 component:
                   type: OS::Heat::SoftwareComponent
                   properties:
                     configs:
                       actions: [CREATE]
                       config: #!/bin/bash
                       tool: script
                 ''',
                 err=exc.StackValidationFailed,
                 err_msg='is not a list')
        ),
        (
            'config_empty_actions',
            dict(snippet='''
                 component:
                   type: OS::Heat::SoftwareComponent
                   properties:
                     configs:
                       - actions: []
                         config: #!/bin/bash
                         tool: script
                 ''',
                 err=exc.StackValidationFailed,
                 err_msg='component.properties.configs[0].actions: '
                         'length (0) is out of range (min: 1, max: None)')
        ),
        (
            'multiple_configs_per_action_single',
            dict(snippet='''
                 component:
                   type: OS::Heat::SoftwareComponent
                   properties:
                     configs:
                       - actions: [CREATE]
                         config: #!/bin/bash
                         tool: script
                       - actions: [CREATE]
                         config: #!/bin/bash
                         tool: script
                 ''',
                 err=exc.StackValidationFailed,
                 err_msg='Defining more than one configuration for the same '
                         'action in SoftwareComponent "component" is not '
                         'allowed.')
        ),
        (
            'multiple_configs_per_action_overlapping_list',
            dict(snippet='''
                 component:
                   type: OS::Heat::SoftwareComponent
                   properties:
                     configs:
                       - actions: [CREATE, UPDATE, RESUME]
                         config: #!/bin/bash
                         tool: script
                       - actions: [UPDATE]
                         config: #!/bin/bash
                         tool: script
                 ''',
                 err=exc.StackValidationFailed,
                 err_msg='Defining more than one configuration for the same '
                         'action in SoftwareComponent "component" is not '
                         'allowed.')
        ),
    ]

    def setUp(self):
        super(SoftwareComponentValidationTest, self).setUp()
        self.ctx = utils.dummy_context()
        # Splice the scenario's resource snippet into a minimal template.
        tpl = '''
        heat_template_version: 2013-05-23
        resources:
          %s
        ''' % self.snippet
        self.template = template_format.parse(tpl)
        self.stack = stack.Stack(
            self.ctx, 'software_component_test_stack',
            template.Template(self.template))
        self.component = self.stack['component']
        self.component._rpc_client = mock.MagicMock()

    def test_properties_schema(self):
        # Valid scenarios must validate cleanly; invalid ones must raise
        # the expected exception carrying the expected message fragment.
        if self.err:
            err = self.assertRaises(self.err, self.stack.validate)
            if self.err_msg:
                self.assertIn(self.err_msg, six.text_type(err))
        else:
            self.assertIsNone(self.stack.validate())
| {
"content_hash": "1daa061c4f259dcad035a02dc86a6e7d",
"timestamp": "",
"source": "github",
"line_count": 272,
"max_line_length": 79,
"avg_line_length": 36.088235294117645,
"alnum_prop": 0.4721882640586797,
"repo_name": "dims/heat",
"id": "d59044768560a6fb1229bf2a699635af2aafbcd3",
"size": "10391",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "heat/tests/openstack/heat/test_software_component.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "7618889"
},
{
"name": "Shell",
"bytes": "32548"
}
],
"symlink_target": ""
} |
from corehq.apps.app_manager.models import Application
from corehq.apps.userreports.specs import TypeProperty
from corehq.form_processor.interfaces.dbaccessors import CaseAccessors, FormAccessors
from dimagi.ext.jsonobject import JsonObject, StringProperty
# Map a (current, previous) question-score pair to a display status.
# Scores 1 and 0 come straight from form answers via get_val(); 99 is the
# default used when the form or answer is missing (see the get_val calls
# below).  Statuses wrapped in ** are rendered with emphasis.
STATUSES = {
    (1, 0): "Improved",
    (0, 1): "**Declined**",
    (1, 1): "Satisfactory",
    (0, 0): "**Needs improvement**",
    (99, 0): "**Needs improvement**",
    (0, 99): "**Needs improvement**",
    (99, 1): "Satisfactory",
    (1, 99): "Satisfactory",
    (99, 99): "Other",
}
def get_val(form, path, default=0):
    """Return the integer answer stored at *path* in *form*, or *default*.

    *default* is returned when the form is absent (falsy) or the stored
    answer cannot be interpreted as an integer.
    """
    if not form:
        return default
    try:
        return int(form.get_data(path))
    except (ValueError, TypeError):
        return default
def get_yes_no(yes, no):
    """Collapse a yes/no tally pair into a display string.

    A truthy *yes* wins over *no*; when both tallies are falsy the answer
    is unknown and 'N/A' is returned.
    """
    if yes:
        return 'Yes'
    return 'No' if no else 'N/A'
class EQAExpressionSpec(JsonObject):
    """UCR expression comparing a case's last two EQA form submissions.

    For the configured question, looks at the two most recent forms (by
    received_on) with the configured xmlns submitted against the case, and
    reports each submission's yes/no tally plus a status from STATUSES.
    """
    type = TypeProperty('eqa_expression')
    question_id = StringProperty()
    tally_yes_id = StringProperty()
    tally_no_id = StringProperty()
    display_text = StringProperty()
    xmlns = StringProperty()

    def __call__(self, item, context=None):
        # All forms ever submitted against this case, restricted to the
        # configured form type and sorted oldest -> newest.
        xforms_ids = CaseAccessors(item['domain']).get_case_xform_ids(item['_id'])
        forms = FormAccessors(item['domain']).get_forms(xforms_ids)
        f_forms = [f for f in forms if f.xmlns == self.xmlns]
        s_forms = sorted(f_forms, key=lambda x: x.received_on)
        if len(s_forms) >= 2:
            curr_form = s_forms[-1]
            prev_form = s_forms[-2]
        elif len(s_forms) == 1:
            curr_form = s_forms[-1]
            prev_form = None
        else:
            curr_form = None
            prev_form = None
        path_question = 'form/%s' % self.question_id
        path_yes = 'form/%s' % self.tally_yes_id
        path_no = 'form/%s' % self.tally_no_id
        # 99 marks "no answer / no form" — see the STATUSES keys.
        curr_ques = get_val(curr_form, path_question, 99)
        curr_sub_yes = get_val(curr_form, path_yes)
        curr_sub_no = get_val(curr_form, path_no)
        prev_ques = get_val(prev_form, path_question, 99)
        prev_sub_yes = get_val(prev_form, path_yes)
        prev_sub_no = get_val(prev_form, path_no)
        return {
            'question_id': self.question_id,
            'display_text': self.display_text,
            'current_submission': get_yes_no(curr_sub_yes, curr_sub_no),
            'previous_submission': get_yes_no(prev_sub_yes, prev_sub_no),
            'status': STATUSES.get((curr_ques, prev_ques))
        }
class EQAActionItemSpec(JsonObject):
    """UCR expression extracting the action plan for one incorrect question.

    Finds the case's most recent form with the configured xmlns, looks up
    the action plan entry matching question_id inside the configured
    section, and returns its details as a dict.

    NOTE(review): when no form, no action plan list, or no matching entry
    exists, the function falls through and implicitly returns None —
    confirm downstream consumers accept None.
    """
    type = TypeProperty('cqi_action_item')
    xmlns = StringProperty()
    section = StringProperty()
    question_id = StringProperty()

    def __call__(self, item, context=None):
        # Most recent matching form for this case, if any.
        xforms_ids = CaseAccessors(item['domain']).get_case_xform_ids(item['_id'])
        forms = FormAccessors(item['domain']).get_forms(xforms_ids)
        f_forms = [f for f in forms if f.xmlns == self.xmlns]
        s_forms = sorted(f_forms, key=lambda x: x.received_on)
        if len(s_forms) > 0:
            latest_form = s_forms[-1]
        else:
            latest_form = None
        path_to_action_plan = 'form/action_plan/%s/action_plan' % self.section
        if latest_form:
            action_plans = latest_form.get_data(path_to_action_plan)
            if action_plans:
                # Pick the first plan whose incorrect_questions matches.
                action_plan_for_question = None
                for action_plan in action_plans:
                    if action_plan.get('incorrect_questions', '') == self.question_id:
                        action_plan_for_question = action_plan
                        break
                if action_plan_for_question:
                    incorrect_question = action_plan_for_question.get('incorrect_questions', '')
                    # responsible/support hold comma-separated keys into the
                    # case doc; resolve each to its value or a placeholder.
                    responsible = ', '.join(
                        [
                            item.get(x.strip(), '---') for x in
                            action_plan_for_question.get('action_plan_input', {}).get('responsible', '').split(',')
                        ]
                    )
                    support = ', '.join(
                        [
                            item.get(x.strip(), '---') for x in
                            action_plan_for_question.get('action_plan_input', {}).get('support', '').split(',')
                        ]
                    )
                    # Resolve the human-readable label for the question from
                    # the app definition that produced the form.
                    application = Application.get(latest_form.app_id)
                    form = application.get_form_by_xmlns(self.xmlns)
                    question_list = application.get_questions(self.xmlns)
                    questions = {x['value']: x for x in question_list}
                    return {
                        'form_name': form.name['en'],
                        'section': self.section,
                        'timeEnd': latest_form.metadata.timeEnd,
                        'gap': questions.get('data/code_to_text/%s' % incorrect_question, {}).get('label', '---'),
                        'intervention_action': action_plan_for_question.get('intervention_action', '---'),
                        'responsible': responsible,
                        'support': support,
                        'deadline': action_plan_for_question.get('DEADLINE', '---'),
                        'notes': action_plan_for_question.get('notes', '---'),
                    }
def eqa_expression(spec, context):
    """Factory: wrap *spec* into an EQAExpressionSpec callable."""
    return EQAExpressionSpec.wrap(spec)
def cqi_action_item(spec, context):
    """Factory: wrap *spec* into an EQAActionItemSpec callable."""
    return EQAActionItemSpec.wrap(spec)
| {
"content_hash": "471769e25901235c069131f6927626a0",
"timestamp": "",
"source": "github",
"line_count": 147,
"max_line_length": 115,
"avg_line_length": 37.70068027210884,
"alnum_prop": 0.5364489354023818,
"repo_name": "qedsoftware/commcare-hq",
"id": "56dfb486d00bc577d9900e3fafa689db2a63b6ad",
"size": "5542",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "custom/eqa/expressions.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "ActionScript",
"bytes": "15950"
},
{
"name": "CSS",
"bytes": "508392"
},
{
"name": "HTML",
"bytes": "2869325"
},
{
"name": "JavaScript",
"bytes": "2395360"
},
{
"name": "PHP",
"bytes": "2232"
},
{
"name": "PLpgSQL",
"bytes": "125298"
},
{
"name": "Python",
"bytes": "14670713"
},
{
"name": "Shell",
"bytes": "37514"
}
],
"symlink_target": ""
} |
"""
=========================================
SVM: Maximum margin separating hyperplane
=========================================
Plot the maximum margin separating hyperplane within a two-class
separable dataset using a Support Vector Machine classifier with
linear kernel.
"""
print(__doc__)
import numpy as np
import matplotlib.pyplot as plt
from sklearn import svm
from sklearn.datasets import make_blobs
# we create 40 separable points
X, y = make_blobs(n_samples=40, centers=2, random_state=6)
# fit the model, don't regularize for illustration purposes
clf = svm.SVC(kernel='linear', C=1000)
clf.fit(X, y)
plt.scatter(X[:, 0], X[:, 1], c=y, s=30, cmap=plt.cm.Paired)
# plot the decision function
ax = plt.gca()
xlim = ax.get_xlim()
ylim = ax.get_ylim()
# create grid to evaluate model
xx = np.linspace(xlim[0], xlim[1], 30)
yy = np.linspace(ylim[0], ylim[1], 30)
YY, XX = np.meshgrid(yy, xx)
xy = np.vstack([XX.ravel(), YY.ravel()]).T
Z = clf.decision_function(xy).reshape(XX.shape)
# plot decision boundary and margins
ax.contour(XX, YY, Z, colors='k', levels=[-1, 0, 1], alpha=0.5,
linestyles=['--', '-', '--'])
# plot support vectors
ax.scatter(clf.support_vectors_[:, 0], clf.support_vectors_[:, 1], s=100,
linewidth=1, facecolors='none')
plt.show()
| {
"content_hash": "463fe8541ed6ac910fa05e1317ac9523",
"timestamp": "",
"source": "github",
"line_count": 45,
"max_line_length": 73,
"avg_line_length": 28.711111111111112,
"alnum_prop": 0.641640866873065,
"repo_name": "clemkoa/scikit-learn",
"id": "9fdbcc785ed2bd55f2ab0e9e47b2b2e108b1052c",
"size": "1292",
"binary": false,
"copies": "25",
"ref": "refs/heads/master",
"path": "examples/svm/plot_separating_hyperplane.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "3366"
},
{
"name": "C",
"bytes": "451996"
},
{
"name": "C++",
"bytes": "140322"
},
{
"name": "Makefile",
"bytes": "1512"
},
{
"name": "PowerShell",
"bytes": "17042"
},
{
"name": "Python",
"bytes": "7322224"
},
{
"name": "Shell",
"bytes": "20749"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import
import unittest
from xosgenx.generator import XOSProcessor, XOSProcessorArgs
from helpers import XProtoTestHelpers
def policy_output_enforcer(x, y):
    """Placeholder for the dynamically generated security enforcer.

    The real ``policy_output_enforcer`` is generated by the XOS processor
    and loaded via ``exec`` in the tests below; reaching this stub means
    code generation did not happen, so it always raises.

    Eliminates warnings arising due to the missing policy_output_enforcer,
    which is generated and loaded dynamically.
    """
    # Bug fix: the original ended with an unreachable ``return False``
    # after the raise; it has been removed.
    raise Exception("Security enforcer not generated. Test failed.")
class XProtoXOSSecurityTest(unittest.TestCase):
    """
    Use the Python code target to generate Python security policies, set up an
    appropriate environment and execute the Python. The security policies here
    deliberately made complex in order to stress the processor.

    Each test feeds an xproto policy through XOSProcessor and exec()s the
    generated source, which defines policy_output_enforcer at module level;
    the commented code after each exec shows the expected generated shape.
    """

    def setUp(self):
        # Target that renders proto.policies.test_policy into a Python
        # function named policy_output_enforcer.
        self.target = XProtoTestHelpers.write_tmp_target(
            "{{ xproto_fol_to_python_test('output',proto.policies.test_policy, None, '0') }}"
        )

    """
    This is the security policy for controllers
    """

    def test_controller_policy(self):
        xproto = """
        policy test_policy < ctx.user.is_admin | exists Privilege: Privilege.accessor_id = ctx.user.id & Privilege.object_type = "Deployment" & Privilege.permission = "role:admin" & Privilege.object_id = obj.id >
        """
        args = XOSProcessorArgs()
        args.inputs = xproto
        args.target = self.target
        output = XOSProcessor.process(args)
        exec(output)  # This loads the generated function, which should look like this:
        """
        def policy_output_enforcer(obj, ctx):
            i2 = ctx.user.is_admin
            i3 = Privilege.objects.filter(Q(accessor_id=ctx.user.id), Q(object_type='Deployment'), Q(permission='role:admin'), Q(object_id=obj.id))[0]
            i1 = (i2 or i3)
            return i1
        """
        # FIXME: Test this policy by executing it
        self.assertTrue(policy_output_enforcer is not None)

    """
    This is the security policy for ControllerNetworks
    """

    def test_controller_network_policy(self):
        xproto = """
        policy test_policy <
            ctx.user.is_admin
            | (exists Privilege:
                Privilege.accessor_id = ctx.user.id
                & Privilege.accessor_type = "User"
                & Privilege.object_type = "Slice"
                & Privilege.object_id = obj.owner.id)
            | (exists Privilege:
                Privilege.accessor_id = ctx.user.id
                & Privilege.accessor_type = "User"
                & Privilege.object_type = "Site"
                & Privilege.object_id = obj.owner.site.id
                & Privilege.permission = "role:admin") >
        """
        args = XOSProcessorArgs()
        args.inputs = xproto
        args.target = self.target
        output = XOSProcessor.process(args)
        exec(output)  # This loads the generated function, which should look like this:
        """
        def policy_output_enforcer(obj, ctx):
            i2 = ctx.user.is_admin
            i4 = Privilege.objects.filter(Q(accessor_id=ctx.user.id), Q(accessor_type='User'), Q(object_type='Slice'), Q(object_id=obj.owner.id))[0]
            i5 = Privilege.objects.filter(Q(accessor_id=ctx.user.id), Q(accessor_type='User'), Q(object_type='Site'), Q(object_id=obj.owner.site.id), Q(permission='role:admin'))[0]
            i3 = (i4 or i5)
            i1 = (i2 or i3)
            return i1
        """
        # FIXME: Test this policy by executing it
        self.assertTrue(policy_output_enforcer is not None)

    """
    This is the security policy for Slices
    """

    def test_slice_policy(self):
        xproto = """
        policy site_policy <
            ctx.user.is_admin
            | (ctx.write_access -> exists Privilege: Privilege.object_type = "Site" & Privilege.object_id = obj.id & Privilege.accessor_id = ctx.user.id & Privilege.permission_id = "role:admin") >

        policy test_policy <
            ctx.user.is_admin
            | (*site_policy(site)
              & ((exists Privilege:
                    Privilege.accessor_id = ctx.user.id
                    & Privilege.accessor_type = "User"
                    & Privilege.object_type = "Slice"
                    & Privilege.object_id = obj.id
                    & (ctx.write_access->Privilege.permission="role:admin"))
                 | (exists Privilege:
                    Privilege.accessor_id = ctx.user.id
                    & Privilege.accessor_type = "User"
                    & Privilege.object_type = "Site"
                    & Privilege.object_id = obj.site.id
                    & Privilege.permission = "role:admin"))
              )>
        """
        args = XOSProcessorArgs()
        args.inputs = xproto
        args.target = self.target
        output = XOSProcessor.process(args)
        exec(output)  # This loads the generated function, which should look like this:
        """
        def policy_output_enforcer(obj, ctx):
            i2 = ctx.user.is_admin
            i4 = policy_site_policy_enforcer(obj.site, ctx)
            i10 = ctx.write_access
            i11 = (not (not Privilege.objects.filter(Q(accessor_id=ctx.user.id), Q(accessor_type='User'), Q(object_type='Slice'), Q(object_id=obj.id), Q(permission='role:admin'))))
            i8 = (i10 and i11)
            i14 = ctx.write_access
            i12 = (not i14)
            i13 = (not (not Privilege.objects.filter(Q(accessor_id=ctx.user.id), Q(accessor_type='User'), Q(object_type='Slice'), Q(object_id=obj.id))))
            i9 = (i12 and i13)
            i6 = (i8 or i9)
            i7 = (not (not Privilege.objects.filter(Q(accessor_id=ctx.user.id), Q(accessor_type='User'), Q(object_type='Site'), Q(object_id=obj.site.id), Q(permission='role:admin'))))
            i5 = (i6 or i7)
            i3 = (i4 and i5)
            i1 = (i2 or i3)
            return i1
        """
        # FIXME: Test this policy by executing it
        self.assertTrue(policy_output_enforcer is not None)

    """
    This is the security policy for Users
    """

    def test_user_policy(self):
        xproto = """
        policy test_policy <
            ctx.user.is_admin
            | ctx.user.id = obj.id
            | (exists Privilege:
                Privilege.accessor_id = ctx.user.id
                & Privilege.accessor_type = "User"
                & Privilege.permission = "role:admin"
                & Privilege.object_type = "Site"
                & Privilege.object_id = ctx.user.site.id) >
        """
        args = XOSProcessorArgs()
        args.inputs = xproto
        args.target = self.target
        output = XOSProcessor.process(args)
        exec(output)  # This loads the generated function, which should look like this:
        """
        def policy_output_enforcer(obj, ctx):
            i2 = ctx.user.is_admin
            i4 = (ctx.user.id == obj.id)
            i5 = Privilege.objects.filter(Q(accessor_id=ctx.user.id), Q(accessor_type='User'), Q(permission='role:admin'), Q(object_type='Site'), Q(object_id=ctx.user.site.id))[0]
            i3 = (i4 or i5)
            i1 = (i2 or i3)
            return i1
        """
        # FIXME: Test this policy by executing it
        self.assertTrue(policy_output_enforcer is not None)
if __name__ == "__main__":
unittest.main()
| {
"content_hash": "3d716d648fc3f1370322637b2f418d06",
"timestamp": "",
"source": "github",
"line_count": 193,
"max_line_length": 208,
"avg_line_length": 36.64248704663213,
"alnum_prop": 0.5866798642533937,
"repo_name": "opencord/xos",
"id": "b75f3c78677e523b76b7d0daf0fb7a98b576ae49",
"size": "7667",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "lib/xos-genx/xos-genx-tests/test_xos_security.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "JavaScript",
"bytes": "5024"
},
{
"name": "Makefile",
"bytes": "13624"
},
{
"name": "Python",
"bytes": "1329912"
},
{
"name": "Shell",
"bytes": "57651"
},
{
"name": "Smarty",
"bytes": "3161"
}
],
"symlink_target": ""
} |
import logging
from pyvisdk.exceptions import InvalidArgumentError
########################################
# Automatically generated, do not edit.
########################################
# Module-level logger; not referenced elsewhere in this file, kept for
# parity with the other generated pyvisdk modules.
log = logging.getLogger(__name__)
def HostDhcpServiceConfig(vim, *args, **kwargs):
    '''This data object type describes the configuration of a DHCP service instance
    representing both the configured properties on the instance and identification
    information.

    Required attributes: key, spec.  Optional: changeOperation,
    dynamicProperty, dynamicType.  Positional args fill required then
    optional slots in order; keyword args must name a known attribute.
    '''

    obj = vim.client.factory.create('ns0:HostDhcpServiceConfig')

    # do some validation checking...
    # Bug fix: there are 2 mandatory attributes ('key', 'spec'), but the
    # original message claimed 3 and counted only positional arguments.
    if (len(args) + len(kwargs)) < 2:
        raise IndexError('Expected at least 2 arguments got: %d' % (len(args) + len(kwargs)))

    required = [ 'key', 'spec' ]
    optional = [ 'changeOperation', 'dynamicProperty', 'dynamicType' ]

    for name, arg in zip(required+optional, args):
        setattr(obj, name, arg)

    for name, value in kwargs.items():
        if name in required + optional:
            setattr(obj, name, value)
        else:
            raise InvalidArgumentError("Invalid argument: %s. Expected one of %s" % (name, ", ".join(required + optional)))

    return obj
| {
"content_hash": "fc31fac1ea17b87e4d45507cad40ba2e",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 124,
"avg_line_length": 33.88235294117647,
"alnum_prop": 0.6206597222222222,
"repo_name": "xuru/pyvisdk",
"id": "2a6a5d4739c3abb05c6e7343109237815af00620",
"size": "1153",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pyvisdk/do/host_dhcp_service_config.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "369"
},
{
"name": "Python",
"bytes": "3037849"
},
{
"name": "Shell",
"bytes": "4517"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
import logging
from django.utils import timezone
from rest_framework import serializers
from .models import Driver, Order
from .tasks import find_drivers
# NOTE(review): getLogger() is called without a name, so this is the root
# logger — confirm that module-scoped logging (getLogger(__name__)) was not
# intended.
logger = logging.getLogger()
class DriverSerializer(serializers.ModelSerializer):
    '''
    Driver: exposes the driver's id and current coordinates.
    '''
    class Meta:
        model = Driver
        fields = (
            'id', 'lat', 'lon',
        )
    def update(self, instance, validated_data):
        # Persist the new coordinates via the default ModelSerializer update.
        instance = super(DriverSerializer, self).update(
            instance, validated_data)
        # A position update marks the driver as available for orders.
        # NOTE(review): is_ready is set *after* super().update() has saved,
        # and no further save happens here — confirm the flag is persisted
        # elsewhere (or only needed in memory).
        instance.is_ready = True
        logger.info('Driver %s is ready for work' % instance)
        return instance
class OrderSerializer(serializers.ModelSerializer):
    '''
    Order: creating/saving an order kicks off asynchronous driver matching.
    '''
    # Default the order time to "now" when the client omits it.
    time = serializers.DateTimeField(required=False, default=timezone.now)
    class Meta:
        model = Order
        fields = (
            'id', 'client', 'time',
            'lat', 'lon',
        )
    def save(self, *args, **kwargs):
        res = super(OrderSerializer, self).save(*args, **kwargs)
        logger.info('Received new order')
        # Fire-and-forget task that matches drivers to open orders.
        find_drivers.delay()
        return res
| {
"content_hash": "6b33f82a18316562f167166f4cdcc3c9",
"timestamp": "",
"source": "github",
"line_count": 55,
"max_line_length": 74,
"avg_line_length": 21.01818181818182,
"alnum_prop": 0.606401384083045,
"repo_name": "rombr/agile-fusion-test-task",
"id": "e8a2ad84e34e3c964a77eef504358a3e272ab203",
"size": "1180",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "taxi/api/serializers.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "15049"
}
],
"symlink_target": ""
} |
"""
test_arpspoof
----------------------------------
Tests for `arpspoof` module.
"""
import unittest
from arpspoof import arpspoof
class TestArpspoof(unittest.TestCase):
    """Skeleton test case for the ``arpspoof`` module."""

    def setUp(self):
        """No fixtures required yet."""
        pass

    def test_something(self):
        """Placeholder test; intentionally does nothing."""
        pass

    def tearDown(self):
        """Nothing to clean up."""
        pass
# Run the suite when this file is executed as a script.
if __name__ == '__main__':
    unittest.main()
"content_hash": "cbf7ab98597738f205af8799acbb432c",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 38,
"avg_line_length": 13.6,
"alnum_prop": 0.5529411764705883,
"repo_name": "byt3bl33d3r/arpspoof",
"id": "2de84fc83420ce3a7d49119c8f2b1ac54306f76a",
"size": "387",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tests/test_arpspoof.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "1699"
},
{
"name": "Python",
"bytes": "6023"
}
],
"symlink_target": ""
} |
from datetime import datetime, timedelta
import argparse
import pytest
from ding import ding
def test_time_parser_relative_1s():
    """'1s' parses to one second."""
    assert ding.TimeParser(['1s'], relative=True).get_seconds() == 1
def test_time_parser_relative_1m():
    """'1m' parses to sixty seconds."""
    assert ding.TimeParser(['1m'], relative=True).get_seconds() == 60
def test_time_parser_relative_1h():
    """'1h' parses to 3600 seconds."""
    assert ding.TimeParser(['1h'], relative=True).get_seconds() == 3600
def test_time_parser_relative_1h_30m():
    """Hour and minute tokens accumulate into a single total.

    NOTE: this test was previously defined twice verbatim; the second
    definition silently shadowed the first, so the duplicate has been
    removed without changing what actually runs.
    """
    parser = ding.TimeParser(['1h', '30m'], relative=True)
    assert parser.get_seconds() == 60 * 60 + 30 * 60
def test_time_parser_relative_1h_30m_10s():
    # Hour, minute and second tokens all contribute to the total.
    seconds = ding.TimeParser(['1h', '30m', '10s'], relative=True).get_seconds()
    assert seconds == 60 * 60 + 30 * 60 + 10
def test_time_parser_absolute_10s():
    # Build an "HH:MM:SS" wall-clock string ten seconds ahead and check
    # the computed countdown, allowing a little jitter for test runtime.
    target = (datetime.now() + timedelta(seconds=10)).time()
    clock = str(target).split('.')[0]
    parser = ding.TimeParser(clock, relative=False)
    assert abs(parser.get_seconds() - 10) < 2
def test_time_parser_absolute_1h():
    # Only an hour component is supplied, so the target moment can fall
    # anywhere within the coming hour.
    next_hour = (datetime.now() + timedelta(hours=1)).hour
    parser = ding.TimeParser(str(next_hour), relative=False)
    assert 0 < parser.get_seconds() < 60 * 60
def test_time_parser_absolute_5m():
    # "HH:MM" five minutes ahead; accept roughly a minute of slack since
    # the seconds component is truncated from the target string.
    target = (datetime.now() + timedelta(minutes=5)).time()
    clock = ':'.join(str(target).split(':')[:2])
    parser = ding.TimeParser(clock, relative=False)
    assert 60 * 4 <= parser.get_seconds() < 60 * 6
def test_relative_time_regex_very_wrong_regex():
    # Free-form prose is not a valid relative-time spec and must be
    # rejected with an argparse type error.
    with pytest.raises(argparse.ArgumentTypeError):
        ding.relative_time('this is very wrong')
def test_relative_time_regex_1s():
    # The minimal single-unit spec is accepted (truthy result).
    result = ding.relative_time('1s')
    assert result
def test_relative_time_regex_h_m_s():
    # A full hour/minute/second spec is accepted (truthy result).
    result = ding.relative_time('12h 12m 34s')
    assert result
def test_relative_time_regex_extra_space():
    # Whitespace between unit tokens is tolerated (truthy result).
    result = ding.relative_time('12h 12m')
    assert result
def test_absolute_time_hh_mm_ss():
    # A full "HH:MM:SS" clock value is accepted (truthy result).
    result = ding.absolute_time('12:12:12')
    assert result
def test_absolute_time_hh():
    # A bare hour value is accepted (truthy result).
    result = ding.absolute_time('12')
    assert result
def test_absolute_time_hh_mm_ss_invalid_hour():
    # Hour 32 is out of range and must raise an argparse type error.
    with pytest.raises(argparse.ArgumentTypeError):
        assert ding.absolute_time('32:12:12')
def test_beep_with_custom_command(capfd):
    # beep() should run the custom shell command once per configured
    # beep; capture stdout to verify the command actually executed.
    ding.beep(1, 'echo "test"')
    captured_out, _err = capfd.readouterr()
    assert captured_out == 'test\n' * ding.N_BEEPS
| {
"content_hash": "b1fac21641c4fb3776eba6f2034d1495",
"timestamp": "",
"source": "github",
"line_count": 72,
"max_line_length": 91,
"avg_line_length": 34.43055555555556,
"alnum_prop": 0.6680112948769665,
"repo_name": "liviu-/ding",
"id": "75b3dfe2690d0992c298fa6a30c6524e72109425",
"size": "2479",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "tests/test_functions.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "11025"
}
],
"symlink_target": ""
} |
import ness6rest as nessrest
import os
import re
import sys
import time
import datetime
class Scan(object):
'''
Scan interface
'''
def __init__(self,name,
scanner=None,
url="",
login="",
password="",
insecure="",
template=""):
if scanner:
self.scanner = scanner
else:
self.scanner = nessrest.Scanner(url=url,
login=login,
password=password,
insecure=insecure)
self.name = name
self.scan_id = ""
self.scanner_id = "1"
self.folder_id = ""
self.uuid = ""
self.category = ""
self.settings = {"launch":"ONETIME",
"enabled":False,
"launch_now":True,
"text_targets":"",
"file_targets":""}
self.audits = {}
self.creds = {}
self.uploads = []
self.categories = {}
self._cache = {}
if template:
self.set_scan_template(template)
if self.scanner.scan_exists(name):
self.get_scan_settings(self.scanner.scan_id)
def submit(self):
self.settings["name"] = self.name
self.settings["scanner_id"] = self.scanner_id
self.settings["folder_id"] = self.folder_id
extra = {"uuid":self.uuid,
"settings":self.settings}
if self.audits:
extra.update({"audits":self.audits})
if self.creds:
extra.update({"credentials":self.creds})
for filename in self.uploads:
self.scanner.upload(filename)
if self.scan_id:
self.scanner.action(action="scans/"+str(self.scan_id),
method="put",
extra=extra)
if "error" in self.scanner.res:
self._error(self.scanner.res["error"])
self.scanner.action(action="scans/"+str(self.scan_id)+
"/launch",
method="post")
if "error" in self.scanner.res:
self._error(self.scanner.res["error"])
self.get_scan_settings(self.scan_id)
else:
self.scanner.action(action="scans",
method="post",
extra=extra)
if "error" in self.scanner.res:
self._error(self.scanner.res["error"])
self.scan_id = self.scanner.res["scan"]["id"]
self.get_scan_settings(self.scan_id)
def is_running(self):
self.get_scan_info()
return self.info["status"] == "running"
def is_completed(self):
self.get_scan_info()
return self.info["status"] == "completed"
def get_scan_info(self,scan_id=""):
if scan_id:
self.scan_id = scan_id
self.scanner.action("scans/"+str(self.scan_id),
method="get")
if "info" in self.scanner.res:
self.info = self.scanner.res["info"]
def get_scan_settings(self,scan_id=""):
if scan_id:
self.scan_id = scan_id
self.scanner.action(action="editor/scan/"+str(scan_id),
method="get")
self._cache["scan"] = self.scanner.res
self.uuid = self._cache["scan"]["uuid"]
def set_scan_template(self,name):
self.scanner.action(action="editor/scan/templates", method="get")
for template in self.scanner.res["templates"]:
for key in template:
if template[key] == name:
self.uuid = template["uuid"]
if self.uuid:
self.scanner.action(action="editor/scan/templates/"+
self.uuid,
method="get")
self.settings.update(self._find_inputs(self.scanner.res))
self._cache["template"] = self.scanner.res
def get_scan_template_names(self):
results = {}
self.scanner.action(action="editor/scan/templates", method="get")
for template in self.scan.res["templates"]:
results[template["name"]] = template["title"]
return results
def set_compliance_category(self, name):
categories = self.get_compliance_categories()
# Try full word match first
for category in categories.keys():
if re.search("^"+name.lower()+"$", category.lower()):
self.category = category
return
# Try contains match
for category in categories.keys():
if re.search(name.lower(), category.lower()):
self.category = category
return
def get_compliance_categories(self):
if self.categories:
return self.categories
if not "template" in self._cache:
self._error("Template must be set before categories.")
for item in self._cache["template"]["compliance"]["data"]:
self.categories[item["name"]] = None
if "offline_allowed" in item.keys() and item["offline_allowed"]:
inputs = self._find_inputs(item)
for key in inputs.keys():
self.categories[item["name"]] = key
return self.categories
def set_targets(self,targets):
if type(targets) == list:
self.settings["text_targets"] = ",".join(targets)
else:
self.settings["text_targets"] = targets
def add_audit_file(self,filename):
if not self.category:
self._error("Plugin must be set before adding audit file.")
self._verify_custom_audit_action("add")
self.audits["custom"]["add"].append({"category":self.category,
"file":os.path.basename(filename)
})
self.uploads.append(filename)
def remove_all_audit_files(self):
if not "scan" in self._cache or not "compliance" in self._cache["scan"]:
return
self._verify_custom_audit_action("delete")
for record in self._cache["scan"]["compliance"]["data"]:
for audit in record["audits"]:
if audit["type"] == "custom" and "id" in audit:
self.audits["custom"]["delete"].append(audit["id"])
def add_config_file(self,filename):
if not self.category:
self._error("Plugin must be set before adding config file.")
if self.categories[self.category]:
self.settings[self.categories[self.category]] = filename
self.uploads.append(filename)
def _verify_custom_audit_action(self,action="add"):
if not self.audits:
self.audits = {"custom":{action:[]}}
if "custom" not in self.audits:
self.audits["custom"] = {action:[]}
if action not in self.audits["custom"]:
self.audits["custom"][action] = []
def add_credential(self,cred):
self._verify_credential_action("add",cred.category,cred.name)
self.creds["add"][cred.category][cred.name].append(cred.__dict__)
def remove_all_credentials(self):
if not "scan" in self._cache or not "credentials" in self._cache["scan"]:
return
self._verify_credential_action("delete")
for record in self._cache["scan"]["credentials"]["data"]:
for item in record["types"]:
for instance in item["instances"]:
self.creds["delete"].append(instance["id"])
def _verify_credential_action(self,action="add",category="",name=""):
if action == "delete" and not self.creds:
self.creds = {action:[]}
else:
if not self.creds:
self.creds = {action:{category:{name:[]}}}
if action not in self.creds:
self.creds[action] = {category:{name:[]}}
if category not in self.creds[action]:
self.creds[action][category] = {name:[]}
if name not in self.creds[action][category]:
self.creds[action][category][name] = []
def set_folder(self,name):
# Find folder by name
self.scanner.action(action="folders", method="get")
for folder in self.scanner.res["folders"]:
if folder["name"] == name:
self.folder_id = folder["id"]
break
# Create if does not exist
if not self.folder_id:
self.scanner.action("folders",
method="post",
extra={"name": name})
self.folder_id = self.scanner.res["id"]
def download_scan(self,filename,export_format="nessus"):
self.scanner.action("scans/"+str(self.scan_id),
method="get")
extra = {"format": export_format}
self.scanner.action("scans/"+str(self.scan_id)+"/export",
method="post",
extra=extra)
file_id = self.scanner.res["file"]
while self._export_in_progress(file_id):
time.sleep(2)
dl_url = "scans/"+str(self.scan_id)+"/export/"+str(file_id)+"/download"
content = self.scanner.action(dl_url, method="get", download=True)
with open(filename,"w") as out_file:
out_file.write(content)
def _export_in_progress(self,file_id):
url = "scans/"+str(self.scan_id)+"/export/"+str(file_id)+"/status"
self.scanner.action(url,method="get")
return self.scanner.res["status"] != "ready"
def _find_inputs(self,obj):
result = {}
if type(obj) is dict:
if "inputs" in obj and obj["inputs"]:
result.update(self._extract_inputs(obj["inputs"]))
for key in obj:
result.update(self._find_inputs(obj[key]))
elif type(obj) is list:
for item in obj:
result.update(self._find_inputs(item))
return result
def _extract_inputs(self,inputs):
result = {}
for item in inputs:
key = ""
kind = ""
value = ""
if "id" in item:
key = item["id"]
if "type" in item:
kind = item["type"]
if "default" in item:
value = item["default"]
if key and not kind == "entry" and not value == None:
result[key] = value
return result
def status_info(self):
self.get_scan_info()
duration = self.info["scan_end"]-self.info["scan_start"]
return "%s\nStatus: %s\nStart: %s\nEnd: %s\nDuration: %s" % (
str(self.info["name"]),
str(self.info["status"]),
datetime.datetime.fromtimestamp(self.info["scan_start"]).strftime("%Y/%m/%d %H:%M:%S"),
datetime.datetime.fromtimestamp(self.info["scan_end"]).strftime("%Y/%m/%d %H:%M:%S"),
"%d:%02d" % (int(duration/60),int(duration%60)))
def _error(self,message):
print(message)
sys.exit()
| {
"content_hash": "2c96e525b56debb235294c61444ccc6d",
"timestamp": "",
"source": "github",
"line_count": 301,
"max_line_length": 121,
"avg_line_length": 38.079734219269106,
"alnum_prop": 0.5094224393648578,
"repo_name": "attritionorg/nessrest",
"id": "d33daab41bd997b70c4744048765adddc3af7074",
"size": "11463",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "nessrest/ness6scan.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "85296"
}
],
"symlink_target": ""
} |
"""Tests for streams.py"""
import asyncio
import unittest
from unittest import mock
from aiohttp import helpers, streams, test_utils
class TestStreamReader(unittest.TestCase):
    """Unit tests for ``aiohttp.streams.StreamReader``.

    Every test runs on a private event loop.  Data and EOF are fed
    either up front or from callbacks scheduled with
    ``loop.call_soon`` / tasks, so the reader's waiter machinery is
    exercised both on the fast (buffered) and slow (waiting) paths.
    """
    # Canonical fixture: three newline-terminated lines, 18 bytes total.
    DATA = b'line1\nline2\nline3\n'
    def setUp(self):
        self.time_service = None
        # Fresh private loop; unset the global one so nothing leaks in.
        self.loop = asyncio.new_event_loop()
        asyncio.set_event_loop(None)
    def tearDown(self):
        self.loop.close()
    def _make_one(self, *args, **kwargs):
        """Build a StreamReader; 'timeout=' is translated into a timer."""
        if 'timeout' in kwargs:
            self.time_service = helpers.TimeService(self.loop, interval=0.01)
            self.addCleanup(self.time_service.close)
            kwargs['timer'] = self.time_service.timeout(kwargs.pop('timeout'))
        return streams.StreamReader(loop=self.loop, *args, **kwargs)
    def test_create_waiter(self):
        # A second concurrent waiter is a programming error.
        stream = self._make_one()
        stream._waiter = helpers.create_future(self.loop)
        with self.assertRaises(RuntimeError):
            self.loop.run_until_complete(stream._wait('test'))
    @mock.patch('aiohttp.streams.asyncio')
    def test_ctor_global_loop(self, m_asyncio):
        # Without an explicit loop the ctor falls back to the global one.
        stream = streams.StreamReader()
        self.assertIs(stream._loop, m_asyncio.get_event_loop.return_value)
    def test_at_eof(self):
        # at_eof() requires both EOF fed AND an empty buffer.
        stream = self._make_one()
        self.assertFalse(stream.at_eof())
        stream.feed_data(b'some data\n')
        self.assertFalse(stream.at_eof())
        self.loop.run_until_complete(stream.readline())
        self.assertFalse(stream.at_eof())
        stream.feed_data(b'some data\n')
        stream.feed_eof()
        self.loop.run_until_complete(stream.readline())
        self.assertTrue(stream.at_eof())
    def test_wait_eof(self):
        # wait_eof() blocks until a later task feeds EOF, then clears
        # the internal eof waiter.
        stream = self._make_one()
        wait_task = asyncio.Task(stream.wait_eof(), loop=self.loop)
        def cb():
            yield from asyncio.sleep(0.1, loop=self.loop)
            stream.feed_eof()
        asyncio.Task(cb(), loop=self.loop)
        self.loop.run_until_complete(wait_task)
        self.assertTrue(stream.is_eof())
        self.assertIsNone(stream._eof_waiter)
    def test_wait_eof_eof(self):
        # wait_eof() returns immediately when EOF was already fed.
        stream = self._make_one()
        stream.feed_eof()
        wait_task = asyncio.Task(stream.wait_eof(), loop=self.loop)
        self.loop.run_until_complete(wait_task)
        self.assertTrue(stream.is_eof())
    def test_feed_empty_data(self):
        stream = self._make_one()
        stream.feed_data(b'')
        stream.feed_eof()
        data = self.loop.run_until_complete(stream.read())
        self.assertEqual(b'', data)
    def test_feed_nonempty_data(self):
        stream = self._make_one()
        stream.feed_data(self.DATA)
        stream.feed_eof()
        data = self.loop.run_until_complete(stream.read())
        self.assertEqual(self.DATA, data)
    def test_read_zero(self):
        # Read zero bytes.
        stream = self._make_one()
        stream.feed_data(self.DATA)
        data = self.loop.run_until_complete(stream.read(0))
        self.assertEqual(b'', data)
        stream.feed_eof()
        data = self.loop.run_until_complete(stream.read())
        self.assertEqual(self.DATA, data)
    def test_read(self):
        # Read bytes.
        stream = self._make_one()
        read_task = asyncio.Task(stream.read(30), loop=self.loop)
        def cb():
            stream.feed_data(self.DATA)
        self.loop.call_soon(cb)
        data = self.loop.run_until_complete(read_task)
        self.assertEqual(self.DATA, data)
        stream.feed_eof()
        data = self.loop.run_until_complete(stream.read())
        self.assertEqual(b'', data)
    def test_read_line_breaks(self):
        # Read bytes without line breaks.
        stream = self._make_one()
        stream.feed_data(b'line1')
        stream.feed_data(b'line2')
        data = self.loop.run_until_complete(stream.read(5))
        self.assertEqual(b'line1', data)
        data = self.loop.run_until_complete(stream.read(5))
        self.assertEqual(b'line2', data)
    def test_read_all(self):
        # Read all avaliable buffered bytes
        stream = self._make_one()
        stream.feed_data(b'line1')
        stream.feed_data(b'line2')
        stream.feed_eof()
        data = self.loop.run_until_complete(stream.read())
        self.assertEqual(b'line1line2', data)
    def test_read_up_to(self):
        # Read available buffered bytes up to requested amount
        stream = self._make_one()
        stream.feed_data(b'line1')
        stream.feed_data(b'line2')
        data = self.loop.run_until_complete(stream.read(8))
        self.assertEqual(b'line1lin', data)
        data = self.loop.run_until_complete(stream.read(8))
        self.assertEqual(b'e2', data)
    def test_read_eof(self):
        # Read bytes, stop at eof.
        stream = self._make_one()
        read_task = asyncio.Task(stream.read(1024), loop=self.loop)
        def cb():
            stream.feed_eof()
        self.loop.call_soon(cb)
        data = self.loop.run_until_complete(read_task)
        self.assertEqual(b'', data)
        data = self.loop.run_until_complete(stream.read())
        self.assertEqual(data, b'')
    @mock.patch('aiohttp.streams.internal_logger')
    def test_read_eof_infinit(self, internal_logger):
        # Repeated reads after EOF should trigger an internal warning.
        stream = self._make_one()
        stream.feed_eof()
        self.loop.run_until_complete(stream.read())
        self.loop.run_until_complete(stream.read())
        self.loop.run_until_complete(stream.read())
        self.loop.run_until_complete(stream.read())
        self.loop.run_until_complete(stream.read())
        self.loop.run_until_complete(stream.read())
        self.assertTrue(internal_logger.warning.called)
    def test_read_until_eof(self):
        # Read all bytes until eof.
        stream = self._make_one()
        read_task = asyncio.Task(stream.read(-1), loop=self.loop)
        def cb():
            stream.feed_data(b'chunk1\n')
            stream.feed_data(b'chunk2')
            stream.feed_eof()
        self.loop.call_soon(cb)
        data = self.loop.run_until_complete(read_task)
        self.assertEqual(b'chunk1\nchunk2', data)
        data = self.loop.run_until_complete(stream.read())
        self.assertEqual(b'', data)
    def test_read_exception(self):
        # A set exception surfaces on the next read attempt.
        stream = self._make_one()
        stream.feed_data(b'line\n')
        data = self.loop.run_until_complete(stream.read(2))
        self.assertEqual(b'li', data)
        stream.set_exception(ValueError())
        self.assertRaises(
            ValueError, self.loop.run_until_complete, stream.read(2))
    def test_readline(self):
        # Read one line. 'readline' will need to wait for the data
        # to come from 'cb'
        stream = self._make_one()
        stream.feed_data(b'chunk1 ')
        read_task = asyncio.Task(stream.readline(), loop=self.loop)
        def cb():
            stream.feed_data(b'chunk2 ')
            stream.feed_data(b'chunk3 ')
            stream.feed_data(b'\n chunk4')
        self.loop.call_soon(cb)
        line = self.loop.run_until_complete(read_task)
        self.assertEqual(b'chunk1 chunk2 chunk3 \n', line)
        stream.feed_eof()
        data = self.loop.run_until_complete(stream.read())
        self.assertEqual(b' chunk4', data)
    def test_readline_limit_with_existing_data(self):
        # Read one line. The data is in StreamReader's buffer
        # before the event loop is run.
        stream = self._make_one(limit=3)
        stream.feed_data(b'li')
        stream.feed_data(b'ne1\nline2\n')
        self.assertRaises(
            ValueError, self.loop.run_until_complete, stream.readline())
        # The buffer should contain the remaining data after exception
        stream.feed_eof()
        data = self.loop.run_until_complete(stream.read())
        self.assertEqual(b'line2\n', data)
    def test_readline_limit(self):
        # Read one line. StreamReaders are fed with data after
        # their 'readline' methods are called.
        stream = self._make_one(limit=7)
        def cb():
            stream.feed_data(b'chunk1')
            stream.feed_data(b'chunk2')
            stream.feed_data(b'chunk3\n')
            stream.feed_eof()
        self.loop.call_soon(cb)
        self.assertRaises(
            ValueError, self.loop.run_until_complete, stream.readline())
        stream = self._make_one(limit=7)
        def cb():
            stream.feed_data(b'chunk1')
            stream.feed_data(b'chunk2\n')
            stream.feed_data(b'chunk3\n')
            stream.feed_eof()
        self.loop.call_soon(cb)
        self.assertRaises(
            ValueError, self.loop.run_until_complete, stream.readline())
        data = self.loop.run_until_complete(stream.read())
        self.assertEqual(b'chunk3\n', data)
    def test_readline_nolimit_nowait(self):
        # All needed data for the first 'readline' call will be
        # in the buffer.
        stream = self._make_one()
        stream.feed_data(self.DATA[:6])
        stream.feed_data(self.DATA[6:])
        line = self.loop.run_until_complete(stream.readline())
        self.assertEqual(b'line1\n', line)
        stream.feed_eof()
        data = self.loop.run_until_complete(stream.read())
        self.assertEqual(b'line2\nline3\n', data)
    def test_readline_eof(self):
        # A partial line is returned as-is at EOF.
        stream = self._make_one()
        stream.feed_data(b'some data')
        stream.feed_eof()
        line = self.loop.run_until_complete(stream.readline())
        self.assertEqual(b'some data', line)
    def test_readline_empty_eof(self):
        stream = self._make_one()
        stream.feed_eof()
        line = self.loop.run_until_complete(stream.readline())
        self.assertEqual(b'', line)
    def test_readline_read_byte_count(self):
        # readline() and read(n) share the same buffer position.
        stream = self._make_one()
        stream.feed_data(self.DATA)
        self.loop.run_until_complete(stream.readline())
        data = self.loop.run_until_complete(stream.read(7))
        self.assertEqual(b'line2\nl', data)
        stream.feed_eof()
        data = self.loop.run_until_complete(stream.read())
        self.assertEqual(b'ine3\n', data)
    def test_readline_exception(self):
        stream = self._make_one()
        stream.feed_data(b'line\n')
        data = self.loop.run_until_complete(stream.readline())
        self.assertEqual(b'line\n', data)
        stream.set_exception(ValueError())
        self.assertRaises(
            ValueError, self.loop.run_until_complete, stream.readline())
    def test_readexactly_zero_or_less(self):
        # Read exact number of bytes (zero or less).
        stream = self._make_one()
        stream.feed_data(self.DATA)
        data = self.loop.run_until_complete(stream.readexactly(0))
        self.assertEqual(b'', data)
        stream.feed_eof()
        data = self.loop.run_until_complete(stream.read())
        self.assertEqual(self.DATA, data)
        stream = self._make_one()
        stream.feed_data(self.DATA)
        data = self.loop.run_until_complete(stream.readexactly(-1))
        self.assertEqual(b'', data)
        stream.feed_eof()
        data = self.loop.run_until_complete(stream.read())
        self.assertEqual(self.DATA, data)
    def test_readexactly(self):
        # Read exact number of bytes.
        stream = self._make_one()
        n = 2 * len(self.DATA)
        read_task = asyncio.Task(stream.readexactly(n), loop=self.loop)
        def cb():
            stream.feed_data(self.DATA)
            stream.feed_data(self.DATA)
            stream.feed_data(self.DATA)
        self.loop.call_soon(cb)
        data = self.loop.run_until_complete(read_task)
        self.assertEqual(self.DATA + self.DATA, data)
        stream.feed_eof()
        data = self.loop.run_until_complete(stream.read())
        self.assertEqual(self.DATA, data)
    def test_readexactly_eof(self):
        # Read exact number of bytes (eof).
        stream = self._make_one()
        n = 2 * len(self.DATA)
        read_task = asyncio.Task(stream.readexactly(n), loop=self.loop)
        def cb():
            stream.feed_data(self.DATA)
            stream.feed_eof()
        self.loop.call_soon(cb)
        with self.assertRaises(asyncio.IncompleteReadError) as cm:
            self.loop.run_until_complete(read_task)
        self.assertEqual(cm.exception.partial, self.DATA)
        self.assertEqual(cm.exception.expected, n)
        self.assertEqual(str(cm.exception),
                         '18 bytes read on a total of 36 expected bytes')
        data = self.loop.run_until_complete(stream.read())
        self.assertEqual(b'', data)
    def test_readexactly_exception(self):
        stream = self._make_one()
        stream.feed_data(b'line\n')
        data = self.loop.run_until_complete(stream.readexactly(2))
        self.assertEqual(b'li', data)
        stream.set_exception(ValueError())
        self.assertRaises(
            ValueError, self.loop.run_until_complete, stream.readexactly(2))
    def test_unread_data(self):
        # unread_data() pushes bytes back to the front of the buffer,
        # including arbitrary bytes and pushes after EOF.
        stream = self._make_one()
        stream.feed_data(b'line1')
        stream.feed_data(b'line2')
        stream.feed_data(b'onemoreline')
        data = self.loop.run_until_complete(stream.read(5))
        self.assertEqual(b'line1', data)
        stream.unread_data(data)
        data = self.loop.run_until_complete(stream.read(5))
        self.assertEqual(b'line1', data)
        data = self.loop.run_until_complete(stream.read(4))
        self.assertEqual(b'line', data)
        stream.unread_data(b'line1line')
        data = b''
        while len(data) < 10:
            data += self.loop.run_until_complete(stream.read(10))
        self.assertEqual(b'line1line2', data)
        data = self.loop.run_until_complete(stream.read(7))
        self.assertEqual(b'onemore', data)
        stream.unread_data(data)
        data = b''
        while len(data) < 11:
            data += self.loop.run_until_complete(stream.read(11))
        self.assertEqual(b'onemoreline', data)
        stream.unread_data(b'line')
        data = self.loop.run_until_complete(stream.read(4))
        self.assertEqual(b'line', data)
        stream.feed_eof()
        stream.unread_data(b'at_eof')
        data = self.loop.run_until_complete(stream.read(6))
        self.assertEqual(b'at_eof', data)
    def test_exception(self):
        stream = self._make_one()
        self.assertIsNone(stream.exception())
        exc = ValueError()
        stream.set_exception(exc)
        self.assertIs(stream.exception(), exc)
    def test_exception_waiter(self):
        # An exception set while a reader is waiting wakes the reader.
        stream = self._make_one()
        @asyncio.coroutine
        def set_err():
            stream.set_exception(ValueError())
        t1 = asyncio.Task(stream.readline(), loop=self.loop)
        t2 = asyncio.Task(set_err(), loop=self.loop)
        self.loop.run_until_complete(asyncio.wait([t1, t2], loop=self.loop))
        self.assertRaises(ValueError, t1.result)
    def test_exception_cancel(self):
        stream = self._make_one()
        @asyncio.coroutine
        def read_a_line():
            yield from stream.readline()
        t = asyncio.Task(read_a_line(), loop=self.loop)
        test_utils.run_briefly(self.loop)
        t.cancel()
        test_utils.run_briefly(self.loop)
        # The following line fails if set_exception() isn't careful.
        stream.set_exception(RuntimeError('message'))
        test_utils.run_briefly(self.loop)
        self.assertIs(stream._waiter, None)
    def test_readany_eof(self):
        # readany() returns whatever arrives first.
        stream = self._make_one()
        read_task = asyncio.Task(stream.readany(), loop=self.loop)
        self.loop.call_soon(stream.feed_data, b'chunk1\n')
        data = self.loop.run_until_complete(read_task)
        self.assertEqual(b'chunk1\n', data)
        stream.feed_eof()
        data = self.loop.run_until_complete(stream.read())
        self.assertEqual(b'', data)
    def test_readany_empty_eof(self):
        stream = self._make_one()
        stream.feed_eof()
        read_task = asyncio.Task(stream.readany(), loop=self.loop)
        data = self.loop.run_until_complete(read_task)
        self.assertEqual(b'', data)
    def test_readany_exception(self):
        stream = self._make_one()
        stream.feed_data(b'line\n')
        data = self.loop.run_until_complete(stream.readany())
        self.assertEqual(b'line\n', data)
        stream.set_exception(ValueError())
        self.assertRaises(
            ValueError, self.loop.run_until_complete, stream.readany())
    def test_read_nowait(self):
        # read_nowait() drains the buffer synchronously.
        stream = self._make_one()
        stream.feed_data(b'line1\nline2\n')
        self.assertEqual(stream.read_nowait(), b'line1\nline2\n')
        self.assertEqual(stream.read_nowait(), b'')
        stream.feed_eof()
        data = self.loop.run_until_complete(stream.read())
        self.assertEqual(b'', data)
    def test_read_nowait_n(self):
        stream = self._make_one()
        stream.feed_data(b'line1\nline2\n')
        self.assertEqual(
            stream.read_nowait(4), b'line')
        self.assertEqual(
            stream.read_nowait(), b'1\nline2\n')
        self.assertEqual(stream.read_nowait(), b'')
        stream.feed_eof()
        data = self.loop.run_until_complete(stream.read())
        self.assertEqual(b'', data)
    def test_read_nowait_exception(self):
        stream = self._make_one()
        stream.feed_data(b'line\n')
        stream.set_exception(ValueError())
        self.assertRaises(ValueError, stream.read_nowait)
    def test_read_nowait_waiter(self):
        # read_nowait() is illegal while an async reader is waiting.
        stream = self._make_one()
        stream.feed_data(b'line\n')
        stream._waiter = helpers.create_future(self.loop)
        self.assertRaises(RuntimeError, stream.read_nowait)
    def test_readchunk(self):
        # Without HTTP-chunk bracketing, readchunk() never reports an
        # end-of-chunk boundary.
        stream = self._make_one()
        def cb():
            stream.feed_data(b'chunk1')
            stream.feed_data(b'chunk2')
            stream.feed_eof()
        self.loop.call_soon(cb)
        data, end_of_chunk = self.loop.run_until_complete(stream.readchunk())
        self.assertEqual(b'chunk1', data)
        self.assertFalse(end_of_chunk)
        data, end_of_chunk = self.loop.run_until_complete(stream.readchunk())
        self.assertEqual(b'chunk2', data)
        self.assertFalse(end_of_chunk)
        data, end_of_chunk = self.loop.run_until_complete(stream.readchunk())
        self.assertEqual(b'', data)
        self.assertFalse(end_of_chunk)
    def test_readchunk_wait_eof(self):
        stream = self._make_one()
        def cb():
            yield from asyncio.sleep(0.1, loop=self.loop)
            stream.feed_eof()
        asyncio.Task(cb(), loop=self.loop)
        data, end_of_chunk = self.loop.run_until_complete(stream.readchunk())
        self.assertEqual(b"", data)
        self.assertFalse(end_of_chunk)
        self.assertTrue(stream.is_eof())
    def test_begin_and_end_chunk_receiving(self):
        # begin/end_http_chunk_receiving bracket data so readchunk()
        # can report chunk boundaries.
        stream = self._make_one()
        stream.begin_http_chunk_receiving()
        stream.feed_data(b'part1')
        stream.feed_data(b'part2')
        stream.end_http_chunk_receiving()
        data, end_of_chunk = self.loop.run_until_complete(stream.readchunk())
        self.assertEqual(b'part1part2', data)
        self.assertTrue(end_of_chunk)
        stream.begin_http_chunk_receiving()
        stream.feed_data(b'part3')
        data, end_of_chunk = self.loop.run_until_complete(stream.readchunk())
        self.assertEqual(b'part3', data)
        self.assertFalse(end_of_chunk)
        stream.end_http_chunk_receiving()
        data, end_of_chunk = self.loop.run_until_complete(stream.readchunk())
        self.assertEqual(b'', data)
        self.assertTrue(end_of_chunk)
        stream.feed_eof()
        data, end_of_chunk = self.loop.run_until_complete(stream.readchunk())
        self.assertEqual(b'', data)
        self.assertFalse(end_of_chunk)
    def test_end_chunk_receiving_without_begin(self):
        stream = self._make_one()
        self.assertRaises(RuntimeError, stream.end_http_chunk_receiving)
    def test_readchunk_with_unread(self):
        """Test that stream.unread does not break controlled chunk receiving.
        """
        stream = self._make_one()
        # Send 2 chunks
        stream.begin_http_chunk_receiving()
        stream.feed_data(b'part1')
        stream.end_http_chunk_receiving()
        stream.begin_http_chunk_receiving()
        stream.feed_data(b'part2')
        stream.end_http_chunk_receiving()
        # Read only one chunk
        data, end_of_chunk = self.loop.run_until_complete(stream.readchunk())
        # Try to unread a part of the first chunk
        stream.unread_data(b'rt1')
        # The end_of_chunk signal was already received for the first chunk,
        # so we receive up to the second one
        data, end_of_chunk = self.loop.run_until_complete(stream.readchunk())
        self.assertEqual(b'rt1part2', data)
        self.assertTrue(end_of_chunk)
        # Unread a part of the second chunk
        stream.unread_data(b'rt2')
        data, end_of_chunk = self.loop.run_until_complete(stream.readchunk())
        self.assertEqual(b'rt2', data)
        # end_of_chunk was already received for this chunk
        self.assertFalse(end_of_chunk)
        stream.feed_eof()
        data, end_of_chunk = self.loop.run_until_complete(stream.readchunk())
        self.assertEqual(b'', data)
        self.assertFalse(end_of_chunk)
    def test_readchunk_with_other_read_calls(self):
        """Test that stream.readchunk works when other read calls are made on
        the stream.
        """
        stream = self._make_one()
        stream.begin_http_chunk_receiving()
        stream.feed_data(b'part1')
        stream.end_http_chunk_receiving()
        stream.begin_http_chunk_receiving()
        stream.feed_data(b'part2')
        stream.end_http_chunk_receiving()
        data = self.loop.run_until_complete(stream.read(7))
        self.assertEqual(b'part1pa', data)
        data, end_of_chunk = self.loop.run_until_complete(stream.readchunk())
        self.assertEqual(b'rt2', data)
        self.assertTrue(end_of_chunk)
        stream.feed_eof()
        data, end_of_chunk = self.loop.run_until_complete(stream.readchunk())
        self.assertEqual(b'', data)
        self.assertFalse(end_of_chunk)
    def test___repr__(self):
        stream = self._make_one()
        self.assertEqual("<StreamReader>", repr(stream))
    def test___repr__nondefault_limit(self):
        stream = self._make_one(limit=123)
        self.assertEqual("<StreamReader l=123>", repr(stream))
    def test___repr__eof(self):
        stream = self._make_one()
        stream.feed_eof()
        self.assertEqual("<StreamReader eof>", repr(stream))
    def test___repr__data(self):
        stream = self._make_one()
        stream.feed_data(b'data')
        self.assertEqual("<StreamReader 4 bytes>", repr(stream))
    def test___repr__exception(self):
        stream = self._make_one()
        exc = RuntimeError()
        stream.set_exception(exc)
        self.assertEqual("<StreamReader e=RuntimeError()>", repr(stream))
    def test___repr__waiter(self):
        stream = self._make_one()
        stream._waiter = helpers.create_future(self.loop)
        self.assertRegex(
            repr(stream),
            "<StreamReader w=<Future pending[\S ]*>>")
        stream._waiter.set_result(None)
        self.loop.run_until_complete(stream._waiter)
        stream._waiter = None
        self.assertEqual("<StreamReader>", repr(stream))
    def test_unread_empty(self):
        # Unreading an empty byte string is a harmless no-op.
        stream = self._make_one()
        stream.feed_data(b'line1')
        stream.feed_eof()
        stream.unread_data(b'')
        data = self.loop.run_until_complete(stream.read(5))
        self.assertEqual(b'line1', data)
        self.assertTrue(stream.at_eof())
class TestEmptyStreamReader(unittest.TestCase):
    """Tests for ``streams.EmptyStreamReader``, the always-at-EOF stub."""
    def setUp(self):
        # Private loop; unset the global one so nothing leaks in.
        self.loop = asyncio.new_event_loop()
        asyncio.set_event_loop(None)
    def tearDown(self):
        self.loop.close()
    def test_empty_stream_reader(self):
        # Mutators are no-ops and every read path yields empty results,
        # except readexactly(), which must fail with IncompleteReadError.
        s = streams.EmptyStreamReader()
        self.assertIsNone(s.set_exception(ValueError()))
        self.assertIsNone(s.exception())
        self.assertIsNone(s.feed_eof())
        self.assertIsNone(s.feed_data(b'data'))
        self.assertTrue(s.at_eof())
        self.assertIsNone(
            self.loop.run_until_complete(s.wait_eof()))
        self.assertEqual(
            self.loop.run_until_complete(s.read()), b'')
        self.assertEqual(
            self.loop.run_until_complete(s.readline()), b'')
        self.assertEqual(
            self.loop.run_until_complete(s.readany()), b'')
        self.assertEqual(
            self.loop.run_until_complete(s.readchunk()), (b'', False))
        self.assertRaises(
            asyncio.IncompleteReadError,
            self.loop.run_until_complete, s.readexactly(10))
        self.assertEqual(s.read_nowait(), b'')
class DataQueueMixin:
    def test_is_eof(self):
        # is_eof() flips to True permanently once EOF is fed.
        self.assertFalse(self.buffer.is_eof())
        self.buffer.feed_eof()
        self.assertTrue(self.buffer.is_eof())
    def test_at_eof(self):
        # at_eof() requires both the EOF flag AND an empty buffer.
        self.assertFalse(self.buffer.at_eof())
        self.buffer.feed_eof()
        self.assertTrue(self.buffer.at_eof())
        self.buffer._buffer.append(object())
        self.assertFalse(self.buffer.at_eof())
    def test_feed_data(self):
        # feed_data(item, size) stores (item, size) pairs in arrival order.
        item = object()
        self.buffer.feed_data(item, 1)
        self.assertEqual([(item, 1)], list(self.buffer._buffer))
    def test_feed_eof(self):
        # feed_eof() just raises the internal EOF flag.
        self.buffer.feed_eof()
        self.assertTrue(self.buffer._eof)
    def test_read(self):
        # read() blocks until a loop callback feeds an item, then
        # returns that exact object.
        item = object()
        read_task = asyncio.Task(self.buffer.read(), loop=self.loop)
        def cb():
            self.buffer.feed_data(item, 1)
        self.loop.call_soon(cb)
        data = self.loop.run_until_complete(read_task)
        self.assertIs(item, data)
def test_read_eof(self):
read_task = asyncio.Task(self.buffer.read(), loop=self.loop)
def cb():
self.buffer.feed_eof()
self.loop.call_soon(cb)
self.assertRaises(
streams.EofStream, self.loop.run_until_complete, read_task)
def test_read_cancelled(self):
read_task = asyncio.Task(self.buffer.read(), loop=self.loop)
test_utils.run_briefly(self.loop)
waiter = self.buffer._waiter
self.assertTrue(helpers.isfuture(waiter))
read_task.cancel()
self.assertRaises(
asyncio.CancelledError,
self.loop.run_until_complete, read_task)
self.assertTrue(waiter.cancelled())
self.assertIsNone(self.buffer._waiter)
self.buffer.feed_data(b'test', 4)
self.assertIsNone(self.buffer._waiter)
def test_read_until_eof(self):
item = object()
self.buffer.feed_data(item, 1)
self.buffer.feed_eof()
data = self.loop.run_until_complete(self.buffer.read())
self.assertIs(data, item)
self.assertRaises(
streams.EofStream,
self.loop.run_until_complete, self.buffer.read())
def test_read_exc(self):
item = object()
self.buffer.feed_data(item)
self.buffer.set_exception(ValueError)
read_task = asyncio.Task(self.buffer.read(), loop=self.loop)
data = self.loop.run_until_complete(read_task)
self.assertIs(item, data)
self.assertRaises(
ValueError, self.loop.run_until_complete, self.buffer.read())
def test_read_exception(self):
self.buffer.set_exception(ValueError())
self.assertRaises(
ValueError, self.loop.run_until_complete, self.buffer.read())
def test_read_exception_with_data(self):
val = object()
self.buffer.feed_data(val, 1)
self.buffer.set_exception(ValueError())
self.assertIs(val, self.loop.run_until_complete(self.buffer.read()))
self.assertRaises(
ValueError, self.loop.run_until_complete, self.buffer.read())
def test_read_exception_on_wait(self):
read_task = asyncio.Task(self.buffer.read(), loop=self.loop)
test_utils.run_briefly(self.loop)
self.assertTrue(helpers.isfuture(self.buffer._waiter))
self.buffer.feed_eof()
self.buffer.set_exception(ValueError())
self.assertRaises(
ValueError, self.loop.run_until_complete, read_task)
def test_exception(self):
self.assertIsNone(self.buffer.exception())
exc = ValueError()
self.buffer.set_exception(exc)
self.assertIs(self.buffer.exception(), exc)
def test_exception_waiter(self):
@asyncio.coroutine
def set_err():
self.buffer.set_exception(ValueError())
t1 = asyncio.Task(self.buffer.read(), loop=self.loop)
t2 = asyncio.Task(set_err(), loop=self.loop)
self.loop.run_until_complete(asyncio.wait([t1, t2], loop=self.loop))
self.assertRaises(ValueError, t1.result)
class TestDataQueue(unittest.TestCase, DataQueueMixin):
    """Run the shared DataQueueMixin scenarios against streams.DataQueue."""

    def setUp(self):
        self.loop = asyncio.new_event_loop()
        self.buffer = streams.DataQueue(loop=self.loop)
        asyncio.set_event_loop(None)

    def tearDown(self):
        self.loop.close()
class TestChunksQueue(unittest.TestCase, DataQueueMixin):
    """Mixin scenarios plus ChunksQueue-specific EOF behaviour."""

    def setUp(self):
        self.loop = asyncio.new_event_loop()
        self.buffer = streams.ChunksQueue(loop=self.loop)
        asyncio.set_event_loop(None)

    def tearDown(self):
        self.loop.close()

    def test_read_eof(self):
        # unlike DataQueue, read() returns instead of raising at EOF
        read_task = asyncio.Task(self.buffer.read(), loop=self.loop)
        self.loop.call_soon(self.buffer.feed_eof)
        self.loop.run_until_complete(read_task)
        self.assertTrue(self.buffer.at_eof())

    def test_read_until_eof(self):
        item = object()
        self.buffer.feed_data(item, 1)
        self.buffer.feed_eof()
        self.assertIs(self.loop.run_until_complete(self.buffer.read()), item)
        # a drained queue yields empty bytes rather than EofStream
        tail = self.loop.run_until_complete(self.buffer.read())
        self.assertEqual(tail, b'')
        self.assertTrue(self.buffer.at_eof())

    def test_readany(self):
        # readany is literally the same function as read
        self.assertIs(self.buffer.read.__func__, self.buffer.readany.__func__)
def test_feed_data_waiters(loop):
    """feed_data wakes the read waiter but leaves the EOF waiter pending."""
    reader = streams.StreamReader(loop=loop)
    read_waiter = reader._waiter = helpers.create_future(loop)
    eof_waiter = reader._eof_waiter = helpers.create_future(loop)

    reader.feed_data(b'1')

    assert list(reader._buffer) == [b'1']
    assert reader._size == 1
    assert reader.total_bytes == 1
    assert read_waiter.done()
    assert reader._waiter is None
    assert not eof_waiter.done()
    assert reader._eof_waiter is eof_waiter
def test_feed_data_completed_waiters(loop):
    """An already-resolved read waiter is discarded, not resolved again."""
    reader = streams.StreamReader(loop=loop)
    done_waiter = reader._waiter = helpers.create_future(loop)
    done_waiter.set_result(1)
    reader.feed_data(b'1')
    assert reader._waiter is None
def test_feed_eof_waiters(loop):
    """feed_eof resolves and detaches both pending waiters."""
    reader = streams.StreamReader(loop=loop)
    read_waiter = reader._waiter = helpers.create_future(loop)
    eof_waiter = reader._eof_waiter = helpers.create_future(loop)

    reader.feed_eof()

    assert reader._eof
    assert read_waiter.done()
    assert eof_waiter.done()
    assert reader._waiter is None
    assert reader._eof_waiter is None
def test_feed_eof_cancelled(loop):
    """feed_eof tolerates waiters that already completed beforehand."""
    reader = streams.StreamReader(loop=loop)
    read_waiter = reader._waiter = helpers.create_future(loop)
    eof_waiter = reader._eof_waiter = helpers.create_future(loop)
    read_waiter.set_result(1)
    eof_waiter.set_result(1)

    reader.feed_eof()

    assert read_waiter.done()
    assert eof_waiter.done()
    assert reader._waiter is None
    assert reader._eof_waiter is None
def test_on_eof(loop):
    """A registered EOF callback fires exactly when feed_eof arrives."""
    reader = streams.StreamReader(loop=loop)
    callback = mock.Mock()
    reader.on_eof(callback)
    assert not callback.called
    reader.feed_eof()
    assert callback.called
def test_on_eof_empty_reader(loop):
    """EmptyStreamReader is born at EOF, so callbacks fire immediately."""
    callback = mock.Mock()
    streams.EmptyStreamReader().on_eof(callback)
    assert callback.called
def test_on_eof_exc_in_callback(loop):
    """A raising callback is swallowed and the callback list cleared."""
    reader = streams.StreamReader(loop=loop)
    callback = mock.Mock(side_effect=ValueError)
    reader.on_eof(callback)
    assert not callback.called
    reader.feed_eof()
    assert callback.called
    assert not reader._eof_callbacks
def test_on_eof_exc_in_callback_empty_stream_reader(loop):
    """EmptyStreamReader also swallows exceptions from EOF callbacks."""
    callback = mock.Mock(side_effect=ValueError)
    streams.EmptyStreamReader().on_eof(callback)
    assert callback.called
def test_on_eof_eof_is_set(loop):
    """Registering after EOF invokes the callback at once and drops it."""
    reader = streams.StreamReader(loop=loop)
    reader.feed_eof()
    callback = mock.Mock()
    reader.on_eof(callback)
    assert callback.called
    assert not reader._eof_callbacks
def test_on_eof_eof_is_set_exception(loop):
    """Immediate invocation after EOF also swallows callback errors."""
    reader = streams.StreamReader(loop=loop)
    reader.feed_eof()
    callback = mock.Mock(side_effect=ValueError)
    reader.on_eof(callback)
    assert callback.called
    assert not reader._eof_callbacks
def test_set_exception(loop):
    """set_exception propagates the error into both pending waiters."""
    reader = streams.StreamReader(loop=loop)
    read_waiter = reader._waiter = helpers.create_future(loop)
    eof_waiter = reader._eof_waiter = helpers.create_future(loop)

    boom = ValueError()
    reader.set_exception(boom)

    assert read_waiter.exception() is boom
    assert eof_waiter.exception() is boom
    assert reader._waiter is None
    assert reader._eof_waiter is None
def test_set_exception_cancelled(loop):
    """Waiters that already hold a result are left untouched by the error."""
    reader = streams.StreamReader(loop=loop)
    read_waiter = reader._waiter = helpers.create_future(loop)
    eof_waiter = reader._eof_waiter = helpers.create_future(loop)
    read_waiter.set_result(1)
    eof_waiter.set_result(1)

    reader.set_exception(ValueError())

    assert read_waiter.exception() is None
    assert eof_waiter.exception() is None
    assert reader._waiter is None
    assert reader._eof_waiter is None
def test_set_exception_eof_callbacks(loop):
    """A stream error clears EOF callbacks without ever invoking them."""
    reader = streams.StreamReader(loop=loop)
    callback = mock.Mock()
    reader.on_eof(callback)
    reader.set_exception(ValueError())
    assert not callback.called
    assert not reader._eof_callbacks
| {
"content_hash": "52760abaa318921d406a549f7fea6bb1",
"timestamp": "",
"source": "github",
"line_count": 1096,
"max_line_length": 78,
"avg_line_length": 31.256386861313867,
"alnum_prop": 0.6170125813702309,
"repo_name": "Eyepea/aiohttp",
"id": "d233b3e456468dfb11960f38162d4c14c88abaa2",
"size": "34257",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_streams.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Makefile",
"bytes": "1738"
},
{
"name": "PowerShell",
"bytes": "3361"
},
{
"name": "Python",
"bytes": "935198"
}
],
"symlink_target": ""
} |
import os
import regex as re
from errors import ContentTypeMismatch,FileNotFound
import utils
import base64
class Context(object):
    """Per-asset processing context (Python 2 code).

    Wraps one asset (``pathname``) within an ``environment`` and tracks the
    dependencies, required paths and stubs accumulated while the asset's
    processors run.
    """
    def __init__(self,environment,logical_path,pathname):
        self.environment = environment
        self._logical_path = logical_path
        self.pathname = pathname
        # current line being processed; used to annotate exceptions
        self.line = None
        self.object_id = id(self)
        self.required_paths = []
        self.stubbed_assets = []
        self.dependency_paths = []
        # an asset always depends at least on its own file
        self.dependency_assets = [pathname]
    @property
    def root_path(self):
        """Return the configured search path that contains this asset,
        or "" when none matches."""
        for path in self.environment.paths:
            # NOTE(review): substring match anywhere in pathname, not a
            # prefix check — a path nested under another could mismatch.
            if re.search(re.escape(path),self.pathname):
                return path
        return ""
    @property
    def logical_path(self):
        """Logical path with its final extension stripped."""
        filename,extname = os.path.splitext(self._logical_path)
        return re.sub(r"""%s$"""%extname,'',self._logical_path)
    @property
    def content_type(self):
        """MIME type of this asset, as determined by the environment."""
        return self.environment.get_content_type_of(self.pathname)
    def evaluate(self,path,**options):
        """Run the asset at *path* through its processor chain and return
        the resulting text.

        options:
            processors -- override the processor list from the attributes
            data       -- use this string instead of reading the file
        """
        pathname = self.resolve(path)
        attributes = self.environment.get_attributes_for(pathname)
        processors = options['processors'] if options.has_key('processors') else attributes.processors
        if options.has_key('data'):
            result = options['data']
        else:
            if hasattr(self.environment,'default_encoding'):
                filename,ext = os.path.splitext(pathname)
                encoding = self.environment.default_encoding
                result = utils.read_unicode(pathname,encoding)
            else:
                result = utils.read_unicode(pathname)
        for processor in processors:
            try:
                # each processor wraps the current result and re-renders it
                template = processor(pathname,block=lambda x: result)
                result = template.render(self,{})
            except Exception,e:
                self.annotate_exception(e)
        return result
    def resolve(self,path,**options):
        """Resolve *path* to an absolute filename.

        Raises FileNotFound when the path cannot be located; raises
        ContentTypeMismatch when a requested content_type disagrees with
        the path's extension.
        """
        attributes = self.environment.get_attributes_for(path)
        if os.path.isabs(path):
            # absolute paths are only checked for existence
            if self.environment.stat(path):
                return path
            else:
                raise FileNotFound("Couldn't find file '%s'" % path)
        elif options.has_key('content_type') and options['content_type']:
            # 'self' means "same content type as this context's asset"
            content_type = self.content_type if options['content_type'] == 'self' else options['content_type']
            if attributes.format_extension:
                attr_content_type = attributes.content_type
                if content_type != attr_content_type:
                    raise ContentTypeMismatch("%s is '%s', not '%s'"%(path,attr_content_type,content_type))
            def return_candidate(candidate):
                if self.content_type == self.environment.get_content_type_of(candidate):
                    return candidate
            # NOTE(review): recursive call passes callback=..., which this
            # method only forwards via **options to environment.resolve —
            # confirm environment.resolve honours 'callback'.
            asset = self.resolve(path,callback=return_candidate)
            if asset:
                return asset
            raise FileNotFound("Couldn't find file '%s'" % path)
        else:
            options['base_path'] = os.path.realpath(os.path.dirname(self.pathname))
            return self.environment.resolve(path,**options)
    def depend_on(self,path):
        """Record a path-only dependency (affects cache invalidation)."""
        self.dependency_paths.append(self.resolve(path))
    def depend_on_asset(self,path):
        """Record a full asset dependency."""
        filename = self.resolve(path)
        if filename:
            self.dependency_assets.append(filename)
    def require_asset(self,path):
        """Mark *path* (same content type) as required by this asset."""
        pathname = self.resolve(path,content_type='self')
        if pathname:
            self.depend_on_asset(pathname)
            self.required_paths.append(pathname)
    def stub_asset(self,path):
        """Exclude *path* from the bundle even if required elsewhere."""
        self.stubbed_assets.append(self.resolve(path,content_type ='self'))
    def is_asset_requirable(self,path):
        """True when *path* is an existing file with this asset's type."""
        pathname = self.resolve(path)
        content_type = self.environment.get_content_type_of(pathname)
        requirable = False
        if os.path.exists(pathname) and os.path.isfile(pathname):
            if self.content_type and content_type == self.content_type:
                requirable = True
        return requirable
    def annotate_exception(self,exception):
        """Re-raise *exception* with 'file:line' location appended."""
        location = self.pathname
        location += ":%s" % self.line if self.line else ''
        raise exception.__class__("%s\\n (in %s)" % (str(exception),location))
    def asset_data_uri(self,path):
        """Return the asset at *path* inlined as a base64 data: URI."""
        self.depend_on_asset(path)
        asset = self.environment.find_asset(path)
        s = base64.b64encode(str(asset))
        from urllib import quote_plus
        return "data:%s;base64,%s" % (asset.content_type,quote_plus(s))
| {
"content_hash": "8e09c70f106bea047f901d990d0a0346",
"timestamp": "",
"source": "github",
"line_count": 134,
"max_line_length": 101,
"avg_line_length": 29.007462686567163,
"alnum_prop": 0.7134036532029843,
"repo_name": "OiNutter/rivets",
"id": "4648aff3b3d87248582900bd40357f20da0315fd",
"size": "3887",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "rivets/context.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "584"
},
{
"name": "CoffeeScript",
"bytes": "7131"
},
{
"name": "JavaScript",
"bytes": "818"
},
{
"name": "Python",
"bytes": "164710"
}
],
"symlink_target": ""
} |
__author__ = 'Michael'
from gensim.models import Word2Vec
import EmotionSynsets
import numpy as np
#Get the most likely emotion for a given word vector:
def get_most_likely_emotion(vector_synsets, emotion_labels, word_vector, strategy='average'):
    """Return the emotion whose synset centroid is closest to *word_vector*.

    Parameters:
        vector_synsets: list of lists of word vectors, one inner list per
            emotion; empty inner lists are skipped.
        emotion_labels: labels parallel to *vector_synsets*.
        word_vector: the vector to classify.
        strategy: only 'average' (nearest per-synset mean) is implemented.

    Returns:
        A ``(label, distance)`` pair.  When every synset is empty the result
        is ``(None, inf)``, matching the original behaviour.

    Raises:
        ValueError: for an unknown *strategy*.  (The original code silently
        fell through and returned a bare ``None``, which crashed any caller
        that unpacked the expected 2-tuple.)
    """
    if strategy != 'average':
        raise ValueError("unknown strategy: %r" % (strategy,))
    best_distance = float('inf')
    best_emotion = None
    for label, synset in zip(emotion_labels, vector_synsets):
        if not synset:
            continue  # no known lemma vectors for this emotion
        centroid = np.mean(synset, axis=0)
        distance = np.linalg.norm(centroid - word_vector)
        if distance < best_distance:
            best_distance = distance
            best_emotion = label
    return best_emotion, best_distance
'''
Execution:
'''
if __name__ == '__main__':
    # Demo entry point (Python 2 file: note the print statement below).
    # Load a pre-trained gensim Word2Vec model from disk.
    model = Word2Vec.load('data/full_model')
    synsets, labels = EmotionSynsets.read_emotion_annotations('SentiSense_English_WordNet_3.0/SentiSense_Synsets_EN_30.xml')
    # Keep only lemmas present in the model vocabulary; synsets with no
    # known lemma become empty lists and are skipped downstream.
    vector_synsets = [[model[l.name] for l in synset.lemmas if l.name in model] for synset in synsets]
    print get_most_likely_emotion(vector_synsets, labels, model['fish'])
| {
"content_hash": "fdc99bbb3cadfc37980cef7de5c7392c",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 124,
"avg_line_length": 31.529411764705884,
"alnum_prop": 0.6305970149253731,
"repo_name": "MichSchli/EmoticonSynonyms",
"id": "b4f7db6ab1fe33e0cc6f0c5c46536cdc2f5d0835",
"size": "1072",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "EmotionMatching.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "1770"
}
],
"symlink_target": ""
} |
from datetime import datetime
from functools import wraps, update_wrapper
from flask import abort, make_response
from flask_login import current_user
from .models import Permission
def permission_required(permission):
    """Decorator factory: abort with 403 unless the current user
    holds *permission*."""
    def decorator(view):
        @wraps(view)  # copy __name__ etc. from the wrapped view
        def guarded(*args, **kwargs):
            if current_user.can(permission):
                return view(*args, **kwargs)
            abort(403)  # permission missing -> Forbidden
        return guarded
    return decorator
def admin_required(func):
    """Shortcut decorator requiring the ADMINISTER permission."""
    admin_only = permission_required(Permission.ADMINISTER)
    return admin_only(func)
# http://arusahni.net/blog/2014/03/flask-nocache.html
def nocache(func):
    """Decorator that stamps anti-caching headers on the wrapped view's
    response so browsers always refetch it."""
    def no_cache_view(*args, **kwargs):
        response = make_response(func(*args, **kwargs))
        response.last_modified = datetime.now()  # Last-Modified
        response.expires = -1                    # Expires
        cc = response.cache_control
        cc.max_age = 0
        cc.no_cache = True
        cc.no_store = True
        cc.must_revalidate = True
        # legacy IE directives, appended to the header built above
        response.headers['Cache-Control'] += ', post-check=0, pre-check=0'
        response.headers['Pragma'] = 'no-cache'
        return response
    return update_wrapper(no_cache_view, func)  # copy attrs from func
| {
"content_hash": "257602025c5405e2ad69429909ee3873",
"timestamp": "",
"source": "github",
"line_count": 40,
"max_line_length": 74,
"avg_line_length": 36.6,
"alnum_prop": 0.6639344262295082,
"repo_name": "1dot75cm/flasky",
"id": "4f2cb186708c4c81efe212080d3681a7ff8e0ca4",
"size": "1608",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/decorators.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "5949"
},
{
"name": "HTML",
"bytes": "38359"
},
{
"name": "JavaScript",
"bytes": "5311"
},
{
"name": "Makefile",
"bytes": "1427"
},
{
"name": "Mako",
"bytes": "412"
},
{
"name": "Nginx",
"bytes": "485"
},
{
"name": "Python",
"bytes": "186753"
}
],
"symlink_target": ""
} |
"""Testing malloovia hybrid model"""
from typing import Tuple
import pytest # type: ignore
import ruamel.yaml
from jsonschema import validate
from pulp import COIN, PulpSolverError # type: ignore
yaml = ruamel.yaml.YAML(typ="safe")
yaml.safe_load = yaml.load
from malloovia.model import (
LimitingSet,
InstanceClass,
App,
Workload,
Problem,
PerformanceSet,
PerformanceValues,
system_from_problem,
)
from malloovia import util
from malloovia import phases
from malloovia.solution_model import (
SolutionI, Status,
ReservedAllocation,
AllocationInfo,
MallooviaHistogram,
compute_allocation_cost,
compute_allocation_performance,
)
from malloovia import lpsolver
from .datapaths import PresetDataPaths
def create_problem():
    """Build the hybrid-cloud fixture shared by the tests in this module.

    Two apps, four instance classes (one public on-demand, one public
    reserved, two private reserved) and four timeslots of workload.
    The literal values below define the known optimal cost (252.0) that
    the tests assert, so they must not be changed casually.
    """
    # Malloovia's classic limiting sets, one public, another private
    r1 = LimitingSet("r1", name="us-east-1", max_vms=20)
    r2 = LimitingSet("r2", name="private-limits", max_cores=20)
    # Public on-demand instance class
    m3large = InstanceClass(
        "m3large",
        name="m3large",
        limiting_sets=(r1,),
        max_vms=20,
        price=10,
        time_unit="h",
    )
    # Public reserved instance class (cheaper than on-demand)
    m3large_r = InstanceClass(
        "m3large_r",
        name="m3large_r",
        limiting_sets=(r1,),
        max_vms=20,
        price=7,
        time_unit="h",
        is_reserved=True,
    )
    # Private classes: constrained by r2's max_cores=20 via their core counts
    m3priv = InstanceClass(
        "m3large_priv",
        name="m3large_priv",
        limiting_sets=(r2,),
        max_vms=20,
        price=1,
        cores=5,
        time_unit="h",
        is_private=True,
        is_reserved=True
    )
    m3priv2 = InstanceClass(
        "m3xlarge_priv",
        name="m3xlarge_priv",
        limiting_sets=(r2,),
        max_vms=20,
        price=1,
        cores=10,
        time_unit="h",
        is_private=True,
        is_reserved=True
    )
    app0 = App("app0", name="Test app0")
    app1 = App("app1", name="Test app1")
    # One workload trace per app, four hourly timeslots
    workloads = (
        Workload(
            "wl_app0",
            description="Test",
            app=app0,
            values=(100, 120, 100, 100),
            time_unit="h",
        ),
        Workload(
            "wl_app1",
            description="Test",
            app=app1,
            values=(1003, 1200, 1194, 1003),
            time_unit="h",
        ),
    )
    # Requests/hour each instance class can serve for each app
    performances = PerformanceSet(
        id="test_perfs",
        time_unit="h",
        values=PerformanceValues(
            {m3large: {app0: 10, app1: 500}, m3large_r: {app0: 10, app1: 500},
             m3priv: {app0: 9, app1: 450},
             m3priv2: {app0: 25, app1: 1000}}
        ),
    )
    problem_phase_i = Problem(
        id="example",
        name="Example hybrid cloud",
        workloads=workloads,
        instance_classes=(m3large, m3large_r, m3priv, m3priv2),
        performances=performances,
    )
    return problem_phase_i
class TestExampleProblem:
    def test_solve_problem_with_private_constraints(self):
        """Solve a hybrid problem and verify the private cloud's core cap
        is respected by the optimal allocation."""
        problem = create_problem()
        solution = phases.PhaseI(problem).solve()
        assert solution.solving_stats.algorithm.status == Status.optimal
        # Known optimal cost for this fixture
        assert solution.solving_stats.optimal_cost == 252.0

        # Exactly one limiting set guards the private instance classes
        private_limiting_sets = {
            ls
            for ic in problem.instance_classes if ic.is_private
            for ls in ic.limiting_sets
        }
        assert len(private_limiting_sets) == 1
        # That limiting set caps the total number of cores
        max_cores = next(iter(private_limiting_sets)).max_cores
        assert max_cores == 20

        # Per-timeslot private core usage must never exceed the cap
        instance_classes = solution.allocation.instance_classes
        used_cores = [
            sum(
                count * ic.cores
                for app_alloc in ts_alloc
                for count, ic in zip(app_alloc, instance_classes)
                if ic.is_private
            )
            for ts_alloc in solution.allocation.values
        ]
        assert all(n_cores <= max_cores for n_cores in used_cores)
class TestStorageYaml:
    """Round-trip tests: problem/solution -> YAML -> schema check -> back."""
    def test_problem_with_private_to_yaml(self):
        """Creates a problem which uses private instances, converts it to YAML,
        checks that the resulting YAML is valid, and finally reads it back
        to Python and compares it with the initial problem"""
        problem = create_problem()
        # Create yaml version
        from malloovia import util
        yaml_str = util.problems_to_yaml({"Example hybrid cloud": problem})
        # Optionally write it actually to disk, to visually inspect it
        # with open("/tmp/test.yaml", "w") as f:
        # f.write(yaml_str)
        # Check that the generated problem is valid against the schema
        problem_data = yaml.safe_load(yaml_str)
        malloovia_schema = util.get_schema()
        try:
            validate(problem_data, malloovia_schema)
        except Exception as e:
            # NOTE(review): `e` is unused; the original schema error detail
            # is swallowed by pytest.fail here.
            pytest.fail("The generated yaml is not valid against the schema")
        # The actual test is to read it back
        back_to_problems = util.problems_from_dict(
            yaml.safe_load(yaml_str), yaml_filename="Nofile"
        )
        # Compare malloovia classes to ensure that they store the same information in the
        # original problem.
        assert problem == back_to_problems["example"]
    def test_solution_with_hybrid_to_yaml_back_and_forth(self):
        """Creates and solves a problem which uses private instances,
        converts the solution to YAML, checks that the resulting YAML
        is valid, and finally reads it back to Python and compares it
        with the solution object"""
        problem = create_problem()
        solution = phases.PhaseI(problem).solve()
        # Dump solution to yaml
        from malloovia import util
        yaml_str = util.solutions_to_yaml([solution])
        # NOTE(review): unlike the first test, this write to /tmp is NOT
        # commented out — it is a side effect on every run; confirm intended.
        with open("/tmp/test.yaml", "w") as f:
            f.write(yaml_str)
        # Check that the generated solution is valid against the schema
        solution_data = yaml.safe_load(yaml_str)
        malloovia_schema = util.get_schema()
        try:
            validate(solution_data, malloovia_schema)
        except Exception as e:
            pytest.fail("The generated yaml is not valid against the schema")
        # The actual test is to read it back
        back_to_solution = util.solutions_from_dict(
            yaml.safe_load(yaml_str), yaml_filename="Nofile"
        )
        # Compare malloovia classes to ensure that they store the same information in the
        # original problem.
        assert solution == back_to_solution['solution_i_example']
| {
"content_hash": "5249baf6dc2b13ce88b9bc2aacbf9c51",
"timestamp": "",
"source": "github",
"line_count": 211,
"max_line_length": 89,
"avg_line_length": 33.59241706161137,
"alnum_prop": 0.5955135440180587,
"repo_name": "asi-uniovi/malloovia",
"id": "baf025c6db92dec558654f7c2f41fbe21ac74fa4",
"size": "7088",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_hybrid.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "2370"
},
{
"name": "Python",
"bytes": "223278"
}
],
"symlink_target": ""
} |
import json
import os
import shutil
from subprocess import check_output, check_call, CalledProcessError
import tempfile
import urllib
import psutil
from .. import constants
from ..daemon import close_client_connection, clean_up_socket, shut_down_http_server
from ..log import log_to_client
from ..payload import daemon_command
from ..subprocess import check_and_log_output_and_error
def _get_latest_version():
    """Gets latest Dusty binary version using the GitHub api.

    Raises RuntimeError when GitHub answers with a non-2xx status.
    The connection is now always closed (the original leaked it, both on
    the success path and when the status check raised).
    """
    url = 'https://api.github.com/repos/{}/releases/latest'.format(constants.DUSTY_GITHUB_PATH)
    conn = urllib.urlopen(url)
    try:
        if conn.getcode() >= 300:
            raise RuntimeError('GitHub api returned code {}; can\'t determine latest version. Aborting'.format(conn.getcode()))
        json_data = conn.read()
    finally:
        conn.close()
    return json.loads(json_data)['tag_name']
def _get_binary_url(version):
    """Return the GitHub download URL for *version* of the Dusty binary."""
    return 'https://github.com/{repo}/releases/download/{tag}/{name}'.format(
        repo=constants.DUSTY_GITHUB_PATH,
        tag=version,
        name=constants.DUSTY_BINARY_NAME)
def _get_binary_location():
    """Absolute path of the executable backing the current process."""
    current_process = psutil.Process()
    return current_process.exe()
def _download_binary(version):
binary_url = _get_binary_url(version)
conn = urllib.urlopen(binary_url)
if conn.getcode() >= 300:
raise RuntimeError('Unable to retrieve Dusty binary version {} from GitHub; this version may not exist'.format(version))
binary_data = conn.read()
tmp_path = tempfile.mktemp()
with open(tmp_path, 'w') as f:
f.write(binary_data)
os.chmod(tmp_path, 0755)
return tmp_path
def _test_dusty_binary(binary_path, version):
    """Sanity-check a freshly downloaded binary before installing it:
    it must run, report the expected version (unless an RC was requested)
    and pass the daemon preflight."""
    try:
        version_output = check_output([binary_path, '-v']).rstrip()
    except CalledProcessError:
        raise RuntimeError('Downloaded binary is not operating correctly; aborting upgrade')
    reported_version = version_output.split()[-1]
    if 'RC' in version:
        # NOTE(review): this early return also skips the preflight run
        # below — confirm that is intended for release candidates.
        log_to_client('Release candidate requested, skipping version check')
        return
    if reported_version != version:
        raise RuntimeError('Version of downloaded binary {} does not match expected {}'.format(reported_version, version))
    check_and_log_output_and_error([binary_path, '-d', '--preflight-only'], demote=False, quiet_on_success=True)
def _move_temp_binary_to_path(tmp_binary_path):
    """Moves the temporary binary to the location of the binary that's currently being run.
    Preserves owner, group, and permissions of original binary"""
    # pylint: disable=E1101
    binary_path = _get_binary_location()
    if not binary_path.endswith(constants.DUSTY_BINARY_NAME):
        raise RuntimeError('Refusing to overwrite binary {}'.format(binary_path))
    # capture ownership/mode before the overwrite, restore afterwards
    original_stat = os.stat(binary_path)
    shutil.move(tmp_binary_path, binary_path)
    os.chown(binary_path, original_stat.st_uid, original_stat.st_gid)
    os.chmod(binary_path, original_stat.st_mode)
    return binary_path
@daemon_command
def upgrade_dusty_binary(version=None):
    """Upgrade the running Dusty binary in place, then restart the daemon.

    version: explicit tag to install; defaults to the latest GitHub release.
    The shutdown sequence at the bottom is order-sensitive: stop the HTTP
    server, close the client connection, remove the socket, and only then
    replace this process image via execvp.
    """
    if not constants.BINARY:
        # running from source: there is no installed binary to replace
        log_to_client('It looks like you\'re running Dusty from source. Upgrade is only available when you use the installed binary')
        return
    if version is None:
        version = _get_latest_version()
        # skip the short-circuit for prerelease builds so they can upgrade
        # to the same tag
        if not constants.PRERELEASE and version == constants.VERSION:
            log_to_client('You\'re already running the latest Dusty version ({})'.format(version))
            return
    else:
        log_to_client('Downloading Dusty version {}'.format(version))
    tmp_binary_path = _download_binary(version)
    # validate before overwriting the live binary
    _test_dusty_binary(tmp_binary_path, version)
    final_binary_path = _move_temp_binary_to_path(tmp_binary_path)
    log_to_client('Finished upgrade to version {} of Dusty! The daemon will now restart'.format(version))
    log_to_client('You may need to run `dusty up` to fully complete the upgrade')
    shut_down_http_server()
    close_client_connection()
    clean_up_socket()
    # replace the current process with the freshly installed daemon
    os.execvp(final_binary_path, [final_binary_path, '-d'])
| {
"content_hash": "10f4e75faaf9e689691536d96eb39fef",
"timestamp": "",
"source": "github",
"line_count": 93,
"max_line_length": 133,
"avg_line_length": 41.763440860215056,
"alnum_prop": 0.6985066941297632,
"repo_name": "gamechanger/dusty",
"id": "5fd8c5ebb0496428c089dbf6e3f7f90a1d4a6437",
"size": "3884",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "dusty/commands/upgrade.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "845"
},
{
"name": "JavaScript",
"bytes": "1675"
},
{
"name": "Python",
"bytes": "493669"
},
{
"name": "Ruby",
"bytes": "769"
},
{
"name": "Shell",
"bytes": "3875"
}
],
"symlink_target": ""
} |
"""
Methods to extend functionality of the VBase4 class
"""
def pPrintValues(self):
    """
    Pretty print: the four components, fixed-width, comma separated.
    """
    return ", ".join("% 10.4f" % self[i] for i in range(4))
Dtool_funcToMethod(pPrintValues, VBase4)  # graft the function onto VBase4 as a method
del pPrintValues  # keep the module namespace clean after registration
def asTuple(self):
    """
    Deprecated: returns the vector as a tuple; use tuple(self) directly.
    """
    # print with parentheses is behaviour-identical in Python 2 for a
    # single string argument, and keeps the file importable elsewhere
    print("Warning: VBase4.asTuple() is no longer needed and deprecated. Use the vector directly instead.")
    return tuple(self)
Dtool_funcToMethod(asTuple, VBase4)  # graft the deprecated shim onto VBase4
del asTuple  # keep the module namespace clean after registration
| {
"content_hash": "14fb5598d42046ffcdc41725ad6a560e",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 107,
"avg_line_length": 24.476190476190474,
"alnum_prop": 0.6614785992217899,
"repo_name": "jjkoletar/panda3d",
"id": "8b7c022593c0deec97a225a73b81c3af22640cad",
"size": "514",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "direct/src/extensions_native/VBase4_extensions.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "6435690"
},
{
"name": "C++",
"bytes": "31389522"
},
{
"name": "Emacs Lisp",
"bytes": "166274"
},
{
"name": "HTML",
"bytes": "8081"
},
{
"name": "Java",
"bytes": "3777"
},
{
"name": "JavaScript",
"bytes": "7003"
},
{
"name": "Logos",
"bytes": "5504"
},
{
"name": "NSIS",
"bytes": "91955"
},
{
"name": "Nemerle",
"bytes": "7733"
},
{
"name": "Objective-C",
"bytes": "12290"
},
{
"name": "Objective-C++",
"bytes": "298197"
},
{
"name": "Pascal",
"bytes": "53710"
},
{
"name": "Perl",
"bytes": "206982"
},
{
"name": "Perl6",
"bytes": "30612"
},
{
"name": "Puppet",
"bytes": "752377"
},
{
"name": "Python",
"bytes": "5860103"
},
{
"name": "Rebol",
"bytes": "421"
},
{
"name": "Shell",
"bytes": "59984"
},
{
"name": "Visual Basic",
"bytes": "136"
}
],
"symlink_target": ""
} |
"""Manul Integration"""
import os
import subprocess
import shutil
from fuzzers import utils
from fuzzers.afl import fuzzer as afl_fuzzer
def build():
    """Build the benchmark, then relocate the manul checkout into $OUT."""
    afl_fuzzer.prepare_build_environment()
    utils.build_benchmark()
    # manul ships at the image root; move it next to the built target
    out_dir = os.environ['OUT']
    shutil.move('/manul', out_dir)
def fuzz(input_corpus, output_corpus, target_binary):
    """Run manul against *target_binary*.

    Arguments:
      input_corpus: Directory containing the initial seed corpus for
        the benchmark.
      output_corpus: Output directory to place the newly generated corpus
        from fuzzer run.
      target_binary: Absolute path to the fuzz target binary.
    """
    afl_fuzzer.prepare_fuzz_environment(input_corpus)
    # manul was moved under $OUT by build(); run it from there so its
    # relative config path resolves
    manul_directory = os.path.join(os.environ['OUT'], 'manul')
    config_path = os.path.join(manul_directory, 'manul_lin.config')
    command = [
        'python3',
        'manul.py',
        '-i', input_corpus,
        '-o', output_corpus,
        '-c', config_path,
        target_binary + ' @@',
    ]
    subprocess.check_call(command, cwd=manul_directory)
| {
"content_hash": "17b7548288c347533861b4b0b76fb6fd",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 80,
"avg_line_length": 32.88235294117647,
"alnum_prop": 0.6556350626118068,
"repo_name": "google/fuzzbench",
"id": "34af755b76a7e14097b04f7fe45f07fc2941b82c",
"size": "1688",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "fuzzers/manul/fuzzer.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "17334"
},
{
"name": "C++",
"bytes": "37645"
},
{
"name": "Dockerfile",
"bytes": "337043"
},
{
"name": "HTML",
"bytes": "25840"
},
{
"name": "Jupyter Notebook",
"bytes": "578996"
},
{
"name": "Makefile",
"bytes": "2810"
},
{
"name": "Mako",
"bytes": "494"
},
{
"name": "Python",
"bytes": "1222236"
},
{
"name": "Shell",
"bytes": "86157"
}
],
"symlink_target": ""
} |
""" public toolkit API """
from pandas.api import (
extensions,
indexers,
interchange,
types,
)
# Public, star-importable submodules of pandas.api (order kept as-is,
# since __all__ is part of the observable module surface).
__all__ = [
    "interchange",
    "extensions",
    "indexers",
    "types",
]
| {
"content_hash": "833f950bd16c75de73592b990ab4e291",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 26,
"avg_line_length": 13.785714285714286,
"alnum_prop": 0.5492227979274611,
"repo_name": "pandas-dev/pandas",
"id": "9d4f721225d93aab3bd00a9eeac11ec8eacf118b",
"size": "193",
"binary": false,
"copies": "2",
"ref": "refs/heads/main",
"path": "pandas/api/__init__.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "512"
},
{
"name": "C",
"bytes": "366145"
},
{
"name": "CSS",
"bytes": "1800"
},
{
"name": "Cython",
"bytes": "1186787"
},
{
"name": "Dockerfile",
"bytes": "1411"
},
{
"name": "HTML",
"bytes": "456531"
},
{
"name": "Python",
"bytes": "18778786"
},
{
"name": "Shell",
"bytes": "10369"
},
{
"name": "Smarty",
"bytes": "8486"
},
{
"name": "XSLT",
"bytes": "1196"
}
],
"symlink_target": ""
} |
import itertools, os, re
from time import gmtime, strftime
# django imports
from django.shortcuts import render_to_response, HttpResponse
from django.template import RequestContext as Context
from django.http import HttpResponseRedirect
from django.contrib.admin.views.decorators import staff_member_required
from django.views.decorators.cache import never_cache
from django.utils.translation import ugettext as _
from django.conf import settings
from django import forms
from django.core.urlresolvers import reverse
from django.core.exceptions import ImproperlyConfigured
from django.dispatch import Signal
from django.core.paginator import Paginator, InvalidPage, EmptyPage
try:
# django SVN
from django.views.decorators.csrf import csrf_exempt
except:
# django 1.1
from django.contrib.csrf.middleware import csrf_exempt
# filebrowser imports
from filebrowser.settings import *
from filebrowser.functions import path_to_url, sort_by_attr, get_path, get_file, get_version_path, get_breadcrumbs, get_filterdate, get_settings_var, handle_file_upload, convert_filename
from filebrowser.templatetags.fb_tags import query_helper
from filebrowser.base import FileObject
from filebrowser.decorators import flash_login_required
# Precompile regular expressions that hide files from the browser:
# the configured EXCLUDE patterns plus one per image-version suffix.
filter_re = [re.compile(exp) for exp in EXCLUDE]
for version_key in VERSIONS:
    # NOTE(review): the '.' before the extension group is unescaped, so it
    # matches any character rather than a literal dot — confirm before
    # tightening the pattern.
    version_exp = (r'_%s.(%s)') % (version_key, '|'.join(EXTENSION_LIST))
    filter_re.append(re.compile(version_exp))
def browse(request):
    """
    Browse Files/Directories.

    Renders a paginated, sortable listing of the files below ``DIRECTORY``,
    optionally restricted to the subfolder given by the ``dir`` GET
    parameter.  Supports filtering by file type (``filter_type``) and date
    (``filter_date``) plus a recursive filename search via ``q``.
    """
    # QUERY / PATH CHECK
    query = request.GET.copy()
    path = get_path(query.get('dir', ''))
    directory = get_path('')
    q = request.GET.get('q')
    if path is None:
        msg = _('The requested Folder does not exist.')
        request.user.message_set.create(message=msg)
        if directory is None:
            # The DIRECTORY does not exist, raise an error to prevent eternal redirecting.
            raise ImproperlyConfigured, _("Error finding Upload-Folder. Maybe it does not exist?")
        redirect_url = reverse("fb_browse") + query_helper(query, "", "dir")
        return HttpResponseRedirect(redirect_url)
    abs_path = os.path.join(MEDIA_ROOT, DIRECTORY, path)
    # INITIAL VARIABLES
    results_var = {'results_total': 0, 'results_current': 0, 'delete_total': 0, 'images_total': 0, 'select_total': 0 }
    counter = {}
    for k,v in EXTENSIONS.iteritems():
        counter[k] = 0
    if q:
        # Search mode: walk the tree recursively and collect
        # (directory relative to MEDIA_ROOT, filename) pairs.
        m_root = os.path.normpath(MEDIA_ROOT)
        dirs = [
            [(os.path.normpath(root)[len(m_root)+1:], f) for f in filenames]
            for root, _subdirs, filenames in os.walk(abs_path)
        ]
        dir_list = itertools.chain(*dirs)
    else:
        # Plain (non-recursive) listing of the current folder only.
        root = os.path.join(DIRECTORY, path)
        dir_list = ((root, f) for f in os.listdir(abs_path))
    files = []
    for file_dir, file in dir_list:
        # EXCLUDE FILES MATCHING VERSIONS_PREFIX OR ANY OF THE EXCLUDE PATTERNS
        # (hidden dotfiles are skipped as well)
        filtered = file.startswith('.')
        for re_prefix in filter_re:
            if re_prefix.search(file):
                filtered = True
        if filtered:
            continue
        results_var['results_total'] += 1
        # CREATE FILEOBJECT
        fileobject = FileObject(os.path.join(file_dir, file))
        # FILTER / SEARCH
        append = False
        if fileobject.filetype == request.GET.get('filter_type', fileobject.filetype) and get_filterdate(request.GET.get('filter_date', ''), fileobject.date or 0):
            append = True
        if q and not re.compile(q.lower(), re.M).search(file.lower()):
            append = False
        # APPEND FILE_LIST
        if append:
            try:
                # COUNTER/RESULTS
                if fileobject.filetype == 'Image':
                    results_var['images_total'] += 1
                if fileobject.filetype != 'Folder':
                    results_var['delete_total'] += 1
                elif fileobject.filetype == 'Folder' and fileobject.is_empty:
                    # Only empty folders are deletable.
                    results_var['delete_total'] += 1
                if query.get('type') and query.get('type') in SELECT_FORMATS and fileobject.filetype in SELECT_FORMATS[query.get('type')]:
                    results_var['select_total'] += 1
                elif not query.get('type'):
                    results_var['select_total'] += 1
            except OSError:
                # Ignore items that have problems
                continue
            else:
                files.append(fileobject)
                results_var['results_current'] += 1
                # COUNTER/RESULTS
                if fileobject.filetype:
                    counter[fileobject.filetype] += 1
    # SORTING
    query['o'] = request.GET.get('o', DEFAULT_SORTING_BY)
    query['ot'] = request.GET.get('ot', DEFAULT_SORTING_ORDER)
    # Folders are listed separately, always sorted by filename.
    folders = [f for f in files if f.filetype == 'Folder']
    folders = sort_by_attr(folders, 'filename')
    files = [f for f in files if f.filetype != 'Folder']
    files = sort_by_attr(files, request.GET.get('o', DEFAULT_SORTING_BY))
    # note: `and` binds tighter than `or` -- reverse when ot=desc is given
    # explicitly, or when no ot is given and the default order is desc.
    if not request.GET.get('ot') and DEFAULT_SORTING_ORDER == "desc" or request.GET.get('ot') == "desc":
        files.reverse()
    p = Paginator(files, LIST_PER_PAGE)
    try:
        # NOTE(review): .get() cannot raise here, so this try is a no-op.
        page_nr = request.GET.get('p', '1')
    except:
        page_nr = 1
    try:
        page = p.page(page_nr)
    except (EmptyPage, InvalidPage):
        # Out-of-range page number: fall back to the last page.
        page = p.page(p.num_pages)
    return render_to_response('filebrowser/index.html', {
        'dir': path,
        'p': p,
        'q': q,
        'page': page,
        'folders': folders,
        'results_var': results_var,
        'counter': counter,
        'query': query,
        'title': _(u'FileBrowser'),
        'settings_var': get_settings_var(),
        'breadcrumbs': get_breadcrumbs(query, path),
        'breadcrumbs_title': ""
    }, context_instance=Context(request))
browse = staff_member_required(never_cache(browse))
# mkdir signals
filebrowser_pre_createdir = Signal(providing_args=["path", "dirname"])
filebrowser_post_createdir = Signal(providing_args=["path", "dirname"])
def mkdir(request):
    """
    Make Directory.

    Shows and handles a form for creating a new folder inside the folder
    given by the ``dir`` GET parameter.  Fires the pre/post createdir
    signals around the actual ``os.mkdir`` call.
    """
    from filebrowser.forms import MakeDirForm
    # QUERY / PATH CHECK
    query = request.GET
    path = get_path(query.get('dir', ''))
    if path is None:
        msg = _('The requested Folder does not exist.')
        request.user.message_set.create(message=msg)
        return HttpResponseRedirect(reverse("fb_browse"))
    abs_path = os.path.join(MEDIA_ROOT, DIRECTORY, path)
    if request.method == 'POST':
        form = MakeDirForm(abs_path, request.POST)
        if form.is_valid():
            server_path = os.path.join(abs_path, form.cleaned_data['dir_name'])
            try:
                # PRE CREATE SIGNAL
                filebrowser_pre_createdir.send(sender=request, path=path, dirname=form.cleaned_data['dir_name'])
                # CREATE FOLDER
                os.mkdir(server_path)
                os.chmod(server_path, 0775)
                # POST CREATE SIGNAL
                filebrowser_post_createdir.send(sender=request, path=path, dirname=form.cleaned_data['dir_name'])
                # MESSAGE & REDIRECT
                msg = _('The Folder %s was successfully created.') % (form.cleaned_data['dir_name'])
                request.user.message_set.create(message=msg)
                # on redirect, sort by date desc to see the new directory on top of the list
                # remove filter in order to actually _see_ the new folder
                # remove pagination
                redirect_url = reverse("fb_browse") + query_helper(query, "ot=desc,o=date", "ot,o,filter_type,filter_date,q,p")
                return HttpResponseRedirect(redirect_url)
            except OSError, (errno, strerror):
                # errno 13 == EACCES (permission denied)
                if errno == 13:
                    form.errors['dir_name'] = forms.util.ErrorList([_('Permission denied.')])
                else:
                    form.errors['dir_name'] = forms.util.ErrorList([_('Error creating folder.')])
    else:
        form = MakeDirForm(abs_path)
    return render_to_response('filebrowser/makedir.html', {
        'form': form,
        'query': query,
        'title': _(u'New Folder'),
        'settings_var': get_settings_var(),
        'breadcrumbs': get_breadcrumbs(query, path),
        'breadcrumbs_title': _(u'New Folder')
    }, context_instance=Context(request))
mkdir = staff_member_required(never_cache(mkdir))
def upload(request):
    """
    Multiple File Upload.

    Renders the upload page for the folder given by the ``dir`` GET
    parameter.  The session key is extracted from the raw cookie header
    and passed to the template for use by the flash uploader (which
    authenticates via ``flash_login_required`` on ``_upload_file``).
    """
    from django.http import parse_cookie
    # QUERY / PATH CHECK
    query = request.GET
    path = get_path(query.get('dir', ''))
    if path is None:
        msg = _('The requested Folder does not exist.')
        request.user.message_set.create(message=msg)
        return HttpResponseRedirect(reverse("fb_browse"))
    abs_path = os.path.join(MEDIA_ROOT, DIRECTORY, path)
    # SESSION (used for flash-uploading)
    cookie_dict = parse_cookie(request.META.get('HTTP_COOKIE', ''))
    # NOTE(review): ``engine`` is imported but never used in this view.
    engine = __import__(settings.SESSION_ENGINE, {}, {}, [''])
    session_key = cookie_dict.get(settings.SESSION_COOKIE_NAME, None)
    return render_to_response('filebrowser/upload.html', {
        'query': query,
        'title': _(u'Select files to upload'),
        'settings_var': get_settings_var(),
        'session_key': session_key,
        'breadcrumbs': get_breadcrumbs(query, path),
        'breadcrumbs_title': _(u'Upload')
    }, context_instance=Context(request))
upload = staff_member_required(never_cache(upload))
@csrf_exempt
def _check_file(request):
    """
    Check if file already exists on the server.

    Expects POST data mapping arbitrary keys to candidate filenames plus a
    ``folder`` entry; returns a JSON object containing only the entries
    whose (converted) filename already exists in that folder.
    """
    from django.utils import simplejson
    folder = request.POST.get('folder')
    # Strip everything up to and including the upload URL from the
    # submitted folder value, leaving a path relative to DIRECTORY.
    fb_uploadurl_re = re.compile(r'^.*(%s)' % reverse("fb_upload"))
    folder = fb_uploadurl_re.sub('', folder)
    fileArray = {}
    if request.method == 'POST':
        for k,v in request.POST.items():
            if k != "folder":
                # Apply the same filename normalization as the uploader
                # so the existence check matches what would be written.
                v = convert_filename(v)
                if os.path.isfile(os.path.join(MEDIA_ROOT, DIRECTORY, folder, v)):
                    fileArray[k] = v
    return HttpResponse(simplejson.dumps(fileArray))
# upload signals
filebrowser_pre_upload = Signal(providing_args=["path", "file"])
filebrowser_post_upload = Signal(providing_args=["path", "file"])
@csrf_exempt
@flash_login_required
def _upload_file(request):
    """
    Upload file to the server.

    Receives a single file (``Filedata``) from the uploader, normalizes
    its name via ``convert_filename`` and stores it in the folder given by
    the ``folder`` POST parameter.  Fires the pre/post upload signals
    around the actual write.
    """
    from django.core.files.move import file_move_safe
    if request.method == 'POST':
        folder = request.POST.get('folder')
        # Strip the upload URL prefix, leaving a path relative to DIRECTORY.
        fb_uploadurl_re = re.compile(r'^.*(%s)' % reverse("fb_upload"))
        folder = fb_uploadurl_re.sub('', folder)
        abs_path = os.path.join(MEDIA_ROOT, DIRECTORY, folder)
        if request.FILES:
            filedata = request.FILES['Filedata']
            filedata.name = convert_filename(filedata.name)
            # PRE UPLOAD SIGNAL
            filebrowser_pre_upload.send(sender=request, path=request.POST.get('folder'), file=filedata)
            # HANDLE UPLOAD
            uploadedfile = handle_file_upload(abs_path, filedata)
            # MOVE UPLOADED FILE
            # if file already exists
            # NOTE(review): file_move_safe defaults to allow_overwrite=False;
            # confirm the intended behaviour when the target already exists.
            if os.path.isfile(os.path.join(MEDIA_ROOT, DIRECTORY, folder, filedata.name)):
                old_file = os.path.join(abs_path, filedata.name)
                new_file = os.path.join(abs_path, uploadedfile)
                file_move_safe(new_file, old_file)
            # POST UPLOAD SIGNAL
            filebrowser_post_upload.send(sender=request, path=request.POST.get('folder'), file=FileObject(os.path.join(DIRECTORY, folder, filedata.name)))
    return HttpResponse('True')
#_upload_file = flash_login_required(_upload_file)
# delete signals
filebrowser_pre_delete = Signal(providing_args=["path", "filename"])
filebrowser_post_delete = Signal(providing_args=["path", "filename"])
def delete(request):
    """
    Delete existing File/Directory.

    When trying to delete a Directory, the Directory has to be empty.

    Expects the GET parameters ``dir`` (folder relative to DIRECTORY),
    ``filename`` and ``filetype``.  For files, all generated image
    versions/thumbnails are removed alongside the original.  On success
    the user is redirected back to the browse view; on failure the browse
    template is re-rendered with an error message.
    """
    # QUERY / PATH CHECK
    query = request.GET
    path = get_path(query.get('dir', ''))
    filename = get_file(query.get('dir', ''), query.get('filename', ''))
    if path is None or filename is None:
        if path is None:
            msg = _('The requested Folder does not exist.')
        else:
            msg = _('The requested File does not exist.')
        request.user.message_set.create(message=msg)
        return HttpResponseRedirect(reverse("fb_browse"))
    abs_path = os.path.join(MEDIA_ROOT, DIRECTORY, path)
    msg = ""
    if request.GET:
        if request.GET.get('filetype') != "Folder":
            relative_server_path = os.path.join(DIRECTORY, path, filename)
            try:
                # PRE DELETE SIGNAL
                filebrowser_pre_delete.send(sender=request, path=path, filename=filename)
                # DELETE IMAGE VERSIONS/THUMBNAILS
                for version in VERSIONS:
                    try:
                        os.unlink(os.path.join(MEDIA_ROOT, get_version_path(relative_server_path, version)))
                    except:
                        # A missing version is fine -- it may never have been generated.
                        pass
                # DELETE FILE
                os.unlink(os.path.join(abs_path, filename))
                # POST DELETE SIGNAL
                filebrowser_post_delete.send(sender=request, path=path, filename=filename)
                # MESSAGE & REDIRECT
                msg = _('The file %s was successfully deleted.') % (filename.lower())
                request.user.message_set.create(message=msg)
                redirect_url = reverse("fb_browse") + query_helper(query, "", "filename,filetype")
                return HttpResponseRedirect(redirect_url)
            except OSError:
                # Bug fix: this used to do ``msg = OSError`` -- assigning the
                # exception *class* as the user-facing message.
                msg = _('Error deleting the file.')
        else:
            try:
                # PRE DELETE SIGNAL
                filebrowser_pre_delete.send(sender=request, path=path, filename=filename)
                # DELETE FOLDER
                os.rmdir(os.path.join(abs_path, filename))
                # POST DELETE SIGNAL
                filebrowser_post_delete.send(sender=request, path=path, filename=filename)
                # MESSAGE & REDIRECT
                msg = _('The folder %s was successfully deleted.') % (filename.lower())
                request.user.message_set.create(message=msg)
                redirect_url = reverse("fb_browse") + query_helper(query, "", "filename,filetype")
                return HttpResponseRedirect(redirect_url)
            except OSError:
                # os.rmdir only removes empty folders.
                msg = _('Error deleting the folder. Maybe it is not empty?')
    if msg:
        request.user.message_set.create(message=msg)
    return render_to_response('filebrowser/index.html', {
        # Bug fix: this fallback render used the undefined name ``dir_name``
        # (raising NameError whenever a deletion failed); use ``path`` like
        # the rest of this view.
        'dir': path,
        'file': request.GET.get('filename', ''),
        'query': query,
        'settings_var': get_settings_var(),
        'breadcrumbs': get_breadcrumbs(query, path),
        'breadcrumbs_title': ""
    }, context_instance=Context(request))
delete = staff_member_required(never_cache(delete))
# rename signals
filebrowser_pre_rename = Signal(providing_args=["path", "filename", "new_filename"])
filebrowser_post_rename = Signal(providing_args=["path", "filename", "new_filename"])
def rename(request):
    """
    Rename existing File/Directory.

    Includes renaming existing Image Versions/Thumbnails.

    The new name is taken from the RenameForm; the original file extension
    is preserved.  Existing versions/thumbnails are deleted (they are
    regenerated on demand under the new name).
    """
    from filebrowser.forms import RenameForm
    # QUERY / PATH CHECK
    query = request.GET
    path = get_path(query.get('dir', ''))
    filename = get_file(query.get('dir', ''), query.get('filename', ''))
    if path is None or filename is None:
        if path is None:
            msg = _('The requested Folder does not exist.')
        else:
            msg = _('The requested File does not exist.')
        request.user.message_set.create(message=msg)
        return HttpResponseRedirect(reverse("fb_browse"))
    abs_path = os.path.join(MEDIA_ROOT, DIRECTORY, path)
    file_extension = os.path.splitext(filename)[1].lower()
    if request.method == 'POST':
        form = RenameForm(abs_path, file_extension, request.POST)
        if form.is_valid():
            relative_server_path = os.path.join(DIRECTORY, path, filename)
            new_filename = form.cleaned_data['name'] + file_extension
            new_relative_server_path = os.path.join(DIRECTORY, path, new_filename)
            try:
                # PRE RENAME SIGNAL
                filebrowser_pre_rename.send(sender=request, path=path, filename=filename, new_filename=new_filename)
                # DELETE IMAGE VERSIONS/THUMBNAILS
                # regenerating versions/thumbs will be done automatically
                for version in VERSIONS:
                    try:
                        os.unlink(os.path.join(MEDIA_ROOT, get_version_path(relative_server_path, version)))
                    except:
                        # A missing version is fine -- it may never have been generated.
                        pass
                # RENAME ORIGINAL
                os.rename(os.path.join(MEDIA_ROOT, relative_server_path), os.path.join(MEDIA_ROOT, new_relative_server_path))
                # POST RENAME SIGNAL
                filebrowser_post_rename.send(sender=request, path=path, filename=filename, new_filename=new_filename)
                # MESSAGE & REDIRECT
                msg = _('Renaming was successful.')
                request.user.message_set.create(message=msg)
                redirect_url = reverse("fb_browse") + query_helper(query, "", "filename")
                return HttpResponseRedirect(redirect_url)
            except OSError, (errno, strerror):
                form.errors['name'] = forms.util.ErrorList([_('Error.')])
    else:
        form = RenameForm(abs_path, file_extension)
    return render_to_response('filebrowser/rename.html', {
        'form': form,
        'query': query,
        'file_extension': file_extension,
        'title': _(u'Rename "%s"') % filename,
        'settings_var': get_settings_var(),
        'breadcrumbs': get_breadcrumbs(query, path),
        'breadcrumbs_title': _(u'Rename')
    }, context_instance=Context(request))
rename = staff_member_required(never_cache(rename))
def versions(request):
    """
    Show all Versions for an Image according to ADMIN_VERSIONS.

    Validates the ``dir``/``filename`` GET parameters and passes the
    original image's URL to the versions template.
    """
    # QUERY / PATH CHECK
    query = request.GET
    path = get_path(query.get('dir', ''))
    filename = get_file(query.get('dir', ''), query.get('filename', ''))
    if path is None or filename is None:
        if path is None:
            msg = _('The requested Folder does not exist.')
        else:
            msg = _('The requested File does not exist.')
        request.user.message_set.create(message=msg)
        return HttpResponseRedirect(reverse("fb_browse"))
    # NOTE(review): abs_path is computed but unused in this view.
    abs_path = os.path.join(MEDIA_ROOT, DIRECTORY, path)
    return render_to_response('filebrowser/versions.html', {
        'original': path_to_url(os.path.join(DIRECTORY, path, filename)),
        'query': query,
        'title': _(u'Versions for "%s"') % filename,
        'settings_var': get_settings_var(),
        'breadcrumbs': get_breadcrumbs(query, path),
        'breadcrumbs_title': _(u'Versions for "%s"') % filename
    }, context_instance=Context(request))
versions = staff_member_required(never_cache(versions))
| {
"content_hash": "63d36302770a30de365399a9342bcd66",
"timestamp": "",
"source": "github",
"line_count": 490,
"max_line_length": 186,
"avg_line_length": 39.679591836734694,
"alnum_prop": 0.5992902329887363,
"repo_name": "dwaiter/django-filebrowser-old",
"id": "18ce01918a3a1eb7cb84f9083847069c53256d08",
"size": "19478",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "filebrowser/views.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "ActionScript",
"bytes": "321037"
},
{
"name": "JavaScript",
"bytes": "169792"
},
{
"name": "PHP",
"bytes": "7956"
},
{
"name": "Python",
"bytes": "67745"
}
],
"symlink_target": ""
} |
import logging
from pajbot.managers.adminlog import AdminLogManager
from pajbot.managers.db import DBManager
from pajbot.models.command import Command
from pajbot.models.command import CommandExample
from pajbot.models.module import Module
from pajbot.models.user import User
from pajbot.modules import BaseModule
from pajbot.modules import ModuleType
from pajbot.modules.basic import BasicCommandsModule
from pajbot.utils import split_into_chunks_with_prefix
log = logging.getLogger(__name__)


class AdminCommandsModule(BaseModule):
    """Miscellaneous admin-only chat commands: whispering as the bot,
    editing user points and levels, silencing/unsilencing the bot and
    enabling/disabling modules."""

    ID = __name__.split(".")[-1]
    NAME = "Admin Commands"
    DESCRIPTION = "All miscellaneous admin commands"
    CATEGORY = "Feature"
    ENABLED_DEFAULT = True
    MODULE_TYPE = ModuleType.TYPE_ALWAYS_ENABLED
    PARENT_MODULE = BasicCommandsModule

    @staticmethod
    def whisper(bot, message, **rest):
        """!w USERNAME MESSAGE... -- send a whisper from the bot."""
        if not message:
            return False

        msg_args = message.split(" ")
        if len(msg_args) > 1:
            username = msg_args[0]
            rest = " ".join(msg_args[1:])
            bot.whisper_login(username, rest)

    def edit_points(self, bot, source, message, **rest):
        """Add points to (or, with a negative amount, remove from) a user."""
        if not message:
            return False

        msg_split = message.split(" ")
        if len(msg_split) < 2:
            # The user did not supply enough arguments
            bot.whisper(source, f"Usage: !{self.command_name} USERNAME POINTS")
            return False

        username_input = msg_split[0]

        try:
            num_points = int(msg_split[1])
        except (ValueError, TypeError):
            # The user did not specify a valid integer for points
            bot.whisper(source, f"Invalid amount of points. Usage: !{self.command_name} USERNAME POINTS")
            return False

        with DBManager.create_session_scope() as db_session:
            user = User.find_by_user_input(db_session, username_input)
            if not user:
                bot.whisper(source, "This user does not exist FailFish")
                return False

            user.points += num_points

            if num_points >= 0:
                bot.whisper(source, f"Successfully gave {user} {num_points} points.")
            else:
                bot.whisper(source, f"Successfully removed {abs(num_points)} points from {user}.")

    def set_points(self, bot, source, message, **rest):
        """Set a user's points to an exact amount."""
        if not message:
            return False

        msg_split = message.split(" ")
        if len(msg_split) < 2:
            # The user did not supply enough arguments
            bot.whisper(source, f"Usage: !{self.command_name} USERNAME POINTS")
            return False

        username = msg_split[0]
        if len(username) < 2:
            # The username specified was too short. ;-)
            return False

        try:
            num_points = int(msg_split[1])
        except (ValueError, TypeError):
            # The user did not specify a valid integer for points
            bot.whisper(source, f"Invalid amount of points. Usage: !{self.command_name} USERNAME POINTS")
            return False

        with DBManager.create_session_scope() as db_session:
            user = User.find_by_user_input(db_session, username)
            if not user:
                bot.whisper(source, "This user does not exist FailFish")
                return False

            user.points = num_points

            bot.whisper(source, f"Successfully set {user}'s points to {num_points}.")

    @staticmethod
    def level(bot, source, message, **rest):
        """Change another user's level (the target must stay below the
        caller's own level)."""
        if not message:
            bot.whisper(source, "Usage: !level USERNAME NEW_LEVEL")
            return False

        msg_args = message.split(" ")
        if len(msg_args) < 2:
            return False

        username = msg_args[0].lower()

        try:
            new_level = int(msg_args[1])
        except (ValueError, TypeError):
            # Bug fix: a non-numeric level used to raise an unhandled
            # ValueError here (the other point/level commands already
            # guard their int() conversions like this).
            bot.whisper(source, "Usage: !level USERNAME NEW_LEVEL")
            return False

        if new_level >= source.level:
            bot.whisper(source, f"You cannot promote someone to the same or higher level as you ({source.level}).")
            return False

        # We create the user if the user didn't already exist in the database.
        with DBManager.create_session_scope() as db_session:
            user = User.find_or_create_from_user_input(db_session, bot.twitch_helix_api, username)
            if user is None:
                bot.whisper(source, f'A user with the name "{username}" could not be found.')
                return False

            if user.level >= source.level:
                bot.whisper(
                    source,
                    f"You cannot change the level of someone who is the same or higher level than you. You are level {source.level}, and {username} is level {user.level}",
                )
                return False

            old_level = user.level
            user.level = new_level

            log_msg = f"{user}'s user level changed from {old_level} to {new_level}"
            bot.whisper(source, log_msg)
            AdminLogManager.add_entry("Userlevel edited", source, log_msg)

    @staticmethod
    def cmd_silence(bot, source, **rest):
        """Put the bot into silent mode (it stops sending chat messages)."""
        if bot.silent:
            bot.whisper(source, "The bot is already silent")
        else:
            bot.silent = True
            bot.whisper(
                source,
                "The bot is now silent. Use !unsilence to enable messages again. Note that this option does not stick in case the bot crashes or restarts",
            )

    @staticmethod
    def cmd_unsilence(bot, source, **rest):
        """Take the bot out of silent mode."""
        if not bot.silent:
            bot.whisper(source, "The bot can already talk")
        else:
            bot.silent = False
            bot.whisper(source, "The bot can now talk again")

    @staticmethod
    def cmd_module(bot, source, message, **options):
        """!module list|enable ID|disable ID -- inspect and toggle modules."""
        module_manager = bot.module_manager

        if not message:
            return

        msg_args = message.split(" ")
        if len(msg_args) < 1:
            return

        sub_command = msg_args[0].lower()

        if sub_command == "list":
            messages = split_into_chunks_with_prefix(
                [{"prefix": "Available modules:", "parts": [module.ID for module in module_manager.all_modules]}],
                " ",
                default="No modules available.",
            )
            # Fix: the loop variable used to shadow the ``message`` parameter.
            for response in messages:
                bot.say(response)
        elif sub_command == "disable":
            if len(msg_args) < 2:
                return
            module_id = msg_args[1].lower()

            module = module_manager.get_module(module_id)
            if not module:
                bot.say(f"No module with the id {module_id} found")
                return

            # Always-enabled (and similarly protected) modules cannot be toggled.
            if module.MODULE_TYPE > ModuleType.TYPE_NORMAL:
                bot.say(f"Unable to disable module {module_id}")
                return

            if not module_manager.disable_module(module_id):
                bot.say(f"Unable to disable module {module_id}, maybe it's not enabled?")
                return

            # Rebuild command cache
            bot.commands.rebuild()

            # Persist the new state so it survives restarts.
            with DBManager.create_session_scope() as db_session:
                db_module = db_session.query(Module).filter_by(id=module_id).one()
                db_module.enabled = False

            AdminLogManager.post("Module toggled", source, "Disabled", module_id)
            bot.say(f"Disabled module {module_id}")

        elif sub_command == "enable":
            if len(msg_args) < 2:
                return
            module_id = msg_args[1].lower()

            module = module_manager.get_module(module_id)
            if not module:
                bot.say(f"No module with the id {module_id} found")
                return

            if module.MODULE_TYPE > ModuleType.TYPE_NORMAL:
                bot.say(f"Unable to enable module {module_id}")
                return

            if not module_manager.enable_module(module_id):
                bot.say(f"Unable to enable module {module_id}, maybe it's already enabled?")
                return

            # Rebuild command cache
            bot.commands.rebuild()

            # Persist the new state so it survives restarts.
            with DBManager.create_session_scope() as db_session:
                db_module = db_session.query(Module).filter_by(id=module_id).one()
                db_module.enabled = True

            AdminLogManager.post("Module toggled", source, "Enabled", module_id)
            bot.say(f"Enabled module {module_id}")

    def load_commands(self, **options):
        """Register all admin commands on this module."""
        self.commands["w"] = Command.raw_command(self.whisper, level=2000, description="Send a whisper from the bot")
        self.commands["editpoints"] = Command.raw_command(
            self.edit_points,
            level=1500,
            description="Modifies a user's points",
            examples=[
                CommandExample(
                    None,
                    "Give a user points",
                    chat="user:!editpoints pajlada 500\n" "bot>user:Successfully gave pajlada 500 points.",
                    description="This creates 500 points and gives them to pajlada",
                ).parse(),
                CommandExample(
                    None,
                    "Remove points from a user",
                    chat="user:!editpoints pajlada -500\n" "bot>user:Successfully removed 500 points from pajlada.",
                    description="This removes 500 points from pajlada. Users can go into negative points with this.",
                ).parse(),
            ],
        )
        self.commands["setpoints"] = Command.raw_command(
            self.set_points,
            level=1500,
            description="Sets a user's points",
            examples=[
                CommandExample(
                    None,
                    "Set a user's points",
                    chat="user:!setpoints pajlada 500\n" "bot>user:Successfully set pajlada's points to 500.",
                    description="This sets pajlada's points to 500.",
                ).parse()
            ],
        )
        self.commands["level"] = Command.raw_command(self.level, level=1000, description="Set a users level")

        self.commands["silence"] = Command.raw_command(self.cmd_silence, level=500, description="Silence the bot")
        self.commands["mute"] = self.commands["silence"]

        self.commands["unsilence"] = Command.raw_command(self.cmd_unsilence, level=500, description="Unsilence the bot")
        self.commands["unmute"] = self.commands["unsilence"]

        self.commands["module"] = Command.raw_command(
            self.cmd_module, level=500, description="Modify module", delay_all=0, delay_user=0
        )
| {
"content_hash": "e4562b1bf1067db184ef52655822db48",
"timestamp": "",
"source": "github",
"line_count": 283,
"max_line_length": 171,
"avg_line_length": 37.332155477031804,
"alnum_prop": 0.5699006152389967,
"repo_name": "pajlada/pajbot",
"id": "d81314865dcaa15cd588c98e36593314279f8ff4",
"size": "10565",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pajbot/modules/basic/admincommands.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "11288"
},
{
"name": "HTML",
"bytes": "129576"
},
{
"name": "JavaScript",
"bytes": "202450"
},
{
"name": "Mako",
"bytes": "494"
},
{
"name": "Python",
"bytes": "987601"
},
{
"name": "Shell",
"bytes": "589"
}
],
"symlink_target": ""
} |
"""Tests scenario for microvms with max vcpus(32)."""
import host_tools.network as net_tools # pylint: disable=import-error
MAX_VCPUS = 32


def test_max_vcpus(test_microvm_with_api, network_config):
    """
    Test if all configured guest vcpus are online.

    @type: functional
    """
    vm = test_microvm_with_api
    vm.spawn()

    # Configure a microVM with the maximum supported number of vCPUs (32).
    vm.basic_config(vcpu_count=MAX_VCPUS)
    _tap, _, _ = vm.ssh_network_config(network_config, "1")
    vm.start()

    # `nproc` inside the guest reports the number of online CPUs.
    conn = net_tools.SSHConnection(vm.ssh_config)
    _, stdout, stderr = conn.execute_command("nproc")
    assert stderr.read() == ""
    assert int(stdout.read()) == MAX_VCPUS
| {
"content_hash": "734ecd64a9d60d6b13295425040b540e",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 70,
"avg_line_length": 28.5,
"alnum_prop": 0.6707152496626181,
"repo_name": "firecracker-microvm/firecracker",
"id": "8c88eda13701f9a3aacc7ea94c21e26613796238",
"size": "853",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "tests/integration_tests/functional/test_max_vcpus.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "14327"
},
{
"name": "Python",
"bytes": "833110"
},
{
"name": "Ruby",
"bytes": "387"
},
{
"name": "Rust",
"bytes": "2944085"
},
{
"name": "Shell",
"bytes": "100226"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from requests.compat import is_py3
from requests.utils import to_native_string
from oauthlib.common import extract_params
from oauthlib.oauth1 import (Client, SIGNATURE_HMAC, SIGNATURE_TYPE_AUTH_HEADER)
CONTENT_TYPE_FORM_URLENCODED = 'application/x-www-form-urlencoded'
CONTENT_TYPE_MULTI_PART = 'multipart/form-data'
if is_py3:
    # Python 3 has no ``unicode`` builtin; alias it to ``str`` so the
    # ``unicode(...)`` calls below work on both Python versions.
    unicode = str
# OBS!: Correct signing of requests are conditional on invoking OAuth1
# as the last step of preparing a request, or at least having the
# content-type set properly.
class OAuth1(object):
    """Signs the request using OAuth 1 (RFC5849)"""
    def __init__(self, client_key,
            client_secret=None,
            resource_owner_key=None,
            resource_owner_secret=None,
            callback_uri=None,
            signature_method=SIGNATURE_HMAC,
            signature_type=SIGNATURE_TYPE_AUTH_HEADER,
            rsa_key=None, verifier=None,
            decoding='utf-8'):
        # Normalize the signature type; non-string values (e.g. None) are
        # passed through for oauthlib to handle.
        try:
            signature_type = signature_type.upper()
        except AttributeError:
            pass
        # The oauthlib Client performs the actual signing; all parameters
        # are forwarded unchanged.
        self.client = Client(client_key, client_secret, resource_owner_key,
            resource_owner_secret, callback_uri, signature_method,
            signature_type, rsa_key, verifier, decoding=decoding)
    def __call__(self, r):
        """Add OAuth parameters to the request.

        Parameters may be included from the body if the content-type is
        urlencoded, if no content type is set a guess is made.
        """
        # Overwriting url is safe here as request will not modify it past
        # this point.
        content_type = r.headers.get('Content-Type', '')
        if not content_type and extract_params(r.body):
            # No explicit content type, but the body parses as form
            # parameters -- treat the request as form-encoded.
            content_type = CONTENT_TYPE_FORM_URLENCODED
        if not isinstance(content_type, unicode):
            content_type = content_type.decode('utf-8')
        is_form_encoded = (CONTENT_TYPE_FORM_URLENCODED in content_type)
        if is_form_encoded:
            # Form-encoded bodies are included in the signing call.
            r.headers['Content-Type'] = CONTENT_TYPE_FORM_URLENCODED
            r.url, headers, r.body = self.client.sign(
                unicode(r.url), unicode(r.method), r.body or '', r.headers)
        else:
            # Omit body data in the signing of non form-encoded requests
            r.url, headers, _ = self.client.sign(
                unicode(r.url), unicode(r.method), None, r.headers)
        r.prepare_headers(headers)
        r.url = to_native_string(r.url)
        return r
| {
"content_hash": "ff816b98cfac36cacd52a3a647e06bb8",
"timestamp": "",
"source": "github",
"line_count": 65,
"max_line_length": 80,
"avg_line_length": 38.43076923076923,
"alnum_prop": 0.6369095276220976,
"repo_name": "collabspot/muninn",
"id": "1ad189318d0ba2478c949dc975ea936e6fa60ff0",
"size": "2522",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "lib/requests_oauthlib/oauth1_auth.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "131644"
},
{
"name": "JavaScript",
"bytes": "907591"
},
{
"name": "Python",
"bytes": "1343756"
},
{
"name": "Shell",
"bytes": "77"
}
],
"symlink_target": ""
} |
from contact_form.forms import ContactForm as BaseContactForm
class ContactForm(BaseContactForm):
    """Contact form with its field labels and help text in Portuguese."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Relabel the inherited fields.
        for field_name, label in (('name', 'Nome'),
                                  ('email', 'Email'),
                                  ('body', 'Mensagem')):
            self.fields[field_name].label = label
        self.fields['body'].help_text = (
            'Deixe sua mensagem e nós retornaremos o quanto antes.')
| {
"content_hash": "9ff4fddcb710a8578a8356af403d3a45",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 67,
"avg_line_length": 35.666666666666664,
"alnum_prop": 0.6214953271028038,
"repo_name": "caioariede/openimob",
"id": "593913bbebac82940d15d28d2759799039f4cb2f",
"size": "429",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "website/forms.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "57546"
},
{
"name": "HTML",
"bytes": "52533"
},
{
"name": "JavaScript",
"bytes": "33743"
},
{
"name": "Python",
"bytes": "52176"
}
],
"symlink_target": ""
} |
import gi
# import GStreamer and GLib-Helper classes
gi.require_version('Gtk', '3.0')
gi.require_version('Gst', '1.0')
gi.require_version('GstVideo', '1.0')
gi.require_version('GstNet', '1.0')
from gi.repository import Gtk, Gdk, Gst, GstVideo
import signal
import logging
import sys
import os
sys.path.insert(0, '.')
from vocto.debug import gst_log_messages
# check min-version
minGst = (1, 5)  # minimum required GStreamer version
minPy = (3, 0)   # minimum required Python version
Gst.init([])
if Gst.version() < minGst:
    raise Exception('GStreamer version', Gst.version(),
                    'is too old, at least', minGst, 'is required')
if sys.version_info < minPy:
    raise Exception('Python version', sys.version_info,
                    'is too old, at least', minPy, 'is required')
Gdk.init([])
Gtk.init([])
# select Adwaita:Dark theme
settings = Gtk.Settings.get_default()
settings.set_property("gtk-theme-name", "Adwaita")
settings.set_property("gtk-application-prefer-dark-theme", True)  # True selects the dark variant of the theme
# main class
class Voctogui(object):
    """Top-level GUI application: loads the Glade UI file and CSS style
    sheet, then runs the GTK main loop."""
    def __init__(self):
        self.log = logging.getLogger('Voctogui')
        from lib.args import Args
        from lib.ui import Ui
        # Load UI file
        path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'ui/voctogui.ui')
        self.log.info('Loading ui-file from file %s', path)
        if os.path.isfile(path):
            self.ui = Ui(path)
        else:
            raise Exception("Can't find any .ui-Files to use in {}".format(path))
        #
        # search for a .css style sheet file and load it
        #
        css_provider = Gtk.CssProvider()
        context = Gtk.StyleContext()
        path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'ui/voctogui.css')
        self.log.info('Loading css-file from file %s', path)
        if os.path.isfile(path):
            css_provider.load_from_path(path)
        else:
            raise Exception("Can't find .css file '{}'".format(path))
        # Apply the style sheet screen-wide with user priority.
        context.add_provider_for_screen(
            Gdk.Screen.get_default(),
            css_provider,
            Gtk.STYLE_PROVIDER_PRIORITY_USER
        )
        self.ui.setup()
    def run(self):
        """Show the window and block in the GTK main loop until it quits."""
        self.log.info('Setting UI visible')
        self.ui.show()
        try:
            self.log.info('Running.')
            Gtk.main()
            self.log.info('Connection lost. Exiting.')
        except KeyboardInterrupt:
            self.log.info('Terminated via Ctrl-C')
    def quit(self):
        """Stop the GTK main loop."""
        self.log.info('Quitting.')
        Gtk.main_quit()
# run mainclass
def main():
    """Entry point: parse args, configure logging, connect to the
    voctocore server and run the GUI main loop.

    Raises:
        RuntimeError: propagated from connection/config failures; handled
            by the ``__main__`` guard below.
    """
    # parse command-line args
    from lib import args
    args.parse()
    from lib.args import Args

    docolor = (Args.color == 'always') \
        or (Args.color == 'auto' and sys.stderr.isatty())

    from lib.loghandler import LogHandler
    handler = LogHandler(docolor, Args.timestamp)
    logging.root.addHandler(handler)

    # translate the -v / gstreamer verbosity counters into log levels
    levels = {3: logging.DEBUG, 2: logging.INFO,
              1: logging.WARNING, 0: logging.ERROR}
    logging.root.setLevel(levels[Args.verbose])
    gst_levels = {3: Gst.DebugLevel.DEBUG, 2: Gst.DebugLevel.INFO,
                  1: Gst.DebugLevel.WARNING, 0: Gst.DebugLevel.ERROR}
    gst_log_messages(gst_levels[Args.gstreamer_log])

    # make killable by ctrl-c
    logging.debug('setting SIGINT handler')
    signal.signal(signal.SIGINT, signal.SIG_DFL)

    logging.info('Python Version: %s', sys.version_info)
    logging.info('GStreamer Version: %s', Gst.version())

    logging.debug('loading Config')
    from lib import config
    config.load()
    from lib.config import Config

    # establish a synchronus connection to server
    import lib.connection as Connection
    Connection.establish(Config.getHost())

    # fetch config from server
    Config.fetchServerConfig()

    # Warn when connecting to a non-local core without preview-encoders enabled
    # The list-comparison is not complete
    # (one could use a local hostname or the local system ip),
    # but it's only here to warn that one might be making a mistake
    localhosts = ['::1',
                  '127.0.0.1',
                  'localhost']
    if not Config.getPreviewsEnabled() and Config.getHost() not in localhosts:
        logging.warning(
            'Connecting to `%s` (which looks like a remote host) '
            'might not work without enabling the preview encoders '
            '(set `[previews] enabled=true` on the core) or it might saturate '
            'your ethernet link between the two machines.',
            Config.getHost()
        )

    # (duplicate `import lib.connection as Connection` removed -- it is
    # already imported above)
    import lib.clock as ClockManager

    # obtain network-clock
    ClockManager.obtainClock(Connection.ip)

    # switch connection to nonblocking, event-driven mode
    Connection.enterNonblockingMode()

    # init main-class and main-loop
    # (this binds all event-hander on the Connection)
    logging.debug('initializing Voctogui')
    voctogui = Voctogui()

    # start the Mainloop and show the Window
    logging.debug('running Voctogui')
    voctogui.run()
if __name__ == '__main__':
    try:
        main()
    except RuntimeError as e:
        # report fatal errors without a traceback and signal failure to the shell
        logging.error(str(e))
        sys.exit(1)
| {
"content_hash": "bff3f49ae10f6061e4335d3c5600fd5a",
"timestamp": "",
"source": "github",
"line_count": 173,
"max_line_length": 124,
"avg_line_length": 29.965317919075144,
"alnum_prop": 0.6313657407407407,
"repo_name": "voc/voctomix",
"id": "994a1e1fcd359465ca97c359031637fc38b19387",
"size": "5207",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "voctogui/voctogui.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "2621"
},
{
"name": "Dockerfile",
"bytes": "2626"
},
{
"name": "Python",
"bytes": "350063"
},
{
"name": "Shell",
"bytes": "25187"
}
],
"symlink_target": ""
} |
# Default telescope configuration constants, read at module level.
# Each value can be overridden by supplying an alternate input file.
mode = 'IFS' # Telescope observing modes: 'IFS', 'Imaging'
lammin = 0.3 # Minimum wavelength (um)
lammax = 2.0 # Maximum wavelength (um)
resolution = 70. # Spectral resolution (lambda / delta-lambda)
throughput = 0.2 # Telescope throughput
diameter = 8.0 # Telescope diameter (m)
Tsys = 274. # Telescope temperature (K)
Tdet = 50. # Detector temperature (K)
IWA = 0.5 # Inner Working Angle (lambda/D)
OWA = 30000. # Outer Working Angle (lambda/D)
emissivity = 0.9 # Telescope emissivity
contrast = 1e-10 # Raw Contrast
darkcurrent = 1e-4 # Dark current (s**-1)
DNHpix = 3. # Horizontal pixel spread of IFS spectrum
readnoise = 0.1 # Read noise per pixel
Dtmax = 1.0 # Maximum exposure time (hr)
X = 0.7 # Size of photometric aperture (lambda/D)
qe = 0.9 # Quantum efficiency
filter_wheel = None # Filter Wheel to use if in Imaging mode
| {
"content_hash": "3067a481e4056a35e71f620ab6504db3",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 66,
"avg_line_length": 46.333333333333336,
"alnum_prop": 0.6115107913669064,
"repo_name": "jlustigy/coronagraph",
"id": "5374265494fbed2c0866a843fce903bca9d40dd6",
"size": "973",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "coronagraph/inputs/input_default_telescope.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "208403"
}
],
"symlink_target": ""
} |
from random import choice
from django.db.utils import IntegrityError
from django.template.defaultfilters import slugify
from django.core.management.base import BaseCommand, CommandError
from servo.models import Customer, Order, User, Location, GsxAccount
class Command(BaseCommand):
    """Anonymise personal data (technicians, locations, GSX accounts) in
    this Servo install, so a database dump can be shared safely.

    Fixes over the previous version: ``help`` is now a proper Django
    class attribute (it was a dead local inside ``handle``), unused
    locals were removed, and output goes through ``self.stdout.write``.
    """

    help = "Obfuscates the information in this Servo install"

    def handle(self, *args, **options):
        """Replace real names and contact details with generated values."""
        # Pool of fake "Firstname Lastname" strings used for all substitutions
        names = ('Daniel Scott', 'Amy Collins', 'Linda Moore',
                 'Dennis Parker', 'Mark Cox', 'Jesse Clark',
                 'Brian Patterson', 'Andrew Bennett', 'Frank Lopez',
                 'Benjamin Wood', 'Michelle Jenkins', 'Alice Lee',
                 'Lois Gonzales', 'Diane Perez', 'Cheryl Torres',
                 'Ernest Smith', 'Steve Mitchell', 'Barbara Jones',
                 'Wanda Roberts', 'Julie Watson', 'Carlos Harris',
                 'Anthony Phillips', 'Ralph Gray', 'Donna Hill',
                 'Alan Coleman', 'Lawrence Ross', 'Stephen Flores',
                 'Robert Simmons', 'Gloria White', 'Doris Wilson',
                 'Shirley Sanders', 'Matthew Bell', 'Janice Hughes',
                 'Walter Nelson', 'Gerald Taylor', 'Tammy Martin',
                 'Gregory Barnes', 'Jonathan Baker', 'Lillian Green',
                 'Brenda Hernandez', 'Denise Davis', 'Bobby Rogers',
                 'Joe Lewis', 'Teresa Bailey', 'Craig Russell',
                 'Angela Rivera', 'Rebecca Jackson', 'Nicole Henderson',
                 'Kenneth James', 'Nicholas Bryant', 'Anne Washington',
                 'Irene Miller', 'Theresa Martinez', 'Evelyn Sanchez',
                 'Richard Anderson', 'Jeffrey Robinson', 'Heather Diaz',
                 'Joshua Butler', 'Joan Peterson', 'Todd Campbell',
                 'Timothy Kelly', 'Steven King', 'Norma Reed',
                 'Carolyn Turner', 'Ruth Evans', 'Carol Thomas',
                 'Arthur Howard', 'Peter Carter', 'Debra Ramirez',
                 'Marie Walker', 'Donald Garcia', 'Janet Gonzalez',
                 'Harold Adams', 'Bonnie Cook', 'Paula Long',
                 'Bruce Griffin', 'Adam Hall', 'Annie Young',
                 'Jacqueline Alexander', 'Kimberly Edwards', 'Sarah Wright',
                 'Terry Williams', 'Johnny Morris', 'Andrea Ward',
                 'Margaret Allen', 'Sandra Price', 'Scott Foster',
                 'Elizabeth Brown', 'Wayne Cooper', 'Mildred Brooks',
                 'Dorothy Perry', 'Lori Powell', 'Kathryn Murphy',
                 'Judy Johnson', 'Albert Morgan', 'William Richardson',
                 'Randy Stewart', 'Roger Thompson', 'Anna Rodriguez',
                 )

        # Customer munging is intentionally disabled (kept for reference):
        # for i in Order.objects.filter(state=Order.STATE_QUEUED):
        #     if i.customer:
        #         i.customer.name = choice(names)
        #         i.customer.save()

        self.stdout.write('Munging technician names')
        for i in User.objects.exclude(username='filipp'):
            # random fake name; email is derived from the (kept) username
            i.first_name, i.last_name = choice(names).split()
            i.email = i.username + '@example.com'
            i.save()

        self.stdout.write('Munging location names')
        a = 65
        for i in Location.objects.all():
            # i.title = 'Location %s' % chr(a)
            i.email = slugify(i.title) + '@example.com'
            i.city = 'Cupertino'
            i.phone = '0451 202 7' + str(a)
            i.address = '1 Infinite Loop'
            a += 1
            i.save()

        self.stdout.write('Munging GSX account names')
        a = 65
        for i in GsxAccount.objects.all():
            i.title = 'GSX Account %s' % chr(a)
            a += 1
            i.save()
| {
"content_hash": "34b0ae8d973d3ba736ed14b0ab9acba1",
"timestamp": "",
"source": "github",
"line_count": 81,
"max_line_length": 71,
"avg_line_length": 45.27160493827161,
"alnum_prop": 0.5674938641941641,
"repo_name": "filipp/Servo",
"id": "86738469be76cbcf3a25f8d02667c5cd17f40ee7",
"size": "5045",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "servo/management/commands/obfuscate.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "CSS",
"bytes": "114750"
},
{
"name": "HTML",
"bytes": "493143"
},
{
"name": "JavaScript",
"bytes": "430810"
},
{
"name": "Makefile",
"bytes": "297"
},
{
"name": "Python",
"bytes": "998166"
},
{
"name": "Shell",
"bytes": "680"
}
],
"symlink_target": ""
} |
try:
import unittest2 as unittest
except ImportError: # pragma NO COVER
import unittest # noqa
from crabpy.client import (
capakey_factory
)
from crabpy.gateway.capakey import (
CapakeyGateway,
Gemeente,
Afdeling,
Sectie,
Perceel
)
def run_capakey_integration_tests():
    """Tell whether the CAPAKEY integration tests should run.

    Reads the ``run_integration_tests`` flag from the test configuration;
    when the key is absent the integration tests are skipped.
    """
    from testconfig import config
    from crabpy.tests import as_bool
    try:
        return as_bool(config['capakey']['run_integration_tests'])
    except KeyError:  # pragma NO COVER
        return False
@unittest.skipUnless(
    run_capakey_integration_tests(),
    'No CAPAKEY Integration tests required'
)
class CapakeyGatewayTests(unittest.TestCase):
    """Integration tests for CapakeyGateway against the live CAPAKEY
    webservice. Only executed when enabled in the test configuration."""

    def setUp(self):
        # gateway backed by a real client; credentials from the test config
        from testconfig import config
        self.capakey_client = capakey_factory(
            user=config['capakey']['user'],
            password=config['capakey']['password']
        )
        self.capakey = CapakeyGateway(
            self.capakey_client
        )

    def tearDown(self):
        self.capakey_client = None
        self.capakey = None

    def test_list_gemeenten(self):
        res = self.capakey.list_gemeenten()
        self.assertIsInstance(res, list)

    def test_list_gemeenten_invalid_auth(self):
        # bogus credentials must surface as GatewayAuthenticationException
        self.capakey_client = capakey_factory(
            user='USER',
            password='PASSWORD'
        )
        self.capakey = CapakeyGateway(
            self.capakey_client
        )
        from crabpy.gateway.exception import GatewayAuthenticationException
        with self.assertRaises(GatewayAuthenticationException):
            self.capakey.list_gemeenten()

    def test_get_gemeente_by_id(self):
        res = self.capakey.get_gemeente_by_id(44021)
        self.assertIsInstance(res, Gemeente)
        self.assertEqual(res.id, 44021)

    def test_get_gemeente_by_invalid_id(self):
        # a non-numeric id is a runtime error, not a "not found"
        from crabpy.gateway.exception import GatewayRuntimeException
        with self.assertRaises(GatewayRuntimeException):
            self.capakey.get_gemeente_by_id('gent')

    def test_list_afdelingen(self):
        res = self.capakey.list_kadastrale_afdelingen()
        self.assertIsInstance(res, list)
        self.assertGreater(len(res), 300)

    def test_list_afdelingen_by_gemeente(self):
        g = self.capakey.get_gemeente_by_id(44021)
        res = self.capakey.list_kadastrale_afdelingen_by_gemeente(g)
        self.assertIsInstance(res, list)
        self.assertGreater(len(res), 0)
        self.assertLess(len(res), 40)

    def test_list_afdelingen_by_gemeente_id(self):
        # the gateway also accepts a plain niscode instead of a Gemeente
        res = self.capakey.list_kadastrale_afdelingen_by_gemeente(44021)
        self.assertIsInstance(res, list)
        self.assertGreater(len(res), 0)
        self.assertLess(len(res), 40)

    def test_get_kadastrale_afdeling_by_id(self):
        res = self.capakey.get_kadastrale_afdeling_by_id(44021)
        self.assertIsInstance(res, Afdeling)
        self.assertEqual(res.id, 44021)
        self.assertIsInstance(res.gemeente, Gemeente)
        self.assertEqual(res.gemeente.id, 44021)

    def test_list_secties_by_afdeling(self):
        a = self.capakey.get_kadastrale_afdeling_by_id(44021)
        res = self.capakey.list_secties_by_afdeling(a)
        self.assertIsInstance(res, list)
        self.assertEqual(len(res), 1)

    def test_list_secties_by_afdeling_id(self):
        # also accepts the afdeling id directly
        res = self.capakey.list_secties_by_afdeling(44021)
        self.assertIsInstance(res, list)
        self.assertEqual(len(res), 1)

    def test_get_sectie_by_id_and_afdeling(self):
        a = self.capakey.get_kadastrale_afdeling_by_id(44021)
        res = self.capakey.get_sectie_by_id_and_afdeling('A', a)
        self.assertIsInstance(res, Sectie)
        self.assertEqual(res.id, 'A')
        self.assertEqual(res.afdeling.id, 44021)

    def test_list_percelen_by_sectie(self):
        s = self.capakey.get_sectie_by_id_and_afdeling('A', 44021)
        res = self.capakey.list_percelen_by_sectie(s)
        self.assertIsInstance(res, list)
        self.assertGreater(len(res), 0)

    def test_get_perceel_by_id_and_sectie(self):
        s = self.capakey.get_sectie_by_id_and_afdeling('A', 44021)
        percelen = self.capakey.list_percelen_by_sectie(s)
        perc = percelen[0]
        res = self.capakey.get_perceel_by_id_and_sectie(perc.id, s)
        self.assertIsInstance(res, Perceel)
        self.assertEqual(res.sectie.id, 'A')
        self.assertEqual(res.sectie.afdeling.id, 44021)

    def test_get_perceel_by_capakey(self):
        s = self.capakey.get_sectie_by_id_and_afdeling('A', 44021)
        percelen = self.capakey.list_percelen_by_sectie(s)
        perc = percelen[0]
        res = self.capakey.get_perceel_by_capakey(perc.capakey)
        self.assertIsInstance(res, Perceel)
        self.assertEqual(res.sectie.id, 'A')
        self.assertEqual(res.sectie.afdeling.id, 44021)

    def test_get_perceel_by_percid(self):
        s = self.capakey.get_sectie_by_id_and_afdeling('A', 44021)
        percelen = self.capakey.list_percelen_by_sectie(s)
        perc = percelen[0]
        res = self.capakey.get_perceel_by_percid(perc.percid)
        self.assertIsInstance(res, Perceel)
        self.assertEqual(res.sectie.id, 'A')
        self.assertEqual(res.sectie.afdeling.id, 44021)
class GemeenteTests(unittest.TestCase):
    """Unit tests for the Gemeente resource object; the lazy-load tests
    additionally need the live webservice and are skipped otherwise."""

    def test_fully_initialised(self):
        g = Gemeente(
            44021,
            'Gent',
            (104154.2225, 197300.703),
            (94653.453, 185680.984, 113654.992, 208920.422)
        )
        self.assertEqual(g.id, 44021)
        self.assertEqual(g.naam, 'Gent')
        self.assertEqual(g.centroid, (104154.2225, 197300.703))
        self.assertEqual(
            g.bounding_box,
            (94653.453, 185680.984, 113654.992, 208920.422)
        )
        self.assertEqual('Gent (44021)', str(g))
        self.assertEqual("Gemeente(44021, 'Gent')", repr(g))

    def test_str_and_repr_dont_lazy_load(self):
        # str()/repr() on a partially loaded object must not hit the service
        g = Gemeente(44021)
        self.assertEqual('Gemeente 44021', str(g))
        self.assertEqual('Gemeente(44021)', repr(g))

    def test_check_gateway_not_set(self):
        g = Gemeente(44021)
        self.assertRaises(RuntimeError, g.check_gateway)

    @unittest.skipUnless(
        run_capakey_integration_tests(),
        'No CAPAKEY Integration tests required'
    )
    def test_lazy_load(self):
        # accessing unset attributes triggers a fetch through the gateway
        from testconfig import config
        capakey = CapakeyGateway(
            capakey_factory(
                user=config['capakey']['user'],
                password=config['capakey']['password']
            )
        )
        g = Gemeente(44021)
        g.set_gateway(capakey)
        self.assertEqual(g.id, 44021)
        self.assertEqual(g.naam, 'Gent')
        self.assertIsNotNone(g.centroid)
        self.assertIsNotNone(g.bounding_box)

    @unittest.skipUnless(
        run_capakey_integration_tests(),
        'No CAPAKEY Integration tests required'
    )
    def test_afdelingen(self):
        from testconfig import config
        capakey = CapakeyGateway(
            capakey_factory(
                user=config['capakey']['user'],
                password=config['capakey']['password']
            )
        )
        g = Gemeente(44021)
        g.set_gateway(capakey)
        afdelingen = g.afdelingen
        self.assertIsInstance(afdelingen, list)
        self.assertGreater(len(afdelingen), 0)
        self.assertLess(len(afdelingen), 40)
class AfdelingTests(unittest.TestCase):
    """Unit tests for the Afdeling resource object; lazy-load tests need
    the live webservice and are skipped otherwise."""

    def test_fully_initialised(self):
        a = Afdeling(
            44021,
            'GENT 1 AFD',
            Gemeente(44021, 'Gent'),
            (104893.06375, 196022.244094),
            (104002.076625, 194168.3415, 105784.050875, 197876.146688)
        )
        self.assertEqual(a.id, 44021)
        self.assertEqual(a.naam, 'GENT 1 AFD')
        self.assertEqual(a.centroid, (104893.06375, 196022.244094))
        self.assertEqual(
            a.bounding_box,
            (104002.076625, 194168.3415, 105784.050875, 197876.146688)
        )
        self.assertEqual('GENT 1 AFD (44021)', str(a))
        self.assertEqual("Afdeling(44021, 'GENT 1 AFD')", repr(a))

    def test_to_string_not_fully_initialised(self):
        # without a naam the string form falls back to the id only
        a = Afdeling(
            44021
        )
        self.assertEqual('Afdeling 44021', str(a))

    def test_check_gateway_not_set(self):
        a = Afdeling(44021)
        self.assertRaises(RuntimeError, a.check_gateway)

    @unittest.skipUnless(
        run_capakey_integration_tests(),
        'No CAPAKEY Integration tests required'
    )
    def test_lazy_load(self):
        from testconfig import config
        capakey = CapakeyGateway(
            capakey_factory(
                user=config['capakey']['user'],
                password=config['capakey']['password']
            )
        )
        a = Afdeling(44021)
        a.set_gateway(capakey)
        self.assertEqual(a.id, 44021)
        self.assertEqual(a.naam, 'GENT 1 AFD')
        self.assertIsNotNone(a.centroid)
        self.assertIsNotNone(a.bounding_box)

    @unittest.skipUnless(
        run_capakey_integration_tests(),
        'No CAPAKEY Integration tests required'
    )
    def test_secties(self):
        from testconfig import config
        capakey = CapakeyGateway(
            capakey_factory(
                user=config['capakey']['user'],
                password=config['capakey']['password']
            )
        )
        a = Afdeling(44021)
        a.set_gateway(capakey)
        secties = a.secties
        self.assertIsInstance(secties, list)
        self.assertEqual(len(secties), 1)
class SectieTests(unittest.TestCase):
    """Unit tests for the Sectie resource object; lazy-load tests need
    the live webservice and are skipped otherwise."""

    def test_fully_initialised(self):
        s = Sectie(
            'A',
            Afdeling(44021, 'Gent 1 AFD'),
            (104893.06375, 196022.244094),
            (104002.076625, 194168.3415, 105784.050875, 197876.146688)
        )
        self.assertEqual(s.id, 'A')
        self.assertEqual(s.centroid, (104893.06375, 196022.244094))
        self.assertEqual(
            s.bounding_box,
            (104002.076625, 194168.3415, 105784.050875, 197876.146688)
        )
        self.assertEqual('Gent 1 AFD (44021), Sectie A', str(s))
        self.assertEqual(
            "Sectie('A', Afdeling(44021, 'Gent 1 AFD'))",
            repr(s)
        )

    def test_check_gateway_not_set(self):
        s = Sectie('A', Afdeling(44021))
        self.assertRaises(RuntimeError, s.check_gateway)

    @unittest.skipUnless(
        run_capakey_integration_tests(),
        'No CAPAKEY Integration tests required'
    )
    def test_lazy_load(self):
        # accessing unset attributes triggers a fetch through the gateway
        from testconfig import config
        capakey = CapakeyGateway(
            capakey_factory(
                user=config['capakey']['user'],
                password=config['capakey']['password']
            )
        )
        s = Sectie(
            'A',
            Afdeling(44021)
        )
        s.set_gateway(capakey)
        self.assertEqual(s.id, 'A')
        self.assertEqual(s.afdeling.id, 44021)
        self.assertIsNotNone(s.centroid)
        self.assertIsNotNone(s.bounding_box)

    @unittest.skipUnless(
        run_capakey_integration_tests(),
        'No CAPAKEY Integration tests required'
    )
    def test_percelen(self):
        from testconfig import config
        capakey = CapakeyGateway(
            capakey_factory(
                user=config['capakey']['user'],
                password=config['capakey']['password']
            )
        )
        s = Sectie(
            'A',
            Afdeling(44021)
        )
        s.set_gateway(capakey)
        percelen = s.percelen
        self.assertIsInstance(percelen, list)
        self.assertGreater(len(percelen), 0)
class PerceelTests(unittest.TestCase):
    """Unit tests for the Perceel resource object, including parsing of
    the capakey into grondnummer/bisnummer/exponent/macht."""

    def test_fully_initialised(self):
        p = Perceel(
            '1154/02C000', Sectie('A', Afdeling(46013)),
            '40613A1154/02C000', '40613_A_1154_C_000_02',
            'capaty', 'cashkey',
            (104893.06375, 196022.244094),
            (104002.076625, 194168.3415, 105784.050875, 197876.146688)
        )
        self.assertEqual(p.id, ('1154/02C000'))
        self.assertEqual(p.sectie.id, 'A')
        self.assertEqual(
            p.centroid,
            (104893.06375, 196022.244094)
        )
        self.assertEqual(
            p.bounding_box,
            (104002.076625, 194168.3415, 105784.050875, 197876.146688)
        )
        self.assertEqual(p.capakey, str(p))
        self.assertEqual(
            "Perceel('1154/02C000', Sectie('A', Afdeling(46013)), '40613A1154/02C000', '40613_A_1154_C_000_02')",
            repr(p)
        )

    def test_check_gateway_not_set(self):
        p = Perceel(
            '1154/02C000', Sectie('A', Afdeling(46013)),
            '40613A1154/02C000', '40613_A_1154_C_000_02'
        )
        self.assertRaises(RuntimeError, p.check_gateway)

    @unittest.skipUnless(
        run_capakey_integration_tests(),
        'No CAPAKEY Integration tests required'
    )
    def test_lazy_load(self):
        from testconfig import config
        capakey = CapakeyGateway(
            capakey_factory(
                user=config['capakey']['user'],
                password=config['capakey']['password']
            )
        )
        p = Perceel(
            '1154/02C000', Sectie('A', Afdeling(46013)),
            '40613A1154/02C000', '40613_A_1154_C_000_02',
            gateway=capakey
        )
        self.assertEqual(p.id, '1154/02C000')
        self.assertEqual(p.sectie.id, 'A')
        self.assertEqual(p.sectie.afdeling.id, 46013)
        self.assertIsNotNone(p.centroid)
        self.assertIsNotNone(p.bounding_box)

    def test_parse_capakey(self):
        # capakey components are split into the four perceel number parts
        p = Perceel(
            '1154/02C000', Sectie('A', Afdeling(46013)),
            '40613A1154/02C000', '40613_A_1154_C_000_02'
        )
        self.assertEqual(p.grondnummer, '1154')
        self.assertEqual(p.bisnummer, '02')
        self.assertEqual(p.exponent, 'C')
        self.assertEqual(p.macht, '000')

    def test_parse_capakey_other_sectie(self):
        p = Perceel(
            '1154/02C000', Sectie('F', Afdeling(46013)),
            '40613F1154/02C000', '40613_F_1154_C_000_02'
        )
        self.assertEqual(p.grondnummer, '1154')
        self.assertEqual(p.bisnummer, '02')
        self.assertEqual(p.exponent, 'C')
        self.assertEqual(p.macht, '000')

    def test_parse_invalid_capakey(self):
        # swapping capakey and percid arguments must be rejected
        with self.assertRaises(ValueError):
            Perceel(
                '1154/02C000', Sectie('A', Afdeling(46013)),
                '40613_A_1154_C_000_02',
                '40613A1154/02C000',
            )
@unittest.skipUnless(run_capakey_integration_tests(), 'No CAPAKEY Integration tests required')
class CapakeyCachedGatewayTests(unittest.TestCase):
    """Integration tests for CapakeyGateway with dogpile caching enabled.

    Each test verifies that the gateway result also landed in the expected
    cache region under the expected cache key.
    """

    def setUp(self):
        from testconfig import config
        self.capakey_client = capakey_factory(
            user=config['capakey']['user'],
            password=config['capakey']['password']
        )
        # three in-memory cache regions with decreasing lifetimes
        self.capakey = CapakeyGateway(
            self.capakey_client,
            cache_config = {
                'permanent.backend': 'dogpile.cache.memory',
                'permanent.expiration_time': 86400,
                'long.backend': 'dogpile.cache.memory',
                'long.expiration_time': 3600,
                'short.backend': 'dogpile.cache.memory',
                'short.expiration_time': 600,
            }
        )

    def tearDown(self):
        self.capakey_client = None
        self.capakey = None

    def test_cache_is_configured(self):
        from dogpile.cache.backends.memory import MemoryBackend
        self.assertIsInstance(
            self.capakey.caches['permanent'].backend,
            MemoryBackend
        )
        self.assertTrue(self.capakey.caches['permanent'].is_configured)

    def test_list_gemeenten(self):
        res = self.capakey.list_gemeenten()
        self.assertIsInstance(res, list)
        self.assertEqual(
            self.capakey.caches['permanent'].get('ListAdmGemeenten#1'),
            res
        )

    def test_list_gemeenten_different_sort(self):
        # the sort parameter is part of the cache key
        res = self.capakey.list_gemeenten(2)
        self.assertIsInstance(res, list)
        self.assertEqual(
            self.capakey.caches['permanent'].get('ListAdmGemeenten#2'),
            res
        )
        from dogpile.cache.api import NO_VALUE
        self.assertEqual(
            self.capakey.caches['permanent'].get('ListAdmGemeenten#1'),
            NO_VALUE
        )

    def test_get_gemeente_by_id(self):
        res = self.capakey.get_gemeente_by_id(44021)
        self.assertIsInstance(res, Gemeente)
        self.assertEqual(
            self.capakey.caches['long'].get('GetAdmGemeenteByNiscode#44021'),
            res
        )

    def test_list_afdelingen(self):
        res = self.capakey.list_kadastrale_afdelingen()
        self.assertIsInstance(res, list)
        self.assertEqual(
            self.capakey.caches['permanent'].get('ListKadAfdelingen#1'),
            res
        )

    def test_list_afdelingen_by_gemeente(self):
        g = self.capakey.get_gemeente_by_id(44021)
        self.assertEqual(
            self.capakey.caches['long'].get('GetAdmGemeenteByNiscode#44021'),
            g
        )
        res = self.capakey.list_kadastrale_afdelingen_by_gemeente(g)
        self.assertIsInstance(res, list)
        self.assertEqual(
            self.capakey.caches['permanent'].get('ListKadAfdelingenByNiscode#44021#1'),
            res
        )

    def test_get_kadastrale_afdeling_by_id(self):
        res = self.capakey.get_kadastrale_afdeling_by_id(44021)
        self.assertIsInstance(res, Afdeling)
        self.assertEqual(res.id, 44021)
        self.assertIsInstance(res.gemeente, Gemeente)
        self.assertEqual(res.gemeente.id, 44021)
        self.assertEqual(
            self.capakey.caches['long'].get('GetKadAfdelingByKadAfdelingcode#44021'),
            res
        )

    def test_list_secties_by_afdeling_id(self):
        res = self.capakey.list_secties_by_afdeling(44021)
        self.assertIsInstance(res, list)
        self.assertEqual(len(res), 1)
        self.assertEqual(
            self.capakey.caches['long'].get('ListKadSectiesByKadAfdelingcode#44021'),
            res
        )

    def test_get_sectie_by_id_and_afdeling(self):
        a = self.capakey.get_kadastrale_afdeling_by_id(44021)
        res = self.capakey.get_sectie_by_id_and_afdeling('A', a)
        self.assertIsInstance(res, Sectie)
        self.assertEqual(res.id, 'A')
        self.assertEqual(res.afdeling.id, 44021)
        self.assertEqual(
            self.capakey.caches['long'].get('GetKadSectieByKadSectiecode#44021#A'),
            res
        )

    def test_list_percelen_by_sectie(self):
        s = self.capakey.get_sectie_by_id_and_afdeling('A', 44021)
        res = self.capakey.list_percelen_by_sectie(s)
        self.assertIsInstance(res, list)
        self.assertGreater(len(res), 0)
        self.assertEqual(
            self.capakey.caches['short'].get('ListKadPerceelsnummersByKadSectiecode#44021#A#1'),
            res
        )

    def test_get_perceel_by_id_and_sectie(self):
        s = self.capakey.get_sectie_by_id_and_afdeling('A', 44021)
        percelen = self.capakey.list_percelen_by_sectie(s)
        perc = percelen[0]
        res = self.capakey.get_perceel_by_id_and_sectie(perc.id, s)
        self.assertIsInstance(res, Perceel)
        self.assertEqual(res.sectie.id, 'A')
        self.assertEqual(res.sectie.afdeling.id, 44021)
        self.assertEqual(
            self.capakey.caches['short'].get('GetKadPerceelsnummerByKadPerceelsnummer#44021#A#%s' % perc.id),
            res
        )

    def test_get_perceel_by_capakey(self):
        s = self.capakey.get_sectie_by_id_and_afdeling('A', 44021)
        percelen = self.capakey.list_percelen_by_sectie(s)
        perc = percelen[0]
        res = self.capakey.get_perceel_by_capakey(perc.capakey)
        self.assertIsInstance(res, Perceel)
        self.assertEqual(res.sectie.id, 'A')
        self.assertEqual(res.sectie.afdeling.id, 44021)
        self.assertEqual(
            self.capakey.caches['short'].get('GetKadPerceelsnummerByCaPaKey#%s' % perc.capakey),
            res
        )

    def test_get_perceel_by_percid(self):
        s = self.capakey.get_sectie_by_id_and_afdeling('A', 44021)
        percelen = self.capakey.list_percelen_by_sectie(s)
        perc = percelen[0]
        res = self.capakey.get_perceel_by_percid(perc.percid)
        self.assertIsInstance(res, Perceel)
        self.assertEqual(res.sectie.id, 'A')
        self.assertEqual(res.sectie.afdeling.id, 44021)
        self.assertEqual(
            self.capakey.caches['short'].get('GetKadPerceelsnummerByPERCID#%s' % perc.percid),
            res
        )
| {
"content_hash": "93a32cbb3bb14b1035e8d6b6e19b401a",
"timestamp": "",
"source": "github",
"line_count": 601,
"max_line_length": 113,
"avg_line_length": 34.51414309484193,
"alnum_prop": 0.6003471050474859,
"repo_name": "kmillet/crabpytest",
"id": "4cee8b631899755173d5079ec958c5038161b4c1",
"size": "20768",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "crabpy/tests/gateway/test_capakey.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "68652"
},
{
"name": "Shell",
"bytes": "6701"
}
],
"symlink_target": ""
} |
import random
import pygame
from pygame.locals import *
from gamelib.palette import *
from gamelib.gamemodels import *
# CHEAT: when True, DrawMap renders every sector instead of only the
# ones marked "visited" (debug/testing aid).
CHEAT = True
def DrawForeground(Surface, State, ScreenSize):
    """Draw the lower half of the screen: up to three ground sectors
    ahead of the player (back, middle, front) with their scenery.

    NOTE(review): DrawForeground2 looks like a newer, unified version of
    this routine -- confirm whether this variant is still called.

    Fix: removed a leftover debug print of SectorDetails that spammed
    stdout every frame.
    """
    fgRect = Rect(0,int(ScreenSize[1]/2),ScreenSize[0],int(ScreenSize[1]/2))
    SectorDetails = State.GetSectorsAhead()

    #Back Sector
    if not SectorDetails[2] is None:
        bsr = Rect(0, fgRect.h, fgRect.w, int(fgRect.h/4))
        DrawGradient(Surface, GetGroundColour(SectorDetails[2],State.DayPhase), bsr)

        if "tree" in SectorDetails[2]:
            DrawTree(Surface, SectorDetails[2]["tree"], bsr, 0.25)

    #Mid Sector
    if not SectorDetails[1] is None:
        msr = Rect(0, fgRect.h+int(fgRect.h/4), fgRect.w, int(fgRect.h/4))
        DrawGradient(Surface, GetGroundColour(SectorDetails[1],State.DayPhase), msr)

        if "tree" in SectorDetails[1]:
            DrawTree(Surface, SectorDetails[1]["tree"], msr, 0.5)

    #Front Sector
    fsr = Rect(0, fgRect.h+int(fgRect.h/2), fgRect.w, int(fgRect.h/2))
    DrawGradient(Surface, GetGroundColour(SectorDetails[0],State.DayPhase), fsr)

    if "tree" in SectorDetails[0]:
        DrawTree(Surface, SectorDetails[0]["tree"], fsr)

    if "shore" in SectorDetails[0]:
        DrawReeds(Surface, SectorDetails[0]["shore"], fsr)
def DrawForeground2(Surface, State, ScreenSize):
    """Render the three ground bands (back, middle, front) of the sectors
    ahead of the player into the lower half of the screen."""
    half_h = int(ScreenSize[1]/2)
    fgRect = Rect(0, half_h, ScreenSize[0], half_h)
    quarter = int(fgRect.h/4)

    # (sector index, band rectangle, scenery scale) -- drawn back to front
    bands = (
        (2, Rect(0, fgRect.h, fgRect.w, quarter), 0.18),
        (1, Rect(0, fgRect.h + quarter, fgRect.w, quarter), 0.5),
        (0, Rect(0, fgRect.h + int(fgRect.h/2), fgRect.w, int(fgRect.h/2)), 1),
    )

    details = State.GetSectorsAhead()
    for idx, band_rect, scale in bands:
        if details[idx] is not None:
            DrawSector(Surface, State, details[idx], band_rect, scale)
def DrawSector(Surface, State, SectorDetails, sr, Scale):
    """Paint one ground band: gradient background plus any scenery
    objects recorded in the sector's detail dict."""
    DrawGradient(Surface, GetGroundColour(SectorDetails, State.DayPhase), sr)

    # scenery painters, applied in the original drawing order
    scenery = (
        ("tree", DrawTree),
        ("shore", DrawReeds),
        ("mushroom", DrawMushroom),
        ("flowers", DrawFlower),
    )
    for key, painter in scenery:
        if key in SectorDetails:
            painter(Surface, SectorDetails[key], sr, Scale)
def GetGroundColour(SectorDetails, DayPhase):
    """Pick the ground colour for a sector.

    Terrain keys are checked in priority order (water, shore, snow,
    tree, ground); when none matches a pinkish fallback colour is
    returned.
    """
    palettes = (
        ("water", Water),
        ("shore", Shore),
        ("snow", Snow),
        ("tree", WoodGround),
        ("ground", Ground),
    )
    for key, palette in palettes:
        if key in SectorDetails:
            return palette[DayPhase]
    return Color(255, 155, 155)
def DrawReeds(Surface, PosList, DispRect, Scale = 1):
    """Draw a freshly generated reed clump at every position in PosList."""
    for pos in PosList:
        DrawObject(Surface, MakeReeds(), DispRect, Scale, pos)
def DrawFlower(Surface, Pos, DispRect, Scale = 1):
    """Draw a flower object at the given sector-relative position.

    Fix: removed a leftover debug print("Draw Flower") that spammed
    stdout every frame.
    """
    t = MakeFlowers()
    DrawObject(Surface, t, DispRect, Scale, Pos)
def DrawMushroom(Surface, Pos, DispRect, Scale = 1):
    """Draw a mushroom at the given sector-relative position."""
    DrawObject(Surface, MakeMushroom(), DispRect, Scale, Pos)
def DrawTree(Surface, Pos, DispRect, Scale = 1):
    """Draw a tree at the given sector-relative position."""
    DrawObject(Surface, MakeTree(), DispRect, Scale, Pos)
def DrawObject(Surface, t, DispRect, Scale, Pos):
    """Scale a scenery object, position it inside DispRect and draw it.

    Pos is given as a percentage (0-100) of the band rectangle; the
    object is anchored by its bottom edge and clamped against the right
    edge of the band. The object `t` is mutated in place.
    """
    t.Width = int(t.Width*Scale)
    t.Height = int(t.Height*Scale)
    # translate the percentage position into pixel coordinates in the band
    t.Pos = [DispRect.x + int((Pos[0]/100)*DispRect.w), DispRect.y + int((Pos[1]/100)*DispRect.h)]
    # anchor the object by its bottom edge
    t.Pos[1] -= t.Size()[1]
    # clamp against the right edge (assumes DispRect.x == 0 -- TODO confirm)
    if t.Pos[0] + t.Size()[0]> DispRect.w:
        t.Pos[0] = DispRect.w - t.Size()[0]
    t.Draw(Surface)
def DrawGradient(Surface, SkyColor, SkyR):
    """Fill SkyR with ten horizontal strips whose blue component fades
    step by step, producing a simple vertical gradient."""
    strip_h = int(SkyR.h/10)
    for i in range(10):
        # blue channel decreases by 8 per strip, clamped at zero
        blue = SkyColor.b - (i*8)
        if blue < 0:
            blue = 0
        strip = Rect(SkyR.x, SkyR.y + (strip_h*i), SkyR.w, strip_h)
        pygame.draw.rect(Surface, Color(SkyColor.r, SkyColor.g, blue), strip, 0)
def DrawMap(surface, moonmap):
    """Render the mini-map: one coloured tile per sector plus a red
    player marker.

    Unless CHEAT is set, only sectors marked "visited" are drawn.
    """
    tile = 4  # tile edge length in pixels
    w = moonmap.Model.Width
    S = moonmap.Model.Sectors

    for x in range(w):
        for y in range(w):
            r = S[x][y]
            if CHEAT or "visited" in r:
                c = Color(255,255,255)
                # terrain colour -- first matching key wins
                if "water" in r:
                    c = Color(0, 0, 255)
                elif "shore" in r:
                    c = Color(128, 64, 0)
                elif "tree" in r:
                    c = Color(128, 255, 0)
                elif "snow" in r:
                    c = Color(255, 255, 255)
                elif "ground" in r:
                    c = Color(0, 255, 0)

                # flowers override the terrain colour entirely
                if "flowers" in r:
                    c = Color(0, 0, 0)
                pygame.draw.rect(surface, c, Rect(x*tile, y*tile, tile, tile ), 0)

    # player marker drawn last so it sits on top of the tiles
    pr = Rect(tile*moonmap.PlayerPos[0], tile*moonmap.PlayerPos[1], tile, tile)
    pygame.draw.rect(surface,Color(255,0,0),pr)
| {
"content_hash": "c9bb60be6c442a46cb5ac0014884ad7a",
"timestamp": "",
"source": "github",
"line_count": 144,
"max_line_length": 98,
"avg_line_length": 35.895833333333336,
"alnum_prop": 0.6014703037337976,
"repo_name": "daftspaniel/daftpyweek17",
"id": "15ecde47552a64a5dbb54d8775094956ac44a975",
"size": "5169",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "gamelib/gfx.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "123609"
}
],
"symlink_target": ""
} |
import re
import json
from django.http import HttpResponse, HttpResponseNotFound, HttpResponseBadRequest
from django.shortcuts import get_object_or_404
from nduser.models import Dataset, Project, Token, Channel
from stats.models import Histogram
import stats.tasks
from ndstats.histio import loadHistogram, loadHistogramROI
from ndstats.histstats import HistStats
from ndlib.ndtype import READONLY_TRUE, READONLY_FALSE, UINT8, UINT16, UINT32, UINT64, FLOAT32
import logging
logger = logging.getLogger("neurodata")
# AB TODO: kill this after moving binning code
import numpy as np
""" Histogram Functions """
def getHist(request, webargs):
    """Return the full histogram for a channel as JSON.

    webargs is expected to look like "<token>/<channel>/hist/".
    Responds 400 on malformed args, 404 when no histogram exists.
    """
    # process webargs
    try:
        m = re.match(r"(?P<token>[\w+]+)/(?P<channel>[\w+]+)/hist/$", webargs)
        [token, channel] = [i for i in m.groups()]
    except Exception, e:
        # also covers m being None (no match -> AttributeError on .groups())
        logger.error("Incorrect format for web arguments {}. {}".format(webargs, e))
        return HttpResponseBadRequest("Incorrect format for web arguments {}. {}".format(webargs, e))

    try:
        (hist, bins) = loadHistogram(token, channel)
    except Histogram.DoesNotExist:
        return HttpResponseNotFound('No histogram found for {}, {}'.format(token,channel))

    # serialise the numpy arrays as plain lists for JSON
    jsondict = {}
    jsondict['hist'] = hist.tolist()
    jsondict['bins'] = bins.tolist()

    return HttpResponse(json.dumps(jsondict, indent=4), content_type="application/json")
def getHistROI(request, webargs):
    """Return the histogram for a channel restricted to an ROI as JSON.

    webargs is expected to look like
    "<token>/<channel>/hist/roi/<x,y,z-x,y,z>" where the two coordinate
    triples are the ROI corners. Responds 400 on malformed args, 404
    when no histogram exists for the ROI.
    """
    # process webargs
    try:
        m = re.match(r"(?P<token>[\w+]+)/(?P<channel>[\w+]+)/hist/roi/(?P<roi>[\d,-]+)$", webargs)
        md = m.groupdict()
    except Exception, e:
        logger.error("Incorrect format for web arguments {}. {}".format(webargs, e))
        return HttpResponseBadRequest("Incorrect format for web arguments {}. {}".format(webargs, e))

    token = md['token']
    channel = md['channel']

    # parse roi: two "x,y,z" corner coordinates separated by '-'
    roistr = md['roi'].split('-')
    roi = []
    for i in range(2):
        try:
            m = re.match(r"^(?P<x>[\d.]+),(?P<y>[\d.]+),(?P<z>[\d.]+)$", roistr[i])
            md = m.groupdict()
            roi.append([int(md['x']), int(md['y']), int(md['z'])])
        except:
            return HttpResponseBadRequest("Error: Failed to read ROI coordinate ({})".format(roistr[i]))

    try:
        (hist, bins) = loadHistogramROI(token, channel, roi)
    except Histogram.DoesNotExist:
        return HttpResponseNotFound('No histogram found for {}, {}, {}'.format(token,channel,roi))

    # serialise the numpy arrays as plain lists for JSON
    jsondict = {}
    jsondict['hist'] = hist.tolist()
    jsondict['bins'] = bins.tolist()
    jsondict['roi'] = roi

    return HttpResponse(json.dumps(jsondict, indent=4), content_type="application/json")
def getBinnedHistROI(request, webargs):
""" Return JSON representation of a histogram reduced by a factor of 10 """
# process webargs
try:
m = re.match(r"(?P<token>[\w+]+)/(?P<channel>[\w+]+)/binnedhist/roi/(?P<roi>[\d,-]+)$", webargs)
md = m.groupdict()
except Exception, e:
logger.error("Incorrect format for web arguments {}. {}".format(webargs, e))
return HttpResponseBadRequest("Incorrect format for web arguments {}. {}".format(webargs, e))
token = md['token']
channel = md['channel']
# parse roi
roistr = md['roi'].split('-')
roi = []
for i in range(2):
try:
m = re.match(r"^(?P<x>[\d.]+),(?P<y>[\d.]+),(?P<z>[\d.]+)$", roistr[i])
md = m.groupdict()
roi.append([int(md['x']), int(md['y']), int(md['z'])])
except:
return HttpResponseBadRequest("Error: Failed to read ROI coordinate ({})".format(roistr[i]))
try:
(hist, bins) = loadHistogramROI(token, channel, roi)
except Histogram.DoesNotExist:
return HttpResponseNotFound('No histogram found for {}, {}, {}'.format(token,channel,roi))
newhist = np.zeros(hist.shape[0]/10, dtype=np.int64)
newbins = np.zeros(hist.shape[0]/10+1, dtype=np.int64)
# TODO quick and dirty binning for now. stick the binning code in loadhistogramROI and write a generic view that accepts options
for i, val in enumerate(hist):
newidx = np.floor(i / 10)
if newidx >= newhist.shape[0]:
continue
newhist[newidx] += val
for i, val in enumerate(bins):
if i % 10 == 0:
newbins[np.floor(i / 10)] = i
jsondict = {}
jsondict['hist'] = newhist.tolist()
jsondict['bins'] = newbins.tolist()
jsondict['roi'] = roi
return HttpResponse(json.dumps(jsondict, indent=4), content_type="application/json")
def getROIs(request, webargs):
""" Return a list of ROIs as JSON """
# process webargs
try:
m = re.match(r"(?P<token>[\w+]+)/(?P<channel>[\w+]+)/hist/roi/$", webargs)
md = m.groupdict()
except Exception, e:
logger.error("Incorrect format for web arguments {}. {}".format(webargs, e))
return HttpResponseBadRequest("Incorrect format for web arguments {}. {}".format(webargs, e))
token = md['token']
channel = md['channel']
# check to make sure token exists
tokenobj = get_object_or_404(Token, token_name = token)
# get the project
projectobj = tokenobj.project
# get the channel
chanobj = get_object_or_404(Channel, project = projectobj, channel_name = channel)
rois = Histogram.objects.filter( channel = chanobj, region = 1 ).values_list( 'roi' )
jsonrois = []
for roi in rois:
jsonrois.append(json.loads(roi[0]))
return HttpResponse(json.dumps(jsonrois, sort_keys=True, indent=4), content_type="application/json")
def genHist(request, webargs):
    """ Kicks off a background job to generate the histogram

    GET: start one whole-channel histogram task.
    POST: start one histogram task per ROI listed in the JSON request body
    under the 'ROI' key ('RAMON' is recognized but not implemented).

    Both paths resolve token -> project -> channel, refuse readonly
    channels, and map the channel datatype to a bin count (2**bits).
    Returns JSON describing the queued Celery task(s), or an HTTP error.
    """
    if request.method == 'GET':
        # process webargs
        try:
            m = re.match(r"(?P<token>[\w+]+)/(?P<channel>[\w+]+)/genhist/$", webargs)
            [token, channel] = [i for i in m.groups()]
        except Exception, e:
            logger.error("Incorrect format for web arguments {}. {}".format(webargs, e))
            return HttpResponseBadRequest("Incorrect format for web arguments {}. {}".format(webargs, e))

        # check to make sure token exists
        tokenobj = get_object_or_404(Token, token_name = token)
        # get the project
        projectobj = tokenobj.project
        # get the channel
        chanobj = get_object_or_404(Channel, project = projectobj, channel_name = channel)
        # get the dataset (not used on the GET path, kept for parity with POST)
        datasetobj = projectobj.dataset

        if (chanobj.readonly == READONLY_TRUE):
            # we can only generate histograms on writeable channels
            return HttpResponseBadRequest("Error: Channel must not be readonly to generate histograms.")

        # now that we have a channel, kickoff the background job that will generate the histogram
        # determine number of bits (2**bits = numbins)
        if (chanobj.channel_datatype == UINT8):
            bits = 8
        elif (chanobj.channel_datatype == UINT16):
            bits = 16
        #elif (chanobj.channel_datatype == UINT32):
        #  bits = 32
        else:
            return HttpResponseBadRequest("Error: Unsupported datatype ({})".format(chanobj.channel_datatype))

        # run the background job (Celery task; returns immediately)
        result = stats.tasks.generateHistogramTask.delay(tokenobj.token_name, chanobj.channel_name, chanobj.resolution, bits)

        jsondict = {}
        jsondict['token'] = tokenobj.token_name
        jsondict['channel'] = chanobj.channel_name
        jsondict['jobid'] = result.id
        jsondict['state'] = result.state

        return HttpResponse(json.dumps(jsondict, sort_keys=True, indent=4), content_type="application/json")

    elif request.method == 'POST':
        # request body carries the ROI (or RAMON) specification as JSON
        params = json.loads(request.body)

        # process webargs
        try:
            m = re.match(r"(?P<token>[\w+]+)/(?P<channel>[\w+]+)/genhist/$", webargs)
            [token, channel] = [i for i in m.groups()]
        except Exception, e:
            logger.error("Incorrect format for web arguments {}. {}".format(webargs, e))
            return HttpResponseBadRequest("Incorrect format for web arguments {}. {}".format(webargs, e))

        # check to make sure token exists
        tokenobj = get_object_or_404(Token, token_name = token)
        # get the project
        projectobj = tokenobj.project
        # get the channel
        chanobj = get_object_or_404(Channel, project = projectobj, channel_name = channel)
        # get the dataset (provides image extents and offsets for ROI checks)
        datasetobj = projectobj.dataset

        if (chanobj.readonly == READONLY_TRUE):
            # we can only generate histograms on writeable channels
            return HttpResponseBadRequest("Error: Channel must not be readonly to generate histograms.")

        # now that we have a channel, kickoff the background job that will generate the histogram
        # determine number of bits (2**bits = numbins)
        if (chanobj.channel_datatype == UINT8):
            bits = 8
        elif (chanobj.channel_datatype == UINT16):
            bits = 16
        #elif (chanobj.channel_datatype == UINT32):
        #  bits = 32
        else:
            return HttpResponseBadRequest("Error: Unsupported datatype ({})".format(chanobj.channel_datatype))

        if 'ROI' in params.keys():
            # run one histogram task for each ROI
            results = []
            for roicords in params['ROI']:
                # do some basic error checking
                if len(roicords) != 2:
                    return HttpResponseBadRequest("Error: Failed to read ROI coordinate. Need 2 points! ({})".format(roicords))
                if len(roicords[0]) != 3 or len(roicords[1]) != 3:
                    return HttpResponseBadRequest("Error: Failed to read ROI coordinate. Need 3 coordinates per point! ({})".format(roicords))

                # check to make sure ROI cords define a cube
                for i in range(3):
                    if roicords[0][i] >= roicords[1][i]:
                        return HttpResponseBadRequest("Error: provided ROI coordinates do not define a cube! ({})".format(roicords))

                # check ROI cords to see if they are inside dataset
                xoffset = datasetobj.xoffset
                yoffset = datasetobj.yoffset
                zoffset = datasetobj.zoffset

                # convert roi into base 0
                (x0, x1) = (roicords[0][0]-xoffset, roicords[1][0]-xoffset)
                (y0, y1) = (roicords[0][1]-yoffset, roicords[1][1]-yoffset)
                (z0, z1) = (roicords[0][2]-zoffset, roicords[1][2]-zoffset)

                # check dimensions
                if x0 < 0 or x1 > datasetobj.ximagesize:
                    return HttpResponseBadRequest("Error: x coordinate range outside of dataset bounds! ({}, {})".format(roicords[0][0], roicords[1][0]))
                if y0 < 0 or y1 > datasetobj.yimagesize:
                    return HttpResponseBadRequest("Error: y coordinate range outside of dataset bounds! ({}, {})".format(roicords[0][1], roicords[1][1]))
                if z0 < 0 or z1 > datasetobj.zimagesize:
                    return HttpResponseBadRequest("Error: z coordinate range outside of dataset bounds! ({}, {})".format(roicords[0][2], roicords[1][2]))

                # queue one Celery task per ROI; original (offset) coords are passed through
                result = stats.tasks.generateHistogramROITask.delay(tokenobj.token_name, chanobj.channel_name, chanobj.resolution, bits, roicords)
                results.append({
                    'jobid': result.id,
                    'state': result.state,
                    'roi': roicords,
                })
        elif 'RAMON' in params.keys():
            # parse RAMON
            return HttpResponseBadRequest("RAMON histogram service not implemented.")
        else:
            return HttpResponseBadRequest("Unsupported parameter.")

        jsondict = {}
        jsondict['token'] = tokenobj.token_name
        jsondict['channel'] = chanobj.channel_name
        jsondict['results'] = results

        return HttpResponse(json.dumps(jsondict, sort_keys=True, indent=4), content_type="application/json")
""" Statistics Functions """
def mean(request, webargs):
""" Return mean """
# process webargs
try:
m = re.match(r"(?P<token>[\w+]+)/(?P<channel>[\w+]+)/mean/$", webargs)
[token, channel] = [i for i in m.groups()]
except Exception, e:
logger.error("Incorrect format for web arguments {}. {}".format(webargs, e))
return HttpResponseBadRequest("Incorrect format for web arguments {}. {}".format(webargs, e))
try:
(hist, bins) = loadHistogram(token, channel)
except Histogram.DoesNotExist:
return HttpResponseNotFound('No histogram found for {}, {}'.format(token,channel))
hs = HistStats()
mean = hs.mean(hist, bins)
return HttpResponse(mean)
def std(request, webargs):
""" Return std """
# process webargs
try:
m = re.match(r"(?P<token>[\w+]+)/(?P<channel>[\w+]+)/std/$", webargs)
[token, channel] = [i for i in m.groups()]
except Exception, e:
logger.error("Incorrect format for web arguments {}. {}".format(webargs, e))
return HttpResponseBadRequest("Incorrect format for web arguments {}. {}".format(webargs, e))
try:
(hist, bins) = loadHistogram(token, channel)
except Histogram.DoesNotExist:
return HttpResponseNotFound('No histogram found for {}, {}'.format(token,channel))
hs = HistStats()
stddev = hs.stddev(hist, bins)
return HttpResponse(stddev)
def percentile(request, webargs):
""" Return arbitrary percentile """
# process webargs
try:
m = re.match(r"(?P<token>[\w+]+)/(?P<channel>[\w+]+)/percentile/(?P<percent>[\d.]+)/$", webargs)
[token, channel, percent] = [i for i in m.groups()]
except Exception, e:
logger.error("Incorrect format for web arguments {}. {}".format(webargs, e))
return HttpResponseBadRequest("Incorrect format for web arguments {}. {}".format(webargs, e))
try:
(hist, bins) = loadHistogram(token, channel)
except Histogram.DoesNotExist:
return HttpResponseNotFound('No histogram found for {}, {}'.format(token,channel))
hs = HistStats()
percentile = hs.percentile(hist, bins, percent)
jsondict = {}
jsondict[percent] = percentile
return HttpResponse(json.dumps(jsondict, indent=4), content_type="application/json")
def all(request, webargs):
""" Display all statistics or 404 if no histogram is present """
# process webargs
try:
m = re.match(r"(?P<token>[\w+]+)/(?P<channel>[\w+]+)/all/$", webargs)
[token, channel] = [i for i in m.groups()]
except Exception, e:
logger.error("Incorrect format for web arguments {}. {}".format(webargs, e))
return HttpResponseBadRequest("Incorrect format for web arguments {}. {}".format(webargs, e))
try:
(hist, bins) = loadHistogram(token, channel)
except Histogram.DoesNotExist:
return HttpResponseNotFound('No histogram found for {}, {}'.format(token,channel))
hs = HistStats()
mean = hs.mean(hist, bins)
stddev = hs.stddev(hist, bins)
percents = [hs.percentile(hist, bins, 1), hs.percentile(hist, bins, 50), hs.percentile(hist, bins, 99)]
min = hs.min(hist, bins)
max = hs.max(hist, bins)
jsondict = {}
jsondict['hist'] = hist.tolist()
jsondict['bins'] = bins.tolist()
jsondict['mean'] = mean
jsondict['stddev'] = stddev
jsondict['percents'] = {}
jsondict['percents']['1'] = percents[0]
jsondict['percents']['50'] = percents[1]
jsondict['percents']['99'] = percents[2]
jsondict['min'] = min
jsondict['max'] = max
return HttpResponse(json.dumps(jsondict, indent=4), content_type="application/json")
| {
"content_hash": "0bff500c6b2579b742e8afdd283bd79a",
"timestamp": "",
"source": "github",
"line_count": 397,
"max_line_length": 143,
"avg_line_length": 37.042821158690174,
"alnum_prop": 0.6593227254181967,
"repo_name": "neurodata/ndstore",
"id": "2cc675592ed7d70c16bf5397d4d0c0798fc121f8",
"size": "15302",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "django/stats/views.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "43094"
},
{
"name": "HTML",
"bytes": "83782"
},
{
"name": "JavaScript",
"bytes": "75900"
},
{
"name": "Nginx",
"bytes": "1743"
},
{
"name": "Python",
"bytes": "1491127"
},
{
"name": "Shell",
"bytes": "14105"
}
],
"symlink_target": ""
} |
"""An example functional test
The module-level docstring should include a high-level description of
what the test is doing. It's the first thing people see when they open
the file and should give the reader information about *what* the test
is testing and *how* it's being tested
"""
# Imports should be in PEP8 ordering (std library first, then third party
# libraries then local imports).
from collections import defaultdict
# Avoid wildcard * imports
from test_framework.blocktools import (create_block, create_coinbase)
from test_framework.messages import CInv
from test_framework.mininode import (
P2PInterface,
mininode_lock,
msg_block,
msg_getdata,
)
from test_framework.test_framework import SyscoinTestFramework
from test_framework.util import (
assert_equal,
connect_nodes,
wait_until,
)
# P2PInterface is a class containing callbacks to be executed when a P2P
# message is received from the node-under-test. Subclass P2PInterface and
# override the on_*() methods if you need custom behaviour.
class BaseNode(P2PInterface):
    """P2P connection that records every block it receives.

    The P2PInterface base class already counts messages by type and keeps
    the last received message of each type; this subclass additionally
    tallies how many times each individual block hash has been delivered.
    """

    def __init__(self):
        # Standard initialization first, then our custom state.
        super().__init__()
        # block sha256 -> number of times that block was received
        self.block_receive_map = defaultdict(int)

    def on_block(self, message):
        """Record the received block, keyed by its hash."""
        message.block.calc_sha256()
        self.block_receive_map[message.block.sha256] += 1

    def on_inv(self, message):
        """Replace the inherited inv handling with a no-op."""
        pass
def custom_function():
    """Placeholder for custom behaviour shared by tests.

    If a helper like this were generally useful it would belong in a
    test_framework module instead. Note that self.log is not available
    here, since this is not a method of the test framework class.
    """
    pass
class ExampleTest(SyscoinTestFramework):
    """Tutorial functional test: mines blocks, drives P2P connections by hand,
    and checks block propagation between nodes."""
    # Each functional test is a subclass of the SyscoinTestFramework class.
    # Override the set_test_params(), skip_test_if_missing_module(), add_options(), setup_chain(), setup_network()
    # and setup_nodes() methods to customize the test setup as required.

    def set_test_params(self):
        """Override test parameters for your individual test.

        This method must be overridden and num_nodes must be explicitly set."""
        self.setup_clean_chain = True
        self.num_nodes = 3
        # Use self.extra_args to change command-line arguments for the nodes
        self.extra_args = [[], ["-logips"], []]

        # self.log.info("I've finished set_test_params")  # Oops! Can't run self.log before run_test()

    # Use skip_test_if_missing_module() to skip the test if your test requires certain modules to be present.
    # This test uses generate which requires wallet to be compiled
    def skip_test_if_missing_module(self):
        self.skip_if_no_wallet()

    # Use add_options() to add specific command-line options for your test.
    # In practice this is not used very much, since the tests are mostly written
    # to be run in automated environments without command-line options.
    # def add_options()
    #     pass

    # Use setup_chain() to customize the node data directories. In practice
    # this is not used very much since the default behaviour is almost always
    # fine
    # def setup_chain():
    #     pass

    def setup_network(self):
        """Setup the test network topology

        Often you won't need to override this, since the standard network topology
        (linear: node0 <-> node1 <-> node2 <-> ...) is fine for most tests.

        If you do override this method, remember to start the nodes, assign
        them to self.nodes, connect them and then sync."""

        self.setup_nodes()

        # In this test, we're not connecting node2 to node0 or node1. Calls to
        # sync_all() should not include node2, since we're not expecting it to
        # sync.
        connect_nodes(self.nodes[0], 1)
        self.sync_all(self.nodes[0:2])

    # Use setup_nodes() to customize the node start behaviour (for example if
    # you don't want to start all nodes at the start of the test).
    # def setup_nodes():
    #     pass

    def custom_method(self):
        """Do some custom behaviour for this test

        Define it in a method here because you're going to use it repeatedly.
        If you think it's useful in general, consider moving it to the base
        SyscoinTestFramework class so other tests can use it."""

        self.log.info("Running custom_method")

    def run_test(self):
        """Main test logic"""

        # Create P2P connections will wait for a verack to make sure the connection is fully up
        self.nodes[0].add_p2p_connection(BaseNode())

        # Generating a block on one of the nodes will get us out of IBD
        blocks = [int(self.nodes[0].generate(nblocks=1)[0], 16)]
        self.sync_all(self.nodes[0:2])

        # Notice above how we called an RPC by calling a method with the same
        # name on the node object. Notice also how we used a keyword argument
        # to specify a named RPC argument. Neither of those are defined on the
        # node object. Instead there's some __getattr__() magic going on under
        # the covers to dispatch unrecognised attribute calls to the RPC
        # interface.

        # Logs are nice. Do plenty of them. They can be used in place of comments for
        # breaking the test into sub-sections.
        self.log.info("Starting test!")

        self.log.info("Calling a custom function")
        custom_function()

        self.log.info("Calling a custom method")
        self.custom_method()

        self.log.info("Create some blocks")
        self.tip = int(self.nodes[0].getbestblockhash(), 16)
        self.block_time = self.nodes[0].getblock(self.nodes[0].getbestblockhash())['time'] + 1
        height = self.nodes[0].getblockcount()

        for i in range(10):
            # Use the mininode and blocktools functionality to manually build a block
            # Calling the generate() rpc is easier, but this allows us to exactly
            # control the blocks and transactions.
            block = create_block(self.tip, create_coinbase(height+1), self.block_time)
            block.solve()
            block_message = msg_block(block)
            # Send message is used to send a P2P message to the node over our P2PInterface
            self.nodes[0].p2p.send_message(block_message)
            self.tip = block.sha256
            blocks.append(self.tip)
            self.block_time += 1
            height += 1

        self.log.info("Wait for node1 to reach current tip (height 11) using RPC")
        self.nodes[1].waitforblockheight(11)

        self.log.info("Connect node2 and node1")
        connect_nodes(self.nodes[1], 2)

        self.log.info("Wait for node2 to receive all the blocks from node1")
        self.sync_all()

        self.log.info("Add P2P connection to node2")
        # Drop the old connection to node0 before attaching to node2.
        self.nodes[0].disconnect_p2ps()
        self.nodes[2].add_p2p_connection(BaseNode())

        self.log.info("Test that node2 propagates all the blocks to us")

        getdata_request = msg_getdata()
        for block in blocks:
            getdata_request.inv.append(CInv(2, block))  # 2 == MSG_BLOCK inv type
        self.nodes[2].p2p.send_message(getdata_request)

        # wait_until() will loop until a predicate condition is met. Use it to test properties of the
        # P2PInterface objects.
        wait_until(lambda: sorted(blocks) == sorted(list(self.nodes[2].p2p.block_receive_map.keys())), timeout=5, lock=mininode_lock)

        self.log.info("Check that each block was received only once")
        # The network thread uses a global lock on data access to the P2PConnection objects when sending and receiving
        # messages. The test thread should acquire the global lock before accessing any P2PConnection data to avoid locking
        # and synchronization issues. Note wait_until() acquires this global lock when testing the predicate.
        with mininode_lock:
            for block in self.nodes[2].p2p.block_receive_map.values():
                assert_equal(block, 1)
# Standard entry point: run the test directly as a script.
if __name__ == '__main__':
    ExampleTest().main()
| {
"content_hash": "e318c7d79dafd3458c68a19eacd83974",
"timestamp": "",
"source": "github",
"line_count": 213,
"max_line_length": 133,
"avg_line_length": 40.84976525821596,
"alnum_prop": 0.6671646937133663,
"repo_name": "syscoin/syscoin2",
"id": "dc6a091a8e287043926c5fe0f3d4d7cab66fc9bb",
"size": "8915",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/functional/example_test.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "28456"
},
{
"name": "C",
"bytes": "703095"
},
{
"name": "C++",
"bytes": "5813180"
},
{
"name": "CSS",
"bytes": "33839"
},
{
"name": "HTML",
"bytes": "50621"
},
{
"name": "Java",
"bytes": "32390"
},
{
"name": "M4",
"bytes": "186312"
},
{
"name": "Makefile",
"bytes": "105565"
},
{
"name": "Objective-C",
"bytes": "92290"
},
{
"name": "Objective-C++",
"bytes": "7240"
},
{
"name": "Python",
"bytes": "1028263"
},
{
"name": "QMake",
"bytes": "2020"
},
{
"name": "Roff",
"bytes": "30562"
},
{
"name": "Shell",
"bytes": "63790"
}
],
"symlink_target": ""
} |
import numpy as np
from math import factorial
from openmdao.lib.datatypes.api import List, Bool, Array, Instance, Int, Float
from fusedwind.turbine.geometry_vt import Curve
from fusedwind.lib.distfunc import distfunc
from fusedwind.lib.geom_tools import calculate_length
from fusedwind.lib.cubicspline import NaturalCubicSpline
def _C(n, k):
return factorial(n) / (factorial(k) * factorial(n - k))
class BezierCurve(Curve):
    """
    Computes a 2D/3D bezier curve

    Control points are held in the CPs trait; update() samples the curve at
    self.ni uniformly spaced parameter values and passes the points to the
    Curve base class.
    """
    # (m, n) array: m control points in n dimensions
    CPs = Array(desc='Array of control points')

    def add_control_point(self, p):
        # Rebuild the CPs array with point p appended (traits arrays are
        # replaced wholesale rather than resized in place).
        C = list(self.CPs)
        C.append(list(p))
        self.CPs = np.asarray(C)

    def update(self):
        """Evaluate the curve from the current control points.

        Sets self.nd from the control-point dimensionality, defaults the
        sample count to 100 when unset, and hands the sampled points to
        Curve.initialize() -- presumably the base class computes arc
        length / splines there; confirm in fusedwind.turbine.geometry_vt.
        """
        try:
            self.nd = self.CPs.shape[1]
        except:
            raise RuntimeError('CPs needs to an array of shape (m, n)')

        if self.ni == 0:
            self.ni = 100

        points = self._compute(self.CPs)
        # self._s = calculate_length(points)
        # self._s /= self._s[-1]
        self.initialize(points)

    # def _compute_dp(self):
    #     """
    #     computes the derivatives (tangent vectors) along a Bezier curve
    #     wrt ``t``.
    #
    #     there is no trivial analytic function to compute derivatives wrt
    #     to a given space interval, so we just spline and redistribute
    #
    #     see: http://pomax.github.io/bezierinfo/
    #     """
    #     C = np.zeros((self.CPs.shape[0] - 1, self.nd))
    #     nC = C.shape[0]
    #     for i in range(nC):
    #         C[i, :] = float(nC) * (self.CPs[i + 1] - self.CPs[i])
    #
    #     dp = self._compute(C)

    def _compute(self, C):
        # Sample the Bezier curve defined by control points C at self.ni
        # parameter values t uniformly spaced on [0, 1]; returns an
        # (ni, nd) array of points in Bernstein-polynomial form.
        points = np.zeros((self.ni, self.nd))
        self.t = np.linspace(0., 1., self.ni)

        # control point iterator
        _n = xrange(C.shape[0])
        for i in range(self.ni):
            s = self.t[i]
            # polynomial degree = number of control points - 1
            n = _n[-1]
            points[i, :] = 0.
            for j in range(self.nd):
                for m in _n:
                    # compute bernstein polynomial
                    b_i = _C(n, m) * s**m * (1 - s)**(n - m)
                    # multiply ith control point by ith bernstein polynomial
                    points[i, j] += C[m, j] * b_i
        return points
| {
"content_hash": "9e94caf419eeaea6be7965aba0848b31",
"timestamp": "",
"source": "github",
"line_count": 78,
"max_line_length": 78,
"avg_line_length": 28.44871794871795,
"alnum_prop": 0.5434880576836413,
"repo_name": "mrosemeier/fusedwind",
"id": "fefadff82902c3c94ae0cb8bae3c2fb09837126d",
"size": "2220",
"binary": false,
"copies": "2",
"ref": "refs/heads/develop",
"path": "src/fusedwind/lib/bezier.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "656114"
},
{
"name": "Shell",
"bytes": "447"
}
],
"symlink_target": ""
} |
from django.http import HttpResponse
from django.template import RequestContext
from django.shortcuts import render_to_response
from nltkapp.models import Document, Corpus
import logging
logger = logging.getLogger('nltksite.nltkapp')
def index(request):
    """Render the landing page listing every Corpus in the database."""
    logger.debug("index requested.")
    corpora = Corpus.objects.all()
    return render_to_response('nltkapp/index.html', {'corpora': corpora}, context_instance=RequestContext(request)) | {
"content_hash": "12fa68392aaf97da82f2bb73db22d2c1",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 112,
"avg_line_length": 36.166666666666664,
"alnum_prop": 0.804147465437788,
"repo_name": "wtamu-cisresearch/nltksite",
"id": "473172d5bbb98ae0b30524ec488c808730e0e467",
"size": "460",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "old/nltksite/nltkapp/views.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "141008"
},
{
"name": "HTML",
"bytes": "24816"
},
{
"name": "JavaScript",
"bytes": "1429016"
},
{
"name": "Python",
"bytes": "45044"
}
],
"symlink_target": ""
} |
"""Raspberry Pi fan controller: switch a GPIO-driven fan on above a
temperature threshold, polling the SoC temperature periodically."""
import os
import time
import RPi.GPIO as GPIO

# Temperature threshold at which the fan switches on.
maxTemp = 45  # Celsius

# Delay between successive temperature checks.
waitTime = 5  # seconds

# Physical (BOARD-numbered) pin driving the fan.
FAN_PIN = 8

GPIO.setmode(GPIO.BOARD)
GPIO.setwarnings(False)
GPIO.setup(FAN_PIN, GPIO.OUT)

try:
    while True:
        # vcgencmd output looks like "temp=45.6'C\n"; strip it to the number
        temperature = os.popen('vcgencmd measure_temp').readline()
        tempInC = float(temperature.replace("temp=", "").replace("'C\n", ""))
        # Drive the fan around the threshold. No hysteresis: the fan may
        # toggle frequently when the temperature hovers near maxTemp.
        GPIO.output(FAN_PIN, tempInC >= maxTemp)
        time.sleep(waitTime)
except KeyboardInterrupt:
    pass
finally:
    # Release the GPIO pin so it is not left driven after the script exits.
    GPIO.cleanup()
| {
"content_hash": "049dec1cace9c2ec42225fac2368537a",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 71,
"avg_line_length": 27.25,
"alnum_prop": 0.691743119266055,
"repo_name": "andrewkarch/Eleduino-Fan-Controller",
"id": "009a37c35bb91d70ec101fe829d04d8e1f4171b3",
"size": "545",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "fanController.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "545"
}
],
"symlink_target": ""
} |
from distutils.core import setup

from runcmd import __version__ as runcmd_version

# Declarative packaging metadata only; no logic beyond single-sourcing the
# version number from the runcmd package itself.
setup(
    name='runcmd',
    version=runcmd_version,
    packages=[''],
    url='https://github.com/wschang/RunCmd',
    license='MIT',
    author='Wen Shan Chang',
    author_email='shan.and.android@gmail.com',
    description='Run commands with a timeout option'
)
| {
"content_hash": "44ea35647a86856edd13a6d62cf84030",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 52,
"avg_line_length": 27,
"alnum_prop": 0.6837606837606838,
"repo_name": "wschang/RunCmd",
"id": "2905a5a8ca8ce771931bb8da959945eaa7baa8d5",
"size": "351",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "28199"
}
],
"symlink_target": ""
} |
"""
Holds a base class for frontend modules used in the SATOSA proxy.
"""
from ..attribute_mapping import AttributeMapper
class FrontendModule(object):
    """Abstract base class for SATOSA frontend plugins.

    A frontend speaks some protocol (e.g. SAML2 IdP, OIDC OP) towards the
    requesting client and forwards authentication requests into the proxy
    through ``auth_req_callback_func``. Concrete frontends must implement
    ``handle_authn_response``, ``handle_backend_error`` and
    ``register_endpoints``.
    """

    def __init__(self, auth_req_callback_func, internal_attributes, base_url, name):
        """Store the proxy callback and plugin configuration.

        :type auth_req_callback_func:
            (satosa.context.Context, satosa.internal.InternalData) -> satosa.response.Response
        :type internal_attributes: dict[str, dict[str, str | list[str]]]
        :type name: str

        :param auth_req_callback_func: called by the frontend once an
            authorization response has been processed
        :param internal_attributes: attribute mapping configuration, also
            used to build the AttributeMapper converter
        :param base_url: base URL of the proxy
        :param name: name of the plugin
        """
        self.auth_req_callback_func = auth_req_callback_func
        self.internal_attributes = internal_attributes
        self.converter = AttributeMapper(internal_attributes)
        self.base_url = base_url
        self.name = name

    def handle_authn_response(self, context, internal_resp):
        """Send an authorization response to the client.

        Called after a backend authorization succeeds.

        :type context: satosa.context.Context
        :type internal_resp: satosa.internal.InternalData
        :rtype: satosa.response.Response

        :param context: the request context
        :param internal_resp: attributes from the authorization
        :return: response
        """
        raise NotImplementedError()

    def handle_backend_error(self, exception):
        """Send a suitable error notice to the requester.

        Called when the backend hits an unexpected error.

        :type exception: satosa.exception.SATOSAError
        :rtype: satosa.response.Response

        :param exception: the raised exception
        :return: response
        """
        raise NotImplementedError()

    def register_endpoints(self, backend_names):
        """Map endpoint URL regexes to frontend handler functions.

        Every regex produced for the frontend's authentication endpoint must
        incorporate each configured backend name, e.g.::

            urls = []
            for name in backend_names:
                urls.append("{}/authentication".format(name))
            urls.append("global_endpoint")
            return urls

        :type backend_names: list[str]
        :rtype: List[Tuple[str, Callable[[satosa.context.Context, Any], satosa.response.Response]]]

        :param backend_names: names of all configured backends
        :return: a list of (regexp, function) pairs bound to endpoint urls
        """
        raise NotImplementedError()
| {
"content_hash": "1b287391d994339c837f11cef8fbf313",
"timestamp": "",
"source": "github",
"line_count": 86,
"max_line_length": 112,
"avg_line_length": 35.94186046511628,
"alnum_prop": 0.6311873180200582,
"repo_name": "its-dirg/SATOSA",
"id": "52840a85c6c446623a0c8ca6952c69f3ab2c119a",
"size": "3091",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/satosa/frontends/base.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "412362"
}
],
"symlink_target": ""
} |
from swgpy.object import *
def create(kernel):
    """Build the creature template for the Thune grassland guardian mob."""
    result = Creature()

    result.template = "object/mobile/shared_thune_grassland_guardian.iff"
    result.attribute_template_id = 9
    result.stfName("monster_name","thune")

    #### BEGIN MODIFICATIONS ####
    #### END MODIFICATIONS ####

    return result | {
"content_hash": "528bc4a28b9a9bdd30bdb934b6cc3bac",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 70,
"avg_line_length": 22.923076923076923,
"alnum_prop": 0.697986577181208,
"repo_name": "obi-two/Rebelion",
"id": "1cbc058dfdd89d51276fd3b7c37b9667d985a0e0",
"size": "443",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "data/scripts/templates/object/mobile/shared_thune_grassland_guardian.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "11818"
},
{
"name": "C",
"bytes": "7699"
},
{
"name": "C++",
"bytes": "2293610"
},
{
"name": "CMake",
"bytes": "39727"
},
{
"name": "PLSQL",
"bytes": "42065"
},
{
"name": "Python",
"bytes": "7499185"
},
{
"name": "SQLPL",
"bytes": "41864"
}
],
"symlink_target": ""
} |
# Mininet harness for the Pax Ethernet echo example: builds a two-host
# topology, runs the echoer on one node and the test client on the other.
from mininet.net import Mininet, CLI
from scapy.all import *
import os

# MAC addresses used to wire up the test topology.
host_mac = "02:00:00:00:00:02"
echoer_mac = "02:00:00:00:00:01"
echoer_iface_name = "echoer-eth0"

# Locate the Pax checkout; the mininet helpers and example binaries live there.
PAX = None
try:
    PAX = os.environ['PAX']
except KeyError:
    print "PAX environment variable must point to path where Pax repo is cloned"
    exit(1)

import sys
sys.path.insert(0, PAX + "/mininet/")
from pax_mininet_node import PaxNode

# Topology: echoer and host both attached to a single switch.
net = Mininet()
echoer = net.addHost('echoer', mac=echoer_mac, cls=PaxNode)
host = net.addHost('host', mac=host_mac)
switch = net.addSwitch('s0')
controller = net.addController('c0')
net.addLink(echoer, switch)
net.addLink(host, switch)
net.start()

# Run the Pax Ethernet echo example on the echoer node in the background,
# then run the test client on the host and show its output.
echoer.cmd("sudo " + PAX + "/Bin/Pax.exe " + PAX + "/examples/EthernetEcho/ethernet_echo.json " + PAX + "/examples/Bin/Examples.dll &")
output = host.cmdPrint("sudo python " + PAX + "/examples/EthernetEcho/mn_ethernet_echo_test.py")
print output

net.stop()
| {
"content_hash": "28eaf4c1dfeb422ff40de19cafc5b85e",
"timestamp": "",
"source": "github",
"line_count": 35,
"max_line_length": 135,
"avg_line_length": 26.2,
"alnum_prop": 0.707742639040349,
"repo_name": "niksu/pax",
"id": "6af0d1e54d5ce279dba1c71dd313d8514428fab0",
"size": "1110",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/EthernetEcho/mn_ethernet_echo.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "C#",
"bytes": "89528"
},
{
"name": "Python",
"bytes": "1987"
},
{
"name": "Shell",
"bytes": "1935"
}
],
"symlink_target": ""
} |
import os,sys
curr_path = os.getcwd()
gerkin_path = os.path.split(curr_path)[0]
olfaction_prediction_path = os.path.split(gerkin_path)[0]
sys.path.append(olfaction_prediction_path) | {
"content_hash": "55a1baab7b350ad93606ba02ca843a05",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 57,
"avg_line_length": 36,
"alnum_prop": 0.7666666666666667,
"repo_name": "dream-olfaction/olfaction-prediction",
"id": "0f8e2d6f0af7061409bbb3a92bc2d9c577792711",
"size": "180",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "opc_python/gerkin/prelims.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "952179"
},
{
"name": "Jupyter Notebook",
"bytes": "11495462"
},
{
"name": "MATLAB",
"bytes": "55979"
},
{
"name": "Python",
"bytes": "118002"
},
{
"name": "R",
"bytes": "14592"
},
{
"name": "Shell",
"bytes": "1387"
}
],
"symlink_target": ""
} |
from collections import OrderedDict
import os
import shutil
import tempfile
import unittest2
import git
from normalize import Property
from normalize import Record
from normalize.identity import record_id
from unique.store import Commit
from unique.store import Page
from unique.store import Tree
from testclasses import MultiLevelKeyValue
class BasicUntyped(Record):
    """Minimal key/value record type with untyped properties; 'key' alone
    forms the primary key."""
    key = Property()
    value = Property()
    primary_key = [key]
class TestRead(unittest2.TestCase):
    """Read-side tests: scan Pages, Trees and Commits built from JSON
    fixtures committed into a throwaway git repository."""

    @classmethod
    def setUpClass(cls):
        """Copy the JSON fixtures into a fresh temp git repo and commit them."""
        cls.tempdir = tempfile.mkdtemp()
        cls.repo = git.repo.Repo.init(cls.tempdir)
        fixtures = os.path.join(
            os.path.split(__file__)[0],
            "fixtures",
        )
        # path -> git object for every fixture added to the index
        cls.files = {}
        to_add = []
        for root, dirs, files in os.walk(fixtures, topdown=True):
            reldir = os.path.relpath(root, fixtures)
            # mirror the fixture directory layout inside the repo
            for fn in dirs:
                os.mkdir(os.path.join(cls.tempdir, reldir, fn))
            for fn in files:
                if fn.endswith(".json"):
                    shutil.copy(
                        os.path.join(fixtures, root, fn),
                        os.path.join(cls.tempdir, reldir, fn),
                    )
                    to_add.append(fn if reldir == os.path.curdir else
                                  os.path.join(reldir, fn))
        for entry in cls.repo.index.add(to_add):
            cls.files[entry.path] = git.objects.Object(
                cls.repo, entry.binsha,
            )
        cls.repo.index.commit("test_read initial fixtures")

    @classmethod
    def tearDownClass(cls):
        # remove the throwaway repo
        shutil.rmtree(cls.tempdir)

    def assertScan(self, store, expected):
        """Scan *store*, asserting keys are unique tuples in strictly
        increasing order and that *expected* rows come back; stashes the
        results on self for follow-up assertions."""
        seen = OrderedDict()
        last_key = None
        for key, row in store.scan():
            self.assertIsInstance(key, tuple)
            self.assertIsInstance(row, MultiLevelKeyValue)
            self.assertNotIn(key, seen)
            if last_key is not None:
                self.assertGreater(key, last_key)
            seen[key] = row
        self.assertEqual(len(seen), expected)
        # expose the scan results and the final key/row to the caller
        self.last_scanned = seen
        self.last_key = key
        self.last_row = row

    def test_simple_page_read(self):
        """A single-row page scans once and supports get() by key."""
        page = Page.from_gitobject(
            MultiLevelKeyValue, self.files['Gumdrop.json'],
        )
        self.assertScan(page, 1)
        self.assertEqual(page.get(self.last_key), self.last_row)

    def test_multi_page_read(self):
        """A multi-row page scans all rows; its range ends at the last key."""
        page = Page.from_gitobject(
            MultiLevelKeyValue, self.files['Scout-Start.json'],
        )
        self.assertScan(page, 4)
        self.assertEqual(page.range['lte'], self.last_key)
        self.assertEqual(page.get(self.last_key), self.last_row)

    def test_tree_read(self):
        """A tree aggregates its pages; scan order and range still hold."""
        tree = Tree.from_gitobject(
            MultiLevelKeyValue, self.repo.tree().trees[0],
        )
        self.assertScan(tree, 7)
        self.assertEqual(tree.range['lte'], self.last_key)
        self.assertEqual(tree.get(self.last_key), self.last_row)

    def test_commit_read(self):
        """A commit exposes the whole fixture set; scan order and range hold."""
        commit = Commit.from_gitobject(
            MultiLevelKeyValue, self.repo.commit(),
        )
        self.assertScan(commit, 21)
        self.assertEqual(commit.range['lte'], self.last_key)
        self.assertEqual(commit.get(self.last_key), self.last_row)
| {
"content_hash": "7502c0dc1956d8f217b11ee2af835efa",
"timestamp": "",
"source": "github",
"line_count": 103,
"max_line_length": 69,
"avg_line_length": 32.22330097087379,
"alnum_prop": 0.5896354323591443,
"repo_name": "samv/unique",
"id": "a8a514493bcd2f09309a3a1f398e2af61795a05d",
"size": "3320",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_read.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "15885"
}
],
"symlink_target": ""
} |
import os
import sys
import pygame
from common import mvc
import minimenu
from common.constants import *
from client.constants import *
from common.util.rect import Rect
class Model(mvc.Model):
    """Model for the startup debug-menu screen.

    Builds a MiniMenu centered on the screen and exposes the hooks the
    controller drives: inc/dec, confirm, per-frame update and mouse moves.
    """
    def __init__(self):
        super(Model, self).__init__()
        entries = ["Combat Debug", "Map Debug", "Character Select Debug",
                   "Play Game Normally", "Exit"]
        menuArea = Rect( (50, 50), (200, 0) )
        self.debugMenu = minimenu.MiniMenu(menuArea, entries,
                                           MINIMENU_FONT, (250, 250, 250),
                                           (125, 125, 125), (15, 15, 15))
        self.debugMenu.center(ENTIRE_SCREEN, True, True)

    def incMenu(self):
        """Advance the menu selection (delegates to MiniMenu.inc)."""
        self.debugMenu.inc()

    def decMenu(self):
        """Move the menu selection back (delegates to MiniMenu.dec)."""
        self.debugMenu.dec()

    def confirm(self):
        """Flag the screen as finished once an entry is actually selected."""
        if not self.debugMenu.noSelection:
            self.advanceNow = True

    def update(self):
        """Per-frame update hook; this screen has nothing to animate."""
        pass

    def cancel(self):
        """Cancel hook; ignored on this screen."""
        pass

    def mouseMoved(self, pos):
        """Select the entry under *pos*, or clear the selection (-1)."""
        hit = self.debugMenu.isAreaRect(pos)
        self.debugMenu.setVal(hit if hit > 0 else -1)
| {
"content_hash": "640513fc66bbcc2143afee049e363c9b",
"timestamp": "",
"source": "github",
"line_count": 46,
"max_line_length": 77,
"avg_line_length": 25.608695652173914,
"alnum_prop": 0.5568760611205433,
"repo_name": "Wopple/fimbulvetr",
"id": "18761968397a38e37564340c479b9095b00a0cd5",
"size": "1178",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/client/debug_m.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "621329"
},
{
"name": "Shell",
"bytes": "85"
}
],
"symlink_target": ""
} |
from django_sqlalchemy.test import *
from django_sqlalchemy.backend import metadata
from django.db.models.manager import EmptyManager
class AnonymousUser(object):
    """Stand-in anonymous user (presumably modeled on django.contrib.auth's
    AnonymousUser — confirm): no id, all permission flags off, and an
    EmptyManager for groups so related queries return nothing."""
    id = None
    username = ''
    is_staff = False
    is_active = False
    is_superuser = False
    groups = EmptyManager()
    def __unicode__(self):
        return 'AnonymousUser'
a = AnonymousUser()
class TestEmptyManager(object):
    """Checks that EmptyManager-backed queries come back empty."""
    def setup(self):
        pass
    def test_should_return_empty_manager(self):
        # groups is an EmptyManager, so all() must yield no rows
        assert_list_same([], a.groups.all())
| {
"content_hash": "0c5ea7004457965e40bc5c98f8b22be5",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 49,
"avg_line_length": 21.4,
"alnum_prop": 0.6728971962616822,
"repo_name": "brosner/django-sqlalchemy",
"id": "2dfc91c8b948affb8362b25394728ff3f7992e15",
"size": "535",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/managers/test_empty_manager.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "141002"
}
],
"symlink_target": ""
} |
from twisted.application import internet
from twisted.application.service import IServiceMaker
from twisted.internet import endpoints
from twisted.plugin import IPlugin
from twisted.python import reflect, usage
from zope.interface import implementer
from zope.interface.verify import verifyClass
# Defaults for the plugin's command-line options (see Options below).
DEFAULT_ENDPOINT = "tcp:2575"
DEFAULT_RECEIVER = "txHL7.receiver.LoggingReceiver"
class Options(usage.Options):
    """Define the options accepted by the ``twistd mllp`` plugin"""
    synopsis = "[mllp options]"
    # each entry: [long-name, short-name, default, help-text]
    optParameters = [
        ['endpoint', 'e', DEFAULT_ENDPOINT, 'The string endpoint on which to listen.'],
        ['receiver', 'r', DEFAULT_RECEIVER, 'A txHL7.receiver.IHL7Receiver subclass to handle messages.'],
    ]
    longdesc = """\
Starts an MLLP server. If no arguments are specified,
it will be a demo server that logs and ACKs each message received."""
@implementer(IServiceMaker, IPlugin)
class MLLPServiceMaker(object):
    """Service maker for the MLLP server."""
    tapname = "mllp"
    description = "HL7 MLLP server."
    options = Options

    def makeService(self, options):
        """Construct a server using MLLPFactory.

        :rtype: :py:class:`twisted.application.internet.StreamServerEndpointService`
        """
        # imported here so plugin discovery does not pull in the reactor
        from twisted.internet import reactor
        from txHL7.mllp import IHL7Receiver, MLLPFactory
        receiver_path = options['receiver']
        receiver_cls = reflect.namedClass(receiver_path)
        # fail fast if the configured class does not implement the interface
        verifyClass(IHL7Receiver, receiver_cls)
        factory = MLLPFactory(receiver_cls())
        endpoint = endpoints.serverFromString(reactor, options['endpoint'])
        service = internet.StreamServerEndpointService(endpoint, factory)
        service.setName(u"mllp-{0}".format(receiver_path))
        return service
serviceMaker = MLLPServiceMaker()
| {
"content_hash": "3e7d1fa40e2415a99945fc4edf78b092",
"timestamp": "",
"source": "github",
"line_count": 51,
"max_line_length": 106,
"avg_line_length": 35.84313725490196,
"alnum_prop": 0.7177242888402626,
"repo_name": "johnpaulett/txHL7",
"id": "96be799fcf259434a8e8589995646dbfda71713a",
"size": "1828",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "twisted/plugins/mllp_plugin.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "843"
},
{
"name": "Python",
"bytes": "16824"
}
],
"symlink_target": ""
} |
import requests
import json

# Given a government issued ID card from the UAE (emirates). Extract the holder's face and display all scanned fields.
#
# PixLab recommend that you connect your AWS S3 bucket via your dashboard at https://console.pixlab.io/
# so that any cropped face or MRZ crop is stored automatically on your S3 bucket rather than the PixLab one.
# This feature should give you full control over your analyzed media files.
#
# https://pixlab.io/cmd?id=docscan&&country=uae for additional information.
req = requests.get('https://api.pixlab.io/docscan',params={
    'img':'https://pixlab.xyz/images/pixlab-uae-id.jpg', # UAE ID Card Input image (https://pixlab.xyz/images/pixlab-uae-id.jpg). POST method for direct upload is also supported
    'type':'idcard', # We are expecting an ID card
    'country': 'uae', # from the Emirates, we support also Malaysia/India/Singapore/US and Passports
    'key':'PIXLAB_API_KEY' # Your PixLab API key. Get yours from https://console.pixlab.io/
})
reply = req.json()
# the API reports errors in-band via the 'status' field
if reply['status'] != 200:
    print (reply['error'])
else:
    print (f"ID Card Holder's Face: {reply['face_url']}")
    print ("Scanned ID Card Fields:\n\t")
    # each field is optional in the response, so probe before printing
    if "issuingCountry" in reply['fields']:
        print ("Issuing Country: " + reply['fields']['issuingCountry'])
    if "documentNumber" in reply['fields']:
        print ("Document Number: " + reply['fields']['documentNumber'])
    if "fullName" in reply['fields']:
        print ("Holder Full Name: " + reply['fields']['fullName'])
    if "nationality" in reply['fields']:
        print ("Holder's Nationality: " + reply['fields']['nationality'])
| {
"content_hash": "1df569695ece4ccd044234936bc8fff6",
"timestamp": "",
"source": "github",
"line_count": 31,
"max_line_length": 174,
"avg_line_length": 50.935483870967744,
"alnum_prop": 0.7181760607979734,
"repo_name": "symisc/pixlab",
"id": "7dccc556896f69c10fcc42c8e3f3e23a232d5579",
"size": "1579",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python/uae_emirates_id_card_scan.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Java",
"bytes": "60145"
},
{
"name": "PHP",
"bytes": "37444"
},
{
"name": "Python",
"bytes": "53592"
}
],
"symlink_target": ""
} |
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import re
from pants.backend.core.tasks.console_task import ConsoleTask
from pants.base.build_environment import get_scm
from pants.base.exceptions import TaskError
from pants.base.lazy_source_mapper import LazySourceMapper
from pants.goal.workspace import ScmWorkspace
class ChangeCalculator(object):
  """A utility for calculating changed files or changed target addresses."""

  def __init__(self,
               scm,
               workspace,
               address_mapper,
               build_graph,
               fast=False,
               changes_since=None,
               diffspec=None,
               include_dependees=None,
               exclude_target_regexp=None,
               spec_excludes=None):
    """
    :param scm: SCM wrapper used to resolve the current revision.
    :param workspace: ScmWorkspace used to enumerate touched files.
    :param address_mapper: maps specs to addresses; also used for full scans.
    :param build_graph: graph that gets populated when dependees are needed.
    :param fast: stop searching for more owners once a source is mapped.
    :param changes_since: tree-ish to diff against (defaults to HEAD/tip).
    :param diffspec: alternative: an scm spec (commit range/sha/ref/...).
    :param include_dependees: 'none', 'direct' or 'transitive'.
    :param exclude_target_regexp: list of regexps filtering result targets.
    :param spec_excludes: passed through to address scanning.
    """
    self._scm = scm
    self._workspace = workspace
    self._address_mapper = address_mapper
    self._build_graph = build_graph
    self._fast = fast
    self._changes_since = changes_since
    self._diffspec = diffspec
    self._include_dependees = include_dependees
    self._exclude_target_regexp = exclude_target_regexp
    self._spec_excludes = spec_excludes
    self._mapper_cache = None

  @property
  def _mapper(self):
    # Lazily build the LazySourceMapper so callers that never map sources
    # do not pay for its construction.
    if self._mapper_cache is None:
      self._mapper_cache = LazySourceMapper(self._address_mapper, self._build_graph, self._fast)
    return self._mapper_cache

  def changed_files(self):
    """Determines the files changed according to SCM/workspace and options."""
    if self._diffspec:
      return self._workspace.changes_in(self._diffspec)
    else:
      since = self._changes_since or self._scm.current_rev_identifier()
      return self._workspace.touched_files(since)

  def _directly_changed_targets(self):
    # Internal helper to find target addresses containing SCM changes.
    targets_for_source = self._mapper.target_addresses_for_source
    return set(addr for src in self.changed_files() for addr in targets_for_source(src))

  def _find_changed_targets(self):
    # Internal helper to find changed targets, optionally including their dependees.
    changed = self._directly_changed_targets()

    # Skip loading the graph or doing any further work if no directly changed targets found.
    if not changed:
      return changed

    if self._include_dependees == 'none':
      return changed

    # Load the whole build graph since we need it for dependee finding in either remaining case.
    # NOTE: this mutates self._build_graph; dependee queries below rely on it.
    for address in self._address_mapper.scan_addresses(spec_excludes=self._spec_excludes):
      self._build_graph.inject_address_closure(address)

    if self._include_dependees == 'direct':
      return changed.union(*[self._build_graph.dependents_of(addr) for addr in changed])

    if self._include_dependees == 'transitive':
      return set(t.address for t in self._build_graph.transitive_dependees_of_addresses(changed))

    # Should never get here.
    raise ValueError('Unknown dependee inclusion: "{}"'.format(self._include_dependees))

  def changed_target_addresses(self):
    """Find changed targets, according to SCM.

    This is the intended entry point for finding changed targets unless callers have a specific
    reason to call one of the above internal helpers. It will find changed targets and:
      - Optionally find changes in a given diffspec (commit, branch, tag, range, etc).
      - Optionally include direct or transitive dependees.
      - Optionally filter targets matching exclude_target_regexp.

    :returns: A set of target addresses.
    """
    # Find changed targets (and maybe their dependees).
    changed = self._find_changed_targets()

    # Remove any that match the exclude_target_regexp list.
    excludes = [re.compile(pattern) for pattern in self._exclude_target_regexp]
    return set([
      t for t in changed if not any(exclude.search(t.spec) is not None for exclude in excludes)
    ])
class ChangedFileTaskMixin(object):
  """A mixin for tasks which require the set of targets (or files) changed according to SCM.

  Changes are calculated relative to a ref/tree-ish (defaults to HEAD), and changed files are then
  mapped to targets using LazySourceMapper. LazySourceMapper can optionally be used in "fast" mode,
  which stops searching for additional owners for a given source once a one is found.
  """
  @classmethod
  def register_change_file_options(cls, register):
    # Options shared by every "changed"-style task that mixes this in.
    register('--fast', action='store_true', default=False,
             help='Stop searching for owners once a source is mapped to at least owning target.')
    register('--changes-since', '--parent',
             help='Calculate changes since this tree-ish/scm ref (defaults to current HEAD/tip).')
    register('--diffspec',
             help='Calculate changes contained within given scm spec (commit range/sha/ref/etc).')
    register('--include-dependees', choices=['none', 'direct', 'transitive'], default='none',
             help='Include direct or transitive dependees of changed targets.')

  @classmethod
  def change_calculator(cls, options, address_mapper, build_graph, scm=None, workspace=None, spec_excludes=None):
    """Build a ChangeCalculator from task options.

    :raises TaskError: when no SCM is available.
    """
    scm = scm or get_scm()
    if scm is None:
      raise TaskError('No SCM available.')
    workspace = workspace or ScmWorkspace(scm)

    return ChangeCalculator(scm,
                            workspace,
                            address_mapper,
                            build_graph,
                            fast=options.fast,
                            changes_since=options.changes_since,
                            diffspec=options.diffspec,
                            include_dependees=options.include_dependees,
                            # NB: exclude_target_regexp is a global scope option registered
                            # elsewhere
                            exclude_target_regexp=options.exclude_target_regexp,
                            spec_excludes=spec_excludes)
class WhatChanged(ConsoleTask, ChangedFileTaskMixin):
  """Emits the targets that have been modified since a given commit."""
  @classmethod
  def register_options(cls, register):
    super(WhatChanged, cls).register_options(register)
    cls.register_change_file_options(register)
    register('--files', action='store_true', default=False,
             help='Show changed files instead of the targets that own them.')

  def console_output(self, _):
    """Yield changed file paths (with --files) or changed target specs, sorted."""
    spec_excludes = self.get_options().spec_excludes
    change_calculator = self.change_calculator(self.get_options(),
                                               self.context.address_mapper,
                                               self.context.build_graph,
                                               scm=self.context.scm,
                                               workspace=self.context.workspace,
                                               spec_excludes=spec_excludes)
    if self.get_options().files:
      for f in sorted(change_calculator.changed_files()):
        yield f
    else:
      for addr in sorted(change_calculator.changed_target_addresses()):
        yield addr.spec
| {
"content_hash": "56b5d3214c6191894720d1fdad8f3d45",
"timestamp": "",
"source": "github",
"line_count": 167,
"max_line_length": 113,
"avg_line_length": 42.76646706586826,
"alnum_prop": 0.6535984318118174,
"repo_name": "pgroudas/pants",
"id": "bc29f6ab99814373435db9fce3f2cb533dfffd85",
"size": "7289",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/python/pants/backend/core/tasks/what_changed.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "767"
},
{
"name": "CSS",
"bytes": "10984"
},
{
"name": "GAP",
"bytes": "4818"
},
{
"name": "HTML",
"bytes": "68090"
},
{
"name": "Java",
"bytes": "297674"
},
{
"name": "JavaScript",
"bytes": "10157"
},
{
"name": "Protocol Buffer",
"bytes": "6172"
},
{
"name": "Python",
"bytes": "2868194"
},
{
"name": "Scala",
"bytes": "105948"
},
{
"name": "Shell",
"bytes": "39579"
},
{
"name": "Thrift",
"bytes": "2824"
}
],
"symlink_target": ""
} |
"""
firval
======
a netfilter firewall rules generator designed
to be easy to read, write and maintain
How to use
==========
Write a yaml configuration file and feed it to firval,
it will produce a iptables-restore compatible rule file
it means you can do this:
cat rules.yaml | firval | iptables-restore
Configuration syntax
====================
interfaces:
IFNAME: PHYSICALINTERFACE
addresses:
ADDRNAME: HOSTADDR | NETADDR
ports:
PORTNAME: PORTNUMBER
chains:
filter|nat|mangle:
CHAINNAME:
- RULE
- ...
services:
SERVICENAME:
proto: tcp | udp | icmp
port: PORT-NUMBER(,PORT-NUMBER)* (only for tcp or udp)
type: ICMP-TYPE (only for icmp)
rulesets:
IFNAME-to-IFNAME:
filter|nat|mangle:
input|forward|output|...: (availability depends if in 'filter', 'nat' or 'mangle')
- RULE
- ...
RULE = ((accept|reject|drop|masquerade|log|nflog)
((not)? from ADDRNAME ((not)? port PORTNAME)?)?
((not)? to ADDRNAME ((not)? port PORTNAME)?)?
((not)? proto (tcp|udp|icmp|any))?
(service SERVICENAME)?
(state (new|established|invalid))?
(limit INTEGER/TIMEUNIT (burst INTEGER)?)?
(comment "COMMENT")?
(prefix "LOG_PREFIX"))
| (jump CHAINNAME)
"""
import sys
import re
import datetime
from voluptuous import Schema, Required, Optional, Any, All, Invalid, Match, In
from voluptuous import MultipleInvalid
from netaddr import IPNetwork
import yaml
import argparse
class ConfigError(Exception):
    """
    Exception for Configuration Errors: raised when a rule is semantically
    invalid (unknown chain/service, missing interface, conflicting options).
    """
    pass
class ParseError(Exception):
    """
    Exception for Parsing Errors: raised when a rule string does not match
    the firval rule grammar (Rule.pattern).
    """
    pass
class Firval(object):
    """
    The main Firval class
    """
    # name-validation regexes for aliases, zones, interfaces and rulesets
    _re = {
        'obj': '^[a-zA-Z0-9-_]{1,128}$',
        'zone': '^[a-z0-9]+$',
        'if': '^[a-z0-9:.]+$',
        'ruleset': '^[a-z0-9_]+-to-[a-z0-9_]+$',
    }

    protocols = ('tcp', 'udp', 'icmp')

    # iptables --icmp-type names accepted by the 'type' keyword
    icmp_types = ('echo-reply', 'pong', 'destination-unreachable',
                  'network-unreachable', 'host-unreachable',
                  'protocol-unreachable', 'port-unreachable',
                  'fragmentation-needed', 'source-route-failed',
                  'network-unknown', 'host-unknown', 'network-prohibited',
                  'host-prohibited', 'TOS-network-unreachable',
                  'TOS-host-unreachable', 'communication-prohibited',
                  'host-precedence-violation', 'precedence-cutoff',
                  'source-quench', 'redirect', 'network-redirect',
                  'host-redirect', 'TOS-network-redirect', 'TOS-host-redirect',
                  'echo-request', 'ping', 'router-advertisement',
                  'router-solicitation', 'time-exceeded', 'ttl-exceeded',
                  'ttl-zero-during-transit', 'ttl-zero-during-reassembly',
                  'parameter-problem', 'ip-header-bad',
                  'required-option-missing', 'timestamp-request',
                  'timestamp-reply', 'address-mask-request',
                  'address-mask-reply')

    # chains provided by iptables itself, per table
    _syschains = {
        'filter': ('input', 'forward', 'output'),
        'nat': ('prerouting', 'input', 'output', 'postrouting'),
        'mangle': ('prerouting', 'input', 'forward', 'output', 'postrouting')
    }

    def __init__(self, obj):
        """
        initializes the object

        parameters:
            obj: the datastructure representing the rules
        """
        # NOTE(review): self.chains is never used within this class — confirm
        # before removing.
        self.chains = []
        self.data = self.validate(obj)

    def _get_iface(self, name):
        """
        get an interface name from config

        parameters:
            name: the symbolic interface name

        returns:
            the physical interface name
        """
        try:
            return self.data['interfaces'][name]
        except KeyError:
            return None

    @staticmethod
    def _valid_addr(address):
        """
        object for voluptuous syntax validation

        parameters:
            address: an IP address or network

        returns:
            an IPNetwork object (raises if the address is invalid)
        """
        return IPNetwork(address)

    @classmethod
    def validate(cls, data):
        """
        validates the data schema

        parameters:
            data: the data structure to validate

        returns:
            the validated data structure (raises MultipleInvalid on failure)
        """
        Schema({
            Required('interfaces'): {
                All(str, Match(cls._re['obj'])):
                    All(str, Match(cls._re['if']))
            },
            Optional('addresses'): {
                All(str, Match(cls._re['obj'])):
                    All(str, cls._valid_addr)
            },
            Optional('ports'): {
                All(str, Match(cls._re['obj'])):
                    All(int)
            },
            Optional('services'): {
                All(str, Match(cls._re['obj'])): {
                    Required('proto'): All(str, In(cls.protocols)),
                    'port': Any(int,
                                Match(r'^[a-z-]+$'),
                                Match(r'^\d+(,\d+)*$')),
                    'type': All(str, In(cls.icmp_types)),
                }
            },
            Optional('chains'): {
                All(str, In(cls._syschains.keys())): {
                    All(str, Match(cls._re['obj'])):
                        [All(str, Match(Rule.pattern))]
                }
            },
            'rulesets': {
                All(str, Match(cls._re['ruleset'])): {
                    'filter': {
                        All(str, In(cls._syschains['filter'])):
                            [All(str, Match(Rule.pattern))],
                    },
                    'nat': {
                        All(str, In(cls._syschains['nat'])):
                            [All(str, Match(Rule.pattern))],
                    },
                    'mangle': {
                        All(str, In(cls._syschains['mangle'])):
                            [All(str, Match(Rule.pattern))],
                    }
                }
            }
        })(data)
        return data

    def __str__(self):
        """
        prints the rules represented by this object

        returns:
            string reprentation of the ruleset
        """
        data = self.data
        lne = []
        if 'rulesets' not in data:
            return ""
        rules = {}
        routing = {}
        custchains = {}
        # Rulesets Generation #################################################
        for ruleset in data['rulesets']:
            # Interfaces #####################################################
            # ruleset keys look like "lan-to-wan"; 'any' means no -i/-o match
            (izone, ozone) = re.match(r'^(\S+)-to-(\S+)$', ruleset).groups()
            if izone == 'any':
                iif = None
            else:
                iif = self._get_iface(izone)
                if iif is None:
                    raise ConfigError("{0} interface is not defined".format(izone))
            if ozone == 'any':
                oif = None
            else:
                oif = self._get_iface(ozone)
                if oif is None:
                    raise ConfigError("{0} interface is not defined".format(ozone))
            # Tables ##########################################################
            for table in data['rulesets'][ruleset]:
                if table not in rules:
                    rules[table] = {}
                if table not in routing:
                    routing[table] = {}
                # Chains ######################################################
                for chain in data['rulesets'][ruleset][table]:
                    # Routing #################################################
                    # dispatch rules in system chains that jump to the
                    # per-ruleset chains; most-specific first, default last
                    if chain not in routing[table]:
                        routing[table][chain] = []
                    rule = ['-A', chain.upper()]
                    if iif is not None:
                        rule.extend(['-i', iif])
                    if oif is not None:
                        rule.extend(['-o', oif])
                    rule.extend(['-j', '{0}-{1}'.format(chain, ruleset).lower()])
                    rule.extend(['-m', 'comment'])
                    rule.extend(['--comment', '"{0} {1} -> {2}"'.format(chain, izone, ozone)])
                    rulestr = ' '.join(rule)
                    # default rule comes last
                    # NOTE(review): insert(len(...) - 1, ...) on an empty list
                    # prepends; ordering relies on ruleset iteration order —
                    # confirm behaviour with many half-specified rulesets.
                    if iif is None and oif is None:
                        routing[table][chain].append(rulestr)
                    elif iif is None or oif is None:
                        routing[table][chain].insert(
                            len(routing[table][chain]) - 1, rulestr)
                    else:
                        routing[table][chain].insert(0, rulestr)
                    # Rules ###################################################
                    if chain not in rules[table]:
                        rules[table][chain] = {}
                    rules[table][chain][ruleset] = []
                    for rule in data['rulesets'][ruleset][table][chain]:
                        rules[table][chain][ruleset].append('-A {0}-{1} {2}'.format(chain.lower(), ruleset.lower(), str(Rule(rule, aliases=self.data, table=table))))
        # Custom Chains Generation ############################################
        if 'chains' in data:
            for table in data['chains']:
                custchains[table] = {}
                for chain in data['chains'][table]:
                    custchains[table][chain] = []
                    for rule in data['chains'][table][chain]:
                        custchains[table][chain].append('-A custom-{0} {1}'.format(chain.lower(), str(Rule(rule, aliases=self.data, table=table))))
        # Rules Output ########################################################
        lne = ['# generated by firval {0}'.format(datetime.datetime.now())]
        # Tables ##############################################################
        for table in rules:
            if len(lne) > 1:
                lne.append("COMMIT")
            lne.append("*{0}".format(table))
            # system chains
            for chain in self._syschains[table]:
                lne.append(':{0} ACCEPT [0:0]'.format(chain.upper()))
            # custom routing chains
            for chain in rules[table]:
                for ruleset in rules[table][chain]:
                    lne.append(':{0}-{1} - [0:0]'.format(chain.lower(), ruleset.lower()))
            # custom chains
            if table in custchains:
                for chain in custchains[table]:
                    lne.append(':custom-{0} - [0:0]'.format(chain.lower()))
            # routing rules
            for chain in routing[table]:
                for rule in routing[table][chain]:
                    lne.append(rule)
            # chain rules
            for chain in rules[table]:
                for ruleset in rules[table][chain]:
                    for rule in rules[table][chain][ruleset]:
                        lne.append(rule)
            # custom chain rules
            if table in custchains:
                for chain in custchains[table]:
                    for rule in custchains[table][chain]:
                        lne.append(rule)
        lne.append('COMMIT')
        lne.append('# finished {0}'.format(datetime.datetime.now()))
        return "\n".join(lne)
class Rule():
    """
    object representing an iptables rule
    """

    # One anchored regex implementing the firval rule mini-language; the
    # named groups become the attribute dict produced by parse().
    # FIX: the 'limit' group was r'\d+/\S', which only accepted a single
    # character time unit, so e.g. "limit 5/second" failed to parse.
    pattern = r'^\s*(' + \
        r'(jump\s+(?P<jump_chain>\S+))|' + \
        r'(?P<clampmss>clampmss)|' + \
        r'(?P<setmss>setmss\s+(?P<max_mss>\d+))|' + \
        r'(?P<action>accept|reject|drop|masquerade|log|nflog)' + \
        r'(?:(?:\s+(?P<src_neg>not))?\s+from\s+(?P<src_addr>\S+)' + \
        r'(?:(?:\s+(?P<src_port_neg>not))?\s+port\s+(?P<src_port>\S+))?)?' + \
        r'(?:(?:\s+(?P<dst_neg>not))?\s+to\s+(?P<dst_addr>\S+)' + \
        r'(?:(?:\s+(?P<dst_port_neg>not))?\s+port\s+(?P<dst_port>\S+))?)?' + \
        r'(?:(?:\s+(?P<proto_neg>not))?\s+proto\s+(?P<proto>tcp|udp|icmp|any))?' + \
        r'(?:(?:\s+(?P<icmp_type_neg>not))?\s+type\s+(?P<icmp_type>\S+))?' + \
        r'(?:\s+service\s+(?P<service>\S+))?' + \
        r'(?:\s+state\s+(?P<state>new|established|invalid))?' + \
        r'(?:\s+limit\s+(?P<limit>\d+/\S+)(?:\s+burst\s+(?P<limit_burst>\S+))?)?' + \
        r'(?:\s+comment\s+(?P<comment>"[^"]+"))?' + \
        r'(?:\s+prefix\s+(?P<log_prefix>"[^"]*"))?' + \
        r')\s*$'

    def __init__(self, text, aliases=None, table=None):
        """
        initializes the Rule object

        parameters:
            text: the rule written with firval simplified syntax
            aliases: address, ports, services and chains dictionnary
            table: chains dictionnary for chain jumping
        """
        # FIX: do not pre-set ``self.comment = None`` here: a real instance
        # attribute takes precedence over __getattr__, which hid the parsed
        # ``comment "..."`` clause from __str__ (it was always ignored).
        self.data = None
        self._text = text
        self._aliases = aliases if aliases is not None else {}
        self._table = table if table is not None else ''
        self.data = self.parse(text)

    def __getattr__(self, name):
        """
        retrieves an internal attribute

        parameters:
            name: the attribute name

        returns:
            the attribute value or None if not found
        """
        if self.data is not None and name in self.data:
            return self.data[name]
        return None

    @classmethod
    def parse(cls, text):
        """
        parse some text and return an attribute dict

        parameters:
            text: the rule text in firval language

        returns:
            an attribute dictionnary (unmatched groups map to None)

        raises:
            ParseError: when the text does not match the rule grammar
        """
        result = re.match(cls.pattern, text)
        if result:
            return result.groupdict()
        else:
            raise ParseError(text)

    @staticmethod
    def _is_any(value):
        """
        check if a value is 'any' or equivalent (None)

        parameters:
            value: the value to check

        returns:
            True or False
        """
        return value is None or value == 'any'

    def _get_address(self, name):
        """
        get an address from the address table

        parameters:
            name: the name associated with the address

        returns:
            the address associated with the name, or the name itself when
            it is not a declared alias
        """
        try:
            return self._aliases['addresses'][name]
        except KeyError:
            return name

    def _get_port(self, name):
        """
        get a port from the port table

        parameters:
            name: the name associated with the port

        returns:
            the port associated with the name, or the name itself when it
            is not a declared alias
        """
        try:
            return self._aliases['ports'][name]
        except KeyError:
            return name

    def _get_service(self, name):
        """
        get a service from the service table

        parameters:
            name: the name associated with the service

        returns:
            the service associated with the name, or None if unknown
        """
        try:
            return self._aliases['services'][name]
        except KeyError:
            return None

    def _get_chain(self, table, name):
        """
        get a chain from the chains table

        parameters:
            table: the table in which the chain is
            name: the name associated with the chain

        returns:
            the chain associated with the name, or None if unknown
        """
        try:
            return self._aliases['chains'][table][name]
        except KeyError:
            return None

    def __repr__(self):
        return self.__class__.__name__ + '(' + self._text + ')'

    def __str__(self):
        """
        the processed string representation of this rule

        returns:
            the string representation of this rule (iptables arguments)

        raises:
            ConfigError: on semantically invalid combinations
        """
        rule = []
        # Source address
        if not self._is_any(self.src_addr):
            if self.src_neg is not None:
                rule.append('!')
            rule.extend(['-s', str(self._get_address(self.src_addr))])
        # Destination address
        if not self._is_any(self.dst_addr):
            if self.dst_neg is not None:
                rule.append('!')
            rule.extend(['-d', str(self._get_address(self.dst_addr))])
        # Protocol
        if not self._is_any(self.proto):
            if self.proto_neg is not None:
                rule.append('!')
            rule.extend(['-p', str(self.proto)])
        # Source port (requires an explicit protocol)
        if not self._is_any(self.src_port):
            if self._is_any(self.proto):
                raise ConfigError("protocol must be set when using port in '{0}'".format(self._text))
            if self.src_port_neg is not None:
                rule.append('!')
            rule.extend(['--sport', str(self._get_port(self.src_port))])
        # Destination port (requires an explicit protocol)
        if not self._is_any(self.dst_port):
            if self._is_any(self.proto):
                raise ConfigError("protocol must be set when using port in '{0}'".format(self._text))
            if self.dst_port_neg is not None:
                rule.append('!')
            rule.extend(['--dport', str(self._get_port(self.dst_port))])
        # ICMP Type (only meaningful with proto icmp)
        if not self._is_any(self.icmp_type):
            if self._is_any(self.proto):
                raise ConfigError("protocol must be set when using icmp-type in '{0}'".format(self._text))
            if self.proto != 'icmp':
                raise ConfigError("protocol must be 'icmp' when using icmp-type in '{0}'".format(self._text))
            if self.icmp_type_neg is not None:
                rule.append('!')
            rule.extend(['--icmp-type', str(self.icmp_type)])
        # Service: expands to proto + destination port(s)
        if self.service is not None:
            if not self._is_any(self.dst_port) or not self._is_any(self.proto):
                raise ConfigError('service conflicts with dport or proto:', self.service)
            service = self._get_service(self.service)
            if service is None:
                raise ConfigError('unknown service: ' + self.service)
            rule.extend(['-p', service['proto']])
            if service['proto'] in ['tcp', 'udp']:
                if re.match(r'^\d+(,\d+)*$', str(service['port'])):
                    ports = re.split(',', str(service['port']))
                    # several numeric ports need the multiport match
                    if len(ports) > 1:
                        rule.extend(['-m', 'multiport'])
                        rule.extend(['--dports', str(service['port'])])
                    else:
                        rule.extend(['--dport', str(service['port'])])
                else:
                    rule.extend(['--dport', str(service['port'])])
        # State
        if not self._is_any(self.state):
            if self.state == 'new':
                rule.extend(['-m', 'state', '--state', 'NEW'])
            elif self.state == 'established':
                rule.extend(['-m', 'state', '--state', 'ESTABLISHED,RELATED'])
            elif self.state == 'invalid':
                rule.extend(['-m', 'state', '--state', 'INVALID'])
        # Limit
        if self.limit is not None:
            rule.extend(['-m', 'limit', '--limit', str(self.limit)])
            if not self._is_any(self.limit_burst):
                rule.extend(['--limit-burst', str(self.limit_burst)])
        # Actions
        if self.action is not None:
            rule.extend(['-j', str(self.action.upper())])
            if self.action == 'reject':
                rule.extend(['--reject-with', 'icmp-host-prohibited'])
        # Prefix (only valid for log/nflog actions)
        if self.log_prefix is not None:
            if self.action == 'log':
                rule.extend(['--log-prefix', str(self.log_prefix)])
            elif self.action == 'nflog':
                rule.extend(['--nflog-prefix', str(self.log_prefix)])
            else:
                raise ConfigError("log prefix requires 'log' or 'nflog' action")
        # Jump to custom chain
        if self.jump_chain is not None:
            if self._get_chain(self._table, self.jump_chain):
                rule.extend(['-j', 'custom-{0}'.format(self.jump_chain)])
            else:
                raise ConfigError("unknown chain: " + self.jump_chain)
        # Special Cases
        if self.clampmss is not None:
            rule.extend(['-p', 'tcp', '--tcp-flags', 'SYN,RST', 'SYN'])
            rule.extend(['-j', 'TCPMSS', '--clamp-mss-to-pmtu'])
        elif self.setmss is not None:
            rule.extend(['-p', 'tcp', '--tcp-flags', 'SYN,RST', 'SYN'])
            rule.extend(['-j', 'TCPMSS'])
            rule.extend(['--set-mss', '{0}'.format(self.max_mss)])
        # Comment: use the explicit ``comment "..."`` clause when present
        # (via __getattr__ over the parsed data), otherwise fall back to
        # quoting the original rule text.
        comment = self.comment
        if comment is None:
            comment = '"' + re.sub('"', '\\"', self._text) + '"'
        rule.extend(['-m', 'comment', '--comment', str(comment)])
        return ' '.join(rule)
def main():
    """
    command-line version of the lib

    Reads a yaml rules file (or stdin when the argument is '-' or omitted)
    and prints iptables-restore compatible output. Errors are reported as
    '#' comment lines so the output stream stays parseable.
    """
    try:
        parser = argparse.ArgumentParser()
        parser.add_argument('file', type=str, nargs='?', default='-',
                            help='a yaml rules file')
        args = parser.parse_args()
        # SECURITY FIX: yaml.safe_load instead of yaml.load — rule files are
        # plain data, and yaml.load can instantiate arbitrary Python objects
        # from attacker-controlled input (it is also deprecated without an
        # explicit Loader).
        if args.file == '-':
            print(str(Firval(yaml.safe_load(sys.stdin))))
        else:
            with open(args.file, 'r') as fd:
                print(str(Firval(yaml.safe_load(fd))))
    except yaml.parser.ParserError as ex:
        print("# firval: yaml parsing error: " + str(ex).replace("\n", ""))
    except MultipleInvalid as ex:
        print("# firval: config structure error: " + str(ex).replace("\n", ""))
    except ParseError as ex:
        print("# firval: rule parsing error: " + str(ex).replace("\n", ""))
    except ConfigError as ex:
        print("# firval: config error: " + str(ex).replace("\n", ""))
    except KeyboardInterrupt as ex:
        print("# firval: keyboard interrupt")
    except Exception as ex:
        print("# firval: error: " + str(ex).replace("\n", ""))
| {
"content_hash": "4c00dfd96355dd67fd7b6f59ac201016",
"timestamp": "",
"source": "github",
"line_count": 651,
"max_line_length": 165,
"avg_line_length": 33.795698924731184,
"alnum_prop": 0.47638743693468477,
"repo_name": "nlm/firval",
"id": "4fd39cc573bab2a39ab3b8fa5fd70c2041143481",
"size": "22001",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "firval/__init__.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "27171"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
# layout.py
# ---------
# Licensing Information: Please do not distribute or publish solutions to this
# project. You are free to use and extend these projects for educational
# purposes. The Pacman AI projects were developed at UC Berkeley, primarily by
# John DeNero (denero@cs.berkeley.edu) and Dan Klein (klein@cs.berkeley.edu).
# Student side autograding was added by Brad Miller, Nick Hay, and Pieter
# Abbeel in Spring 2013.
# For more info, see http://inst.eecs.berkeley.edu/~cs188/pacman/pacman.html
from builtins import open
from builtins import int
from future import standard_library
standard_library.install_aliases()
from builtins import zip
from builtins import range
from builtins import object
from .util import manhattanDistance
from .game import Grid
import os
import random
from functools import reduce
VISIBILITY_MATRIX_CACHE = {}
class Layout(object):
    """
    A Layout manages the static information about the game board:
    the wall and food grids, capsule locations, and the initial agent
    positions, all parsed from an ASCII maze description.
    """

    def __init__(self, layoutText):
        """Parse *layoutText* (a list of equal-length strings) into grids."""
        self.width = len(layoutText[0])
        self.height = len(layoutText)
        self.walls = Grid(self.width, self.height, False)
        self.food = Grid(self.width, self.height, False)
        self.capsules = []
        self.agentPositions = []
        self.numGhosts = 0
        self.processLayoutText(layoutText)
        self.layoutText = layoutText
        # self.initializeVisibilityMatrix()

    def getNumGhosts(self):
        """Return the number of ghost agents declared by the layout."""
        return self.numGhosts

    def initializeVisibilityMatrix(self):
        """
        Precompute, for every non-wall cell and each travel direction, the
        set of half-step points visible before the first wall.  The result
        is memoized in VISIBILITY_MATRIX_CACHE, keyed by the concatenated
        layout text, so identical layouts share one matrix.
        """
        global VISIBILITY_MATRIX_CACHE
        if reduce(str.__add__, self.layoutText) not in VISIBILITY_MATRIX_CACHE:
            from .game import Directions
            vecs = [(-0.5, 0), (0.5, 0), (0, -0.5), (0, 0.5)]
            dirs = [
                Directions.NORTH,
                Directions.SOUTH,
                Directions.WEST,
                Directions.EAST]
            vis = Grid(
                self.width,
                self.height,
                {Directions.NORTH: set(),
                 Directions.SOUTH: set(),
                 Directions.EAST: set(),
                 Directions.WEST: set(),
                 Directions.STOP: set()})
            for x in range(self.width):
                for y in range(self.height):
                    if self.walls[x][y] == False:
                        for vec, direction in zip(vecs, dirs):
                            dx, dy = vec
                            nextx, nexty = x + dx, y + dy
                            while (nextx + nexty) != int(nextx) + int(nexty) or not self.walls[int(nextx)][int(nexty)]:
                                vis[x][y][direction].add((nextx, nexty))
                                # Bug fix: step along the ray.  The previous
                                # code reassigned ``x + dx, y + dy`` here, so
                                # the loop variables never advanced and the
                                # loop could not terminate once entered.
                                nextx, nexty = nextx + dx, nexty + dy
            self.visibility = vis
            VISIBILITY_MATRIX_CACHE[reduce(str.__add__, self.layoutText)] = vis
        else:
            self.visibility = VISIBILITY_MATRIX_CACHE[
                reduce(str.__add__, self.layoutText)]

    def isWall(self, pos):
        """Return True if the (x, y) position *pos* is a wall cell."""
        x, col = pos
        return self.walls[x][col]

    def getRandomLegalPosition(self):
        """Return a uniformly random non-wall (x, y) position."""
        x = random.choice(list(range(self.width)))
        y = random.choice(list(range(self.height)))
        while self.isWall((x, y)):
            x = random.choice(list(range(self.width)))
            y = random.choice(list(range(self.height)))
        return (x, y)

    def getRandomCorner(self):
        """Return one of the four inner board corners, chosen at random."""
        poses = [(1, 1), (1, self.height - 2), (self.width - 2, 1),
                 (self.width - 2, self.height - 2)]
        return random.choice(poses)

    def getFurthestCorner(self, pacPos):
        """Return the inner corner with the largest Manhattan distance from *pacPos*."""
        poses = [(1, 1), (1, self.height - 2), (self.width - 2, 1),
                 (self.width - 2, self.height - 2)]
        dist, pos = max([(manhattanDistance(p, pacPos), p) for p in poses])
        return pos

    def isVisibleFrom(self, ghostPos, pacPos, pacDirection):
        """Return True if *ghostPos* is in the precomputed line of sight
        from *pacPos* facing *pacDirection*.

        Requires initializeVisibilityMatrix() to have been called first.
        """
        row, col = [int(x) for x in pacPos]
        return ghostPos in self.visibility[row][col][pacDirection]

    def __str__(self):
        return "\n".join(self.layoutText)

    def deepCopy(self):
        """Return a new Layout re-parsed from a copy of this layout's text."""
        return Layout(self.layoutText[:])

    def processLayoutText(self, layoutText):
        """
        Coordinates are flipped from the input format to the (x,y) convention here

        The shape of the maze.  Each character
        represents a different type of object.
         % - Wall
         . - Food
         o - Capsule
         G - Ghost
         P - Pacman
        Other characters are ignored.
        """
        maxY = self.height - 1
        for y in range(self.height):
            for x in range(self.width):
                layoutChar = layoutText[maxY - y][x]
                self.processLayoutChar(x, y, layoutChar)
        self.agentPositions.sort()
        # Agent index 0 is Pacman; every other index is a ghost.
        self.agentPositions = [(i == 0, pos) for i, pos in self.agentPositions]

    def processLayoutChar(self, x, y, layoutChar):
        """Record the object encoded by *layoutChar* at grid position (x, y)."""
        if layoutChar == '%':
            self.walls[x][y] = True
        elif layoutChar == '.':
            self.food[x][y] = True
        elif layoutChar == 'o':
            self.capsules.append((x, y))
        elif layoutChar == 'P':
            self.agentPositions.append((0, (x, y)))
        elif layoutChar in ['G']:
            self.agentPositions.append((1, (x, y)))
            self.numGhosts += 1
        elif layoutChar in ['1', '2', '3', '4']:
            # Digits place a ghost with an explicit agent index.
            self.agentPositions.append((int(layoutChar), (x, y)))
            self.numGhosts += 1
def getLayout(name, back=2):
    """Locate and load the layout called *name*.

    The 'layouts/' subdirectory is tried first, then the bare filename;
    if neither exists, the search walks up to *back* parent directories.
    Returns a Layout, or None when nothing is found.
    """
    fname = name if name.endswith('.lay') else name + '.lay'
    layout = tryToLoad('layouts/' + fname)
    if layout is None:
        layout = tryToLoad(fname)
    if layout is None and back >= 0:
        # Not found here: retry one directory up, then restore the cwd.
        here = os.path.abspath('.')
        os.chdir('..')
        layout = getLayout(name, back - 1)
        os.chdir(here)
    return layout
def tryToLoad(fullname):
    """Return a Layout parsed from the file *fullname*, or None if the
    file does not exist.

    Lines are stripped of surrounding whitespace before parsing.
    """
    if not os.path.exists(fullname):
        return None
    # ``with`` guarantees the file is closed, replacing the manual
    # try/finally of the original.
    with open(fullname) as f:
        return Layout([line.strip() for line in f])
| {
"content_hash": "27857031d0b45f7985d41411a49ca1b6",
"timestamp": "",
"source": "github",
"line_count": 180,
"max_line_length": 119,
"avg_line_length": 34.81666666666667,
"alnum_prop": 0.5674166267751716,
"repo_name": "rlpy/rlpy",
"id": "e5b8f3ce9f1718aaddebb4223d76f28929a9b768",
"size": "6267",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "rlpy/Domains/PacmanPackage/layout.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "117712"
},
{
"name": "C++",
"bytes": "1601"
},
{
"name": "PLSQL",
"bytes": "787682"
},
{
"name": "Python",
"bytes": "1215456"
}
],
"symlink_target": ""
} |
from __future__ import print_function, unicode_literals, division, absolute_import
import sys
from dxpy import DXHTTPRequest
from dxpy.utils import Nonce
# NOTE(review): these generated wrappers used the shared mutable literal
# ``input_params={}`` as a default; each now uses the None sentinel and
# substitutes a fresh dict per call (call behavior is unchanged).
def analysis_add_tags(object_id, input_params=None, always_retry=True, **kwargs):
    """Invokes the /analysis-xxxx/addTags API method (see https://wiki.dnanexus.com/API-Specification-v1.0.0/Workflows-and-Analyses#API-method%3A-%2Fanalysis-xxxx%2FaddTags)."""
    if input_params is None:
        input_params = {}
    return DXHTTPRequest('/%s/addTags' % object_id, input_params, always_retry=always_retry, **kwargs)

def analysis_describe(object_id, input_params=None, always_retry=True, **kwargs):
    """Invokes the /analysis-xxxx/describe API method (see https://wiki.dnanexus.com/API-Specification-v1.0.0/Workflows-and-Analyses#API-method%3A-%2Fanalysis-xxxx%2Fdescribe)."""
    if input_params is None:
        input_params = {}
    return DXHTTPRequest('/%s/describe' % object_id, input_params, always_retry=always_retry, **kwargs)

def analysis_remove_tags(object_id, input_params=None, always_retry=True, **kwargs):
    """Invokes the /analysis-xxxx/removeTags API method (see https://wiki.dnanexus.com/API-Specification-v1.0.0/Workflows-and-Analyses#API-method%3A-%2Fanalysis-xxxx%2FremoveTags)."""
    if input_params is None:
        input_params = {}
    return DXHTTPRequest('/%s/removeTags' % object_id, input_params, always_retry=always_retry, **kwargs)

def analysis_set_properties(object_id, input_params=None, always_retry=True, **kwargs):
    """Invokes the /analysis-xxxx/setProperties API method (see https://wiki.dnanexus.com/API-Specification-v1.0.0/Workflows-and-Analyses#API-method%3A-%2Fanalysis-xxxx%2FsetProperties)."""
    if input_params is None:
        input_params = {}
    return DXHTTPRequest('/%s/setProperties' % object_id, input_params, always_retry=always_retry, **kwargs)

def analysis_terminate(object_id, input_params=None, always_retry=True, **kwargs):
    """Invokes the /analysis-xxxx/terminate API method (see https://wiki.dnanexus.com/API-Specification-v1.0.0/Workflows-and-Analyses#API-method%3A-%2Fanalysis-xxxx%2Fterminate)."""
    if input_params is None:
        input_params = {}
    return DXHTTPRequest('/%s/terminate' % object_id, input_params, always_retry=always_retry, **kwargs)
# NOTE(review): these generated wrappers used the shared mutable literal
# ``input_params={}`` as a default; each now uses the None sentinel and
# substitutes a fresh dict per call (call behavior is unchanged).
def app_add_authorized_users(app_name_or_id, alias=None, input_params=None, always_retry=True, **kwargs):
    """Invokes the /app-xxxx[/yyyy]/addAuthorizedUsers API method (see https://wiki.dnanexus.com/API-Specification-v1.0.0/Apps#API-method:-/app-xxxx%5B/yyyy%5D/addAuthorizedUsers)."""
    if input_params is None:
        input_params = {}
    fully_qualified_version = app_name_or_id + (('/' + alias) if alias else '')
    return DXHTTPRequest('/%s/addAuthorizedUsers' % fully_qualified_version, input_params, always_retry=always_retry, **kwargs)

def app_add_categories(app_name_or_id, alias=None, input_params=None, always_retry=True, **kwargs):
    """Invokes the /app-xxxx[/yyyy]/addCategories API method (see https://wiki.dnanexus.com/API-Specification-v1.0.0/Apps#API-method:-/app-xxxx%5B/yyyy%5D/addCategories)."""
    if input_params is None:
        input_params = {}
    fully_qualified_version = app_name_or_id + (('/' + alias) if alias else '')
    return DXHTTPRequest('/%s/addCategories' % fully_qualified_version, input_params, always_retry=always_retry, **kwargs)

def app_add_developers(app_name_or_id, alias=None, input_params=None, always_retry=True, **kwargs):
    """Invokes the /app-xxxx[/yyyy]/addDevelopers API method (see https://wiki.dnanexus.com/API-Specification-v1.0.0/Apps#API-method:-/app-xxxx%5B/yyyy%5D/addDevelopers)."""
    if input_params is None:
        input_params = {}
    fully_qualified_version = app_name_or_id + (('/' + alias) if alias else '')
    return DXHTTPRequest('/%s/addDevelopers' % fully_qualified_version, input_params, always_retry=always_retry, **kwargs)

def app_add_tags(app_name_or_id, alias=None, input_params=None, always_retry=True, **kwargs):
    """Invokes the /app-xxxx[/yyyy]/addTags API method (see https://wiki.dnanexus.com/API-Specification-v1.0.0/Apps#API-method:-/app-xxxx%5B/yyyy%5D/addTags)."""
    if input_params is None:
        input_params = {}
    fully_qualified_version = app_name_or_id + (('/' + alias) if alias else '')
    return DXHTTPRequest('/%s/addTags' % fully_qualified_version, input_params, always_retry=always_retry, **kwargs)

def app_delete(app_name_or_id, alias=None, input_params=None, always_retry=True, **kwargs):
    """Invokes the /app-xxxx[/yyyy]/delete API method (see https://wiki.dnanexus.com/API-Specification-v1.0.0/Apps#API-method:-/app-xxxx%5B/yyyy%5D/delete)."""
    if input_params is None:
        input_params = {}
    fully_qualified_version = app_name_or_id + (('/' + alias) if alias else '')
    return DXHTTPRequest('/%s/delete' % fully_qualified_version, input_params, always_retry=always_retry, **kwargs)

def app_describe(app_name_or_id, alias=None, input_params=None, always_retry=True, **kwargs):
    """Invokes the /app-xxxx[/yyyy]/describe API method (see https://wiki.dnanexus.com/API-Specification-v1.0.0/Apps#API-method:-/app-xxxx%5B/yyyy%5D/describe)."""
    if input_params is None:
        input_params = {}
    fully_qualified_version = app_name_or_id + (('/' + alias) if alias else '')
    return DXHTTPRequest('/%s/describe' % fully_qualified_version, input_params, always_retry=always_retry, **kwargs)

def app_get(app_name_or_id, alias=None, input_params=None, always_retry=True, **kwargs):
    """Invokes the /app-xxxx[/yyyy]/get API method (see https://wiki.dnanexus.com/API-Specification-v1.0.0/Apps#API-method:-/app-xxxx%5B/yyyy%5D/get)."""
    if input_params is None:
        input_params = {}
    fully_qualified_version = app_name_or_id + (('/' + alias) if alias else '')
    return DXHTTPRequest('/%s/get' % fully_qualified_version, input_params, always_retry=always_retry, **kwargs)

def app_install(app_name_or_id, alias=None, input_params=None, always_retry=True, **kwargs):
    """Invokes the /app-xxxx[/yyyy]/install API method (see https://wiki.dnanexus.com/API-Specification-v1.0.0/Apps#API-method:-/app-xxxx%5B/yyyy%5D/install)."""
    if input_params is None:
        input_params = {}
    fully_qualified_version = app_name_or_id + (('/' + alias) if alias else '')
    return DXHTTPRequest('/%s/install' % fully_qualified_version, input_params, always_retry=always_retry, **kwargs)

def app_list_authorized_users(app_name_or_id, alias=None, input_params=None, always_retry=True, **kwargs):
    """Invokes the /app-xxxx[/yyyy]/listAuthorizedUsers API method (see https://wiki.dnanexus.com/API-Specification-v1.0.0/Apps#API-method:-/app-xxxx%5B/yyyy%5D/listAuthorizedUsers)."""
    if input_params is None:
        input_params = {}
    fully_qualified_version = app_name_or_id + (('/' + alias) if alias else '')
    return DXHTTPRequest('/%s/listAuthorizedUsers' % fully_qualified_version, input_params, always_retry=always_retry, **kwargs)

def app_list_categories(app_name_or_id, alias=None, input_params=None, always_retry=True, **kwargs):
    """Invokes the /app-xxxx[/yyyy]/listCategories API method (see https://wiki.dnanexus.com/API-Specification-v1.0.0/Apps#API-method:-/app-xxxx%5B/yyyy%5D/listCategories)."""
    if input_params is None:
        input_params = {}
    fully_qualified_version = app_name_or_id + (('/' + alias) if alias else '')
    return DXHTTPRequest('/%s/listCategories' % fully_qualified_version, input_params, always_retry=always_retry, **kwargs)

def app_list_developers(app_name_or_id, alias=None, input_params=None, always_retry=True, **kwargs):
    """Invokes the /app-xxxx[/yyyy]/listDevelopers API method (see https://wiki.dnanexus.com/API-Specification-v1.0.0/Apps#API-method:-/app-xxxx%5B/yyyy%5D/listDevelopers)."""
    if input_params is None:
        input_params = {}
    fully_qualified_version = app_name_or_id + (('/' + alias) if alias else '')
    return DXHTTPRequest('/%s/listDevelopers' % fully_qualified_version, input_params, always_retry=always_retry, **kwargs)

def app_publish(app_name_or_id, alias=None, input_params=None, always_retry=True, **kwargs):
    """Invokes the /app-xxxx[/yyyy]/publish API method (see https://wiki.dnanexus.com/API-Specification-v1.0.0/Apps#API-method:-/app-xxxx%5B/yyyy%5D/publish)."""
    if input_params is None:
        input_params = {}
    fully_qualified_version = app_name_or_id + (('/' + alias) if alias else '')
    return DXHTTPRequest('/%s/publish' % fully_qualified_version, input_params, always_retry=always_retry, **kwargs)

def app_remove_authorized_users(app_name_or_id, alias=None, input_params=None, always_retry=True, **kwargs):
    """Invokes the /app-xxxx[/yyyy]/removeAuthorizedUsers API method (see https://wiki.dnanexus.com/API-Specification-v1.0.0/Apps#API-method:-/app-xxxx%5B/yyyy%5D/removeAuthorizedUsers)."""
    if input_params is None:
        input_params = {}
    fully_qualified_version = app_name_or_id + (('/' + alias) if alias else '')
    return DXHTTPRequest('/%s/removeAuthorizedUsers' % fully_qualified_version, input_params, always_retry=always_retry, **kwargs)

def app_remove_categories(app_name_or_id, alias=None, input_params=None, always_retry=True, **kwargs):
    """Invokes the /app-xxxx[/yyyy]/removeCategories API method (see https://wiki.dnanexus.com/API-Specification-v1.0.0/Apps#API-method:-/app-xxxx%5B/yyyy%5D/removeCategories)."""
    if input_params is None:
        input_params = {}
    fully_qualified_version = app_name_or_id + (('/' + alias) if alias else '')
    return DXHTTPRequest('/%s/removeCategories' % fully_qualified_version, input_params, always_retry=always_retry, **kwargs)

def app_remove_developers(app_name_or_id, alias=None, input_params=None, always_retry=True, **kwargs):
    """Invokes the /app-xxxx[/yyyy]/removeDevelopers API method (see https://wiki.dnanexus.com/API-Specification-v1.0.0/Apps#API-method:-/app-xxxx%5B/yyyy%5D/removeDevelopers)."""
    if input_params is None:
        input_params = {}
    fully_qualified_version = app_name_or_id + (('/' + alias) if alias else '')
    return DXHTTPRequest('/%s/removeDevelopers' % fully_qualified_version, input_params, always_retry=always_retry, **kwargs)

def app_remove_tags(app_name_or_id, alias=None, input_params=None, always_retry=True, **kwargs):
    """Invokes the /app-xxxx[/yyyy]/removeTags API method (see https://wiki.dnanexus.com/API-Specification-v1.0.0/Apps#API-method:-/app-xxxx%5B/yyyy%5D/removeTags)."""
    if input_params is None:
        input_params = {}
    fully_qualified_version = app_name_or_id + (('/' + alias) if alias else '')
    return DXHTTPRequest('/%s/removeTags' % fully_qualified_version, input_params, always_retry=always_retry, **kwargs)

def app_run(app_name_or_id, alias=None, input_params=None, always_retry=True, **kwargs):
    """Invokes the /app-xxxx[/yyyy]/run API method (see https://wiki.dnanexus.com/API-Specification-v1.0.0/Apps#API-method:-/app-xxxx%5B/yyyy%5D/run)."""
    if input_params is None:
        input_params = {}
    # A nonce is injected so retried requests are idempotent server-side.
    input_params_cp = Nonce.update_nonce(input_params)
    fully_qualified_version = app_name_or_id + (('/' + alias) if alias else '')
    return DXHTTPRequest('/%s/run' % fully_qualified_version, input_params_cp, always_retry=always_retry, **kwargs)

def app_uninstall(app_name_or_id, alias=None, input_params=None, always_retry=True, **kwargs):
    """Invokes the /app-xxxx[/yyyy]/uninstall API method (see https://wiki.dnanexus.com/API-Specification-v1.0.0/Apps#API-method:-/app-xxxx%5B/yyyy%5D/uninstall)."""
    if input_params is None:
        input_params = {}
    fully_qualified_version = app_name_or_id + (('/' + alias) if alias else '')
    return DXHTTPRequest('/%s/uninstall' % fully_qualified_version, input_params, always_retry=always_retry, **kwargs)

def app_update(app_name_or_id, alias=None, input_params=None, always_retry=True, **kwargs):
    """Invokes the /app-xxxx[/yyyy]/update API method (see https://wiki.dnanexus.com/API-Specification-v1.0.0/Apps#API-method:-/app-xxxx%5B/yyyy%5D/update)."""
    if input_params is None:
        input_params = {}
    fully_qualified_version = app_name_or_id + (('/' + alias) if alias else '')
    return DXHTTPRequest('/%s/update' % fully_qualified_version, input_params, always_retry=always_retry, **kwargs)

def app_new(input_params=None, always_retry=True, **kwargs):
    """Invokes the /app/new API method (see https://wiki.dnanexus.com/API-Specification-v1.0.0/Apps#API-method:-/app/new)."""
    if input_params is None:
        input_params = {}
    # A nonce is injected so retried requests are idempotent server-side.
    input_params_cp = Nonce.update_nonce(input_params)
    return DXHTTPRequest('/app/new', input_params_cp, always_retry=always_retry, **kwargs)
# NOTE(review): these generated wrappers used the shared mutable literal
# ``input_params={}`` as a default; each now uses the None sentinel and
# substitutes a fresh dict per call (call behavior is unchanged).
def applet_add_tags(object_id, input_params=None, always_retry=True, **kwargs):
    """Invokes the /applet-xxxx/addTags API method (see https://wiki.dnanexus.com/API-Specification-v1.0.0/Tags#API-method%3A-%2Fclass-xxxx%2FaddTags)."""
    if input_params is None:
        input_params = {}
    return DXHTTPRequest('/%s/addTags' % object_id, input_params, always_retry=always_retry, **kwargs)

def applet_describe(object_id, input_params=None, always_retry=True, **kwargs):
    """Invokes the /applet-xxxx/describe API method (see https://wiki.dnanexus.com/API-Specification-v1.0.0/Applets-and-Entry-Points#API-method%3A-%2Fapplet-xxxx%2Fdescribe)."""
    if input_params is None:
        input_params = {}
    return DXHTTPRequest('/%s/describe' % object_id, input_params, always_retry=always_retry, **kwargs)

def applet_get(object_id, input_params=None, always_retry=True, **kwargs):
    """Invokes the /applet-xxxx/get API method (see https://wiki.dnanexus.com/API-Specification-v1.0.0/Applets-and-Entry-Points#API-method%3A-%2Fapplet-xxxx%2Fget)."""
    if input_params is None:
        input_params = {}
    return DXHTTPRequest('/%s/get' % object_id, input_params, always_retry=always_retry, **kwargs)

def applet_get_details(object_id, input_params=None, always_retry=True, **kwargs):
    """Invokes the /applet-xxxx/getDetails API method (see https://wiki.dnanexus.com/API-Specification-v1.0.0/Details-and-Links#API-method%3A-%2Fclass-xxxx%2FgetDetails)."""
    if input_params is None:
        input_params = {}
    return DXHTTPRequest('/%s/getDetails' % object_id, input_params, always_retry=always_retry, **kwargs)

def applet_list_projects(object_id, input_params=None, always_retry=True, **kwargs):
    """Invokes the /applet-xxxx/listProjects API method (see https://wiki.dnanexus.com/API-Specification-v1.0.0/Cloning#API-method%3A-%2Fclass-xxxx%2FlistProjects)."""
    if input_params is None:
        input_params = {}
    return DXHTTPRequest('/%s/listProjects' % object_id, input_params, always_retry=always_retry, **kwargs)

def applet_remove_tags(object_id, input_params=None, always_retry=True, **kwargs):
    """Invokes the /applet-xxxx/removeTags API method (see https://wiki.dnanexus.com/API-Specification-v1.0.0/Tags#API-method%3A-%2Fclass-xxxx%2FremoveTags)."""
    if input_params is None:
        input_params = {}
    return DXHTTPRequest('/%s/removeTags' % object_id, input_params, always_retry=always_retry, **kwargs)

def applet_rename(object_id, input_params=None, always_retry=True, **kwargs):
    """Invokes the /applet-xxxx/rename API method (see https://wiki.dnanexus.com/API-Specification-v1.0.0/Name#API-method%3A-%2Fclass-xxxx%2Frename)."""
    if input_params is None:
        input_params = {}
    return DXHTTPRequest('/%s/rename' % object_id, input_params, always_retry=always_retry, **kwargs)

def applet_run(object_id, input_params=None, always_retry=True, **kwargs):
    """Invokes the /applet-xxxx/run API method (see https://wiki.dnanexus.com/API-Specification-v1.0.0/Applets-and-Entry-Points#API-method%3A-%2Fapplet-xxxx%2Frun)."""
    if input_params is None:
        input_params = {}
    # A nonce is injected so retried requests are idempotent server-side.
    input_params_cp = Nonce.update_nonce(input_params)
    return DXHTTPRequest('/%s/run' % object_id, input_params_cp, always_retry=always_retry, **kwargs)

def applet_set_properties(object_id, input_params=None, always_retry=True, **kwargs):
    """Invokes the /applet-xxxx/setProperties API method (see https://wiki.dnanexus.com/API-Specification-v1.0.0/Properties#API-method%3A-%2Fclass-xxxx%2FsetProperties)."""
    if input_params is None:
        input_params = {}
    return DXHTTPRequest('/%s/setProperties' % object_id, input_params, always_retry=always_retry, **kwargs)

def applet_new(input_params=None, always_retry=True, **kwargs):
    """Invokes the /applet/new API method (see https://wiki.dnanexus.com/API-Specification-v1.0.0/Applets-and-Entry-Points#API-method%3A-%2Fapplet%2Fnew)."""
    if input_params is None:
        input_params = {}
    # A nonce is injected so retried requests are idempotent server-side.
    input_params_cp = Nonce.update_nonce(input_params)
    return DXHTTPRequest('/applet/new', input_params_cp, always_retry=always_retry, **kwargs)
# NOTE(review): these generated wrappers used the shared mutable literal
# ``input_params={}`` as a default; each now uses the None sentinel and
# substitutes a fresh dict per call (call behavior is unchanged).  Mutating
# routes keep always_retry=False because they are not idempotent.
def container_clone(object_id, input_params=None, always_retry=False, **kwargs):
    """Invokes the /container-xxxx/clone API method (see https://wiki.dnanexus.com/API-Specification-v1.0.0/Cloning#API-method%3A-%2Fclass-xxxx%2Fclone)."""
    if input_params is None:
        input_params = {}
    return DXHTTPRequest('/%s/clone' % object_id, input_params, always_retry=always_retry, **kwargs)

def container_describe(object_id, input_params=None, always_retry=True, **kwargs):
    """Invokes the /container-xxxx/describe API method (see https://wiki.dnanexus.com/API-Specification-v1.0.0/Containers-for-Execution#API-method%3A-%2Fcontainer-xxxx%2Fdescribe)."""
    if input_params is None:
        input_params = {}
    return DXHTTPRequest('/%s/describe' % object_id, input_params, always_retry=always_retry, **kwargs)

def container_destroy(object_id, input_params=None, always_retry=True, **kwargs):
    """Invokes the /container-xxxx/destroy API method."""
    if input_params is None:
        input_params = {}
    return DXHTTPRequest('/%s/destroy' % object_id, input_params, always_retry=always_retry, **kwargs)

def container_list_folder(object_id, input_params=None, always_retry=True, **kwargs):
    """Invokes the /container-xxxx/listFolder API method (see https://wiki.dnanexus.com/API-Specification-v1.0.0/Folders-and-Deletion#API-method%3A-%2Fclass-xxxx%2FlistFolder)."""
    if input_params is None:
        input_params = {}
    return DXHTTPRequest('/%s/listFolder' % object_id, input_params, always_retry=always_retry, **kwargs)

def container_move(object_id, input_params=None, always_retry=False, **kwargs):
    """Invokes the /container-xxxx/move API method (see https://wiki.dnanexus.com/API-Specification-v1.0.0/Folders-and-Deletion#API-method%3A-%2Fclass-xxxx%2Fmove)."""
    if input_params is None:
        input_params = {}
    return DXHTTPRequest('/%s/move' % object_id, input_params, always_retry=always_retry, **kwargs)

def container_new_folder(object_id, input_params=None, always_retry=True, **kwargs):
    """Invokes the /container-xxxx/newFolder API method (see https://wiki.dnanexus.com/API-Specification-v1.0.0/Folders-and-Deletion#API-method%3A-%2Fclass-xxxx%2FnewFolder)."""
    if input_params is None:
        input_params = {}
    return DXHTTPRequest('/%s/newFolder' % object_id, input_params, always_retry=always_retry, **kwargs)

def container_remove_folder(object_id, input_params=None, always_retry=False, **kwargs):
    """Invokes the /container-xxxx/removeFolder API method (see https://wiki.dnanexus.com/API-Specification-v1.0.0/Folders-and-Deletion#API-method%3A-%2Fclass-xxxx%2FremoveFolder)."""
    if input_params is None:
        input_params = {}
    return DXHTTPRequest('/%s/removeFolder' % object_id, input_params, always_retry=always_retry, **kwargs)

def container_remove_objects(object_id, input_params=None, always_retry=False, **kwargs):
    """Invokes the /container-xxxx/removeObjects API method (see https://wiki.dnanexus.com/API-Specification-v1.0.0/Folders-and-Deletion#API-method%3A-%2Fclass-xxxx%2FremoveObjects)."""
    if input_params is None:
        input_params = {}
    return DXHTTPRequest('/%s/removeObjects' % object_id, input_params, always_retry=always_retry, **kwargs)

def container_rename_folder(object_id, input_params=None, always_retry=False, **kwargs):
    """Invokes the /container-xxxx/renameFolder API method (see https://wiki.dnanexus.com/API-Specification-v1.0.0/Folders-and-Deletion#API-method%3A-%2Fclass-xxxx%2FrenameFolder)."""
    if input_params is None:
        input_params = {}
    return DXHTTPRequest('/%s/renameFolder' % object_id, input_params, always_retry=always_retry, **kwargs)
# NOTE(review): these generated wrappers used the shared mutable literal
# ``input_params={}`` as a default; each now uses the None sentinel and
# substitutes a fresh dict per call (call behavior is unchanged).
def file_add_tags(object_id, input_params=None, always_retry=True, **kwargs):
    """Invokes the /file-xxxx/addTags API method (see https://wiki.dnanexus.com/API-Specification-v1.0.0/Tags#API-method%3A-%2Fclass-xxxx%2FaddTags)."""
    if input_params is None:
        input_params = {}
    return DXHTTPRequest('/%s/addTags' % object_id, input_params, always_retry=always_retry, **kwargs)

def file_add_types(object_id, input_params=None, always_retry=True, **kwargs):
    """Invokes the /file-xxxx/addTypes API method (see https://wiki.dnanexus.com/API-Specification-v1.0.0/Types#API-method%3A-%2Fclass-xxxx%2FaddTypes)."""
    if input_params is None:
        input_params = {}
    return DXHTTPRequest('/%s/addTypes' % object_id, input_params, always_retry=always_retry, **kwargs)

def file_close(object_id, input_params=None, always_retry=True, **kwargs):
    """Invokes the /file-xxxx/close API method (see https://wiki.dnanexus.com/API-Specification-v1.0.0/Files#API-method%3A-%2Ffile-xxxx%2Fclose)."""
    if input_params is None:
        input_params = {}
    return DXHTTPRequest('/%s/close' % object_id, input_params, always_retry=always_retry, **kwargs)

def file_describe(object_id, input_params=None, always_retry=True, **kwargs):
    """Invokes the /file-xxxx/describe API method (see https://wiki.dnanexus.com/API-Specification-v1.0.0/Files#API-method%3A-%2Ffile-xxxx%2Fdescribe)."""
    if input_params is None:
        input_params = {}
    return DXHTTPRequest('/%s/describe' % object_id, input_params, always_retry=always_retry, **kwargs)

def file_download(object_id, input_params=None, always_retry=True, **kwargs):
    """Invokes the /file-xxxx/download API method (see https://wiki.dnanexus.com/API-Specification-v1.0.0/Files#API-method%3A-%2Ffile-xxxx%2Fdownload)."""
    if input_params is None:
        input_params = {}
    return DXHTTPRequest('/%s/download' % object_id, input_params, always_retry=always_retry, **kwargs)

def file_get_details(object_id, input_params=None, always_retry=True, **kwargs):
    """Invokes the /file-xxxx/getDetails API method (see https://wiki.dnanexus.com/API-Specification-v1.0.0/Details-and-Links#API-method%3A-%2Fclass-xxxx%2FgetDetails)."""
    if input_params is None:
        input_params = {}
    return DXHTTPRequest('/%s/getDetails' % object_id, input_params, always_retry=always_retry, **kwargs)

def file_list_projects(object_id, input_params=None, always_retry=True, **kwargs):
    """Invokes the /file-xxxx/listProjects API method (see https://wiki.dnanexus.com/API-Specification-v1.0.0/Cloning#API-method%3A-%2Fclass-xxxx%2FlistProjects)."""
    if input_params is None:
        input_params = {}
    return DXHTTPRequest('/%s/listProjects' % object_id, input_params, always_retry=always_retry, **kwargs)

def file_remove_tags(object_id, input_params=None, always_retry=True, **kwargs):
    """Invokes the /file-xxxx/removeTags API method (see https://wiki.dnanexus.com/API-Specification-v1.0.0/Tags#API-method%3A-%2Fclass-xxxx%2FremoveTags)."""
    if input_params is None:
        input_params = {}
    return DXHTTPRequest('/%s/removeTags' % object_id, input_params, always_retry=always_retry, **kwargs)

def file_remove_types(object_id, input_params=None, always_retry=True, **kwargs):
    """Invokes the /file-xxxx/removeTypes API method (see https://wiki.dnanexus.com/API-Specification-v1.0.0/Types#API-method%3A-%2Fclass-xxxx%2FremoveTypes)."""
    if input_params is None:
        input_params = {}
    return DXHTTPRequest('/%s/removeTypes' % object_id, input_params, always_retry=always_retry, **kwargs)

def file_rename(object_id, input_params=None, always_retry=True, **kwargs):
    """Invokes the /file-xxxx/rename API method (see https://wiki.dnanexus.com/API-Specification-v1.0.0/Name#API-method%3A-%2Fclass-xxxx%2Frename)."""
    if input_params is None:
        input_params = {}
    return DXHTTPRequest('/%s/rename' % object_id, input_params, always_retry=always_retry, **kwargs)

def file_set_details(object_id, input_params=None, always_retry=True, **kwargs):
    """Invokes the /file-xxxx/setDetails API method (see https://wiki.dnanexus.com/API-Specification-v1.0.0/Details-and-Links#API-method%3A-%2Fclass-xxxx%2FsetDetails)."""
    if input_params is None:
        input_params = {}
    return DXHTTPRequest('/%s/setDetails' % object_id, input_params, always_retry=always_retry, **kwargs)

def file_set_properties(object_id, input_params=None, always_retry=True, **kwargs):
    """Invokes the /file-xxxx/setProperties API method (see https://wiki.dnanexus.com/API-Specification-v1.0.0/Properties#API-method%3A-%2Fclass-xxxx%2FsetProperties)."""
    if input_params is None:
        input_params = {}
    return DXHTTPRequest('/%s/setProperties' % object_id, input_params, always_retry=always_retry, **kwargs)

def file_set_visibility(object_id, input_params=None, always_retry=True, **kwargs):
    """Invokes the /file-xxxx/setVisibility API method (see https://wiki.dnanexus.com/API-Specification-v1.0.0/Visibility#API-method%3A-%2Fclass-xxxx%2FsetVisibility)."""
    if input_params is None:
        input_params = {}
    return DXHTTPRequest('/%s/setVisibility' % object_id, input_params, always_retry=always_retry, **kwargs)

def file_upload(object_id, input_params=None, always_retry=True, **kwargs):
    """Invokes the /file-xxxx/upload API method (see https://wiki.dnanexus.com/API-Specification-v1.0.0/Files#API-method%3A-%2Ffile-xxxx%2Fupload)."""
    if input_params is None:
        input_params = {}
    return DXHTTPRequest('/%s/upload' % object_id, input_params, always_retry=always_retry, **kwargs)

def file_new(input_params=None, always_retry=True, **kwargs):
    """Invokes the /file/new API method (see https://wiki.dnanexus.com/API-Specification-v1.0.0/Files#API-method%3A-%2Ffile%2Fnew)."""
    if input_params is None:
        input_params = {}
    # A nonce is injected so retried requests are idempotent server-side.
    input_params_cp = Nonce.update_nonce(input_params)
    return DXHTTPRequest('/file/new', input_params_cp, always_retry=always_retry, **kwargs)
# NOTE(review): these generated wrappers used the shared mutable literal
# ``input_params={}`` as a default; each now uses the None sentinel and
# substitutes a fresh dict per call (call behavior is unchanged).
def gtable_add_rows(object_id, input_params=None, always_retry=True, **kwargs):
    """Invokes the /gtable-xxxx/addRows API method (see https://wiki.dnanexus.com/API-Specification-v1.0.0/GenomicTables#API-method%3A-%2Fgtable-xxxx%2FaddRows)."""
    if input_params is None:
        input_params = {}
    return DXHTTPRequest('/%s/addRows' % object_id, input_params, always_retry=always_retry, **kwargs)

def gtable_add_tags(object_id, input_params=None, always_retry=True, **kwargs):
    """Invokes the /gtable-xxxx/addTags API method (see https://wiki.dnanexus.com/API-Specification-v1.0.0/Tags#API-method%3A-%2Fclass-xxxx%2FaddTags)."""
    if input_params is None:
        input_params = {}
    return DXHTTPRequest('/%s/addTags' % object_id, input_params, always_retry=always_retry, **kwargs)

def gtable_add_types(object_id, input_params=None, always_retry=True, **kwargs):
    """Invokes the /gtable-xxxx/addTypes API method (see https://wiki.dnanexus.com/API-Specification-v1.0.0/Types#API-method%3A-%2Fclass-xxxx%2FaddTypes)."""
    if input_params is None:
        input_params = {}
    return DXHTTPRequest('/%s/addTypes' % object_id, input_params, always_retry=always_retry, **kwargs)

def gtable_close(object_id, input_params=None, always_retry=True, **kwargs):
    """Invokes the /gtable-xxxx/close API method (see https://wiki.dnanexus.com/API-Specification-v1.0.0/GenomicTables#API-method%3A-%2Fgtable-xxxx%2Fclose)."""
    if input_params is None:
        input_params = {}
    return DXHTTPRequest('/%s/close' % object_id, input_params, always_retry=always_retry, **kwargs)

def gtable_describe(object_id, input_params=None, always_retry=True, **kwargs):
    """Invokes the /gtable-xxxx/describe API method (see https://wiki.dnanexus.com/API-Specification-v1.0.0/GenomicTables#API-method%3A-%2Fgtable-xxxx%2Fdescribe)."""
    if input_params is None:
        input_params = {}
    return DXHTTPRequest('/%s/describe' % object_id, input_params, always_retry=always_retry, **kwargs)

def gtable_get(object_id, input_params=None, always_retry=True, **kwargs):
    """Invokes the /gtable-xxxx/get API method (see https://wiki.dnanexus.com/API-Specification-v1.0.0/GenomicTables#API-method%3A-%2Fgtable-xxxx%2Fget)."""
    if input_params is None:
        input_params = {}
    return DXHTTPRequest('/%s/get' % object_id, input_params, always_retry=always_retry, **kwargs)

def gtable_get_details(object_id, input_params=None, always_retry=True, **kwargs):
    """Invokes the /gtable-xxxx/getDetails API method (see https://wiki.dnanexus.com/API-Specification-v1.0.0/Details-and-Links#API-method%3A-%2Fclass-xxxx%2FgetDetails)."""
    if input_params is None:
        input_params = {}
    return DXHTTPRequest('/%s/getDetails' % object_id, input_params, always_retry=always_retry, **kwargs)

def gtable_list_projects(object_id, input_params=None, always_retry=True, **kwargs):
    """Invokes the /gtable-xxxx/listProjects API method (see https://wiki.dnanexus.com/API-Specification-v1.0.0/Cloning#API-method%3A-%2Fclass-xxxx%2FlistProjects)."""
    if input_params is None:
        input_params = {}
    return DXHTTPRequest('/%s/listProjects' % object_id, input_params, always_retry=always_retry, **kwargs)

def gtable_next_part(object_id, input_params=None, always_retry=True, **kwargs):
    """Invokes the /gtable-xxxx/nextPart API method (see https://wiki.dnanexus.com/API-Specification-v1.0.0/GenomicTables#API-method%3A-%2Fgtable-xxxx%2FnextPart)."""
    if input_params is None:
        input_params = {}
    return DXHTTPRequest('/%s/nextPart' % object_id, input_params, always_retry=always_retry, **kwargs)

def gtable_remove_tags(object_id, input_params=None, always_retry=True, **kwargs):
    """Invokes the /gtable-xxxx/removeTags API method (see https://wiki.dnanexus.com/API-Specification-v1.0.0/Tags#API-method%3A-%2Fclass-xxxx%2FremoveTags)."""
    if input_params is None:
        input_params = {}
    return DXHTTPRequest('/%s/removeTags' % object_id, input_params, always_retry=always_retry, **kwargs)

def gtable_remove_types(object_id, input_params=None, always_retry=True, **kwargs):
    """Invokes the /gtable-xxxx/removeTypes API method (see https://wiki.dnanexus.com/API-Specification-v1.0.0/Types#API-method%3A-%2Fclass-xxxx%2FremoveTypes)."""
    if input_params is None:
        input_params = {}
    return DXHTTPRequest('/%s/removeTypes' % object_id, input_params, always_retry=always_retry, **kwargs)

def gtable_rename(object_id, input_params=None, always_retry=True, **kwargs):
    """Invokes the /gtable-xxxx/rename API method (see https://wiki.dnanexus.com/API-Specification-v1.0.0/Name#API-method%3A-%2Fclass-xxxx%2Frename)."""
    if input_params is None:
        input_params = {}
    return DXHTTPRequest('/%s/rename' % object_id, input_params, always_retry=always_retry, **kwargs)

def gtable_set_details(object_id, input_params=None, always_retry=True, **kwargs):
    """Invokes the /gtable-xxxx/setDetails API method (see https://wiki.dnanexus.com/API-Specification-v1.0.0/Details-and-Links#API-method%3A-%2Fclass-xxxx%2FsetDetails)."""
    if input_params is None:
        input_params = {}
    return DXHTTPRequest('/%s/setDetails' % object_id, input_params, always_retry=always_retry, **kwargs)

def gtable_set_properties(object_id, input_params=None, always_retry=True, **kwargs):
    """Invokes the /gtable-xxxx/setProperties API method (see https://wiki.dnanexus.com/API-Specification-v1.0.0/Properties#API-method%3A-%2Fclass-xxxx%2FsetProperties)."""
    if input_params is None:
        input_params = {}
    return DXHTTPRequest('/%s/setProperties' % object_id, input_params, always_retry=always_retry, **kwargs)

def gtable_set_visibility(object_id, input_params=None, always_retry=True, **kwargs):
    """Invokes the /gtable-xxxx/setVisibility API method (see https://wiki.dnanexus.com/API-Specification-v1.0.0/Visibility#API-method%3A-%2Fclass-xxxx%2FsetVisibility)."""
    if input_params is None:
        input_params = {}
    return DXHTTPRequest('/%s/setVisibility' % object_id, input_params, always_retry=always_retry, **kwargs)

def gtable_new(input_params=None, always_retry=False, **kwargs):
    """Invokes the /gtable/new API method (see https://wiki.dnanexus.com/API-Specification-v1.0.0/GenomicTables#API-method%3A-%2Fgtable%2Fnew)."""
    if input_params is None:
        input_params = {}
    return DXHTTPRequest('/gtable/new', input_params, always_retry=always_retry, **kwargs)
def job_add_tags(object_id, input_params={}, always_retry=True, **kwargs):
    """Call the /job-xxxx/addTags API route.
    Docs: https://wiki.dnanexus.com/API-Specification-v1.0.0/Applets-and-Entry-Points#API-method%3A-%2Fjob-xxxx%2FaddTags
    """
    return DXHTTPRequest('/{}/addTags'.format(object_id), input_params, always_retry=always_retry, **kwargs)

def job_describe(object_id, input_params={}, always_retry=True, **kwargs):
    """Call the /job-xxxx/describe API route.
    Docs: https://wiki.dnanexus.com/API-Specification-v1.0.0/Applets-and-Entry-Points#API-method%3A-%2Fjob-xxxx%2Fdescribe
    """
    return DXHTTPRequest('/{}/describe'.format(object_id), input_params, always_retry=always_retry, **kwargs)

def job_get_log(object_id, input_params={}, always_retry=False, **kwargs):
    """Call the /job-xxxx/getLog API route.
    Docs: https://wiki.dnanexus.com/API-Specification-v1.0.0/Applets-and-Entry-Points#API-method%3A-%2Fjob-xxxx%2FgetLog
    """
    # Not retried by default (streaming log endpoint).
    return DXHTTPRequest('/{}/getLog'.format(object_id), input_params, always_retry=always_retry, **kwargs)

def job_remove_tags(object_id, input_params={}, always_retry=True, **kwargs):
    """Call the /job-xxxx/removeTags API route.
    Docs: https://wiki.dnanexus.com/API-Specification-v1.0.0/Applets-and-Entry-Points#API-method%3A-%2Fjob-xxxx%2FremoveTags
    """
    return DXHTTPRequest('/{}/removeTags'.format(object_id), input_params, always_retry=always_retry, **kwargs)

def job_set_properties(object_id, input_params={}, always_retry=True, **kwargs):
    """Call the /job-xxxx/setProperties API route.
    Docs: https://wiki.dnanexus.com/API-Specification-v1.0.0/Applets-and-Entry-Points#API-method%3A-%2Fjob-xxxx%2FsetProperties
    """
    return DXHTTPRequest('/{}/setProperties'.format(object_id), input_params, always_retry=always_retry, **kwargs)

def job_terminate(object_id, input_params={}, always_retry=True, **kwargs):
    """Call the /job-xxxx/terminate API route.
    Docs: https://wiki.dnanexus.com/API-Specification-v1.0.0/Applets-and-Entry-Points#API-method%3A-%2Fjob-xxxx%2Fterminate
    """
    return DXHTTPRequest('/{}/terminate'.format(object_id), input_params, always_retry=always_retry, **kwargs)

def job_new(input_params={}, always_retry=True, **kwargs):
    """Call the /job/new API route (adds an idempotency nonce so retry is safe).
    Docs: https://wiki.dnanexus.com/API-Specification-v1.0.0/Applets-and-Entry-Points#API-method%3A-%2Fjob%2Fnew
    """
    nonced_params = Nonce.update_nonce(input_params)
    return DXHTTPRequest('/job/new', nonced_params, always_retry=always_retry, **kwargs)
def notifications_get(input_params={}, always_retry=True, **kwargs):
    """Call the /notifications/get API route."""
    return DXHTTPRequest('/notifications/get', input_params, always_retry=always_retry, **kwargs)

def notifications_mark_read(input_params={}, always_retry=True, **kwargs):
    """Call the /notifications/markRead API route."""
    return DXHTTPRequest('/notifications/markRead', input_params, always_retry=always_retry, **kwargs)
def org_describe(object_id, input_params={}, always_retry=True, **kwargs):
    """Call the /org-xxxx/describe API route.
    Docs: https://wiki.dnanexus.com/API-Specification-v1.0.0/Organizations#API-method%3A-%2Forg-xxxx%2Fdescribe
    """
    return DXHTTPRequest('/{}/describe'.format(object_id), input_params, always_retry=always_retry, **kwargs)

def org_find_members(object_id, input_params={}, always_retry=True, **kwargs):
    """Call the /org-xxxx/findMembers API route.
    Docs: https://wiki.dnanexus.com/API-Specification-v1.0.0/Organizations#API-method%3A-%2Forg-xxxx%2FfindMembers
    """
    return DXHTTPRequest('/{}/findMembers'.format(object_id), input_params, always_retry=always_retry, **kwargs)

def org_find_projects(object_id, input_params={}, always_retry=True, **kwargs):
    """Call the /org-xxxx/findProjects API route.
    Docs: https://wiki.dnanexus.com/API-Specification-v1.0.0/Organizations#API-method%3A-%2Forg-xxxx%2FfindProjects
    """
    return DXHTTPRequest('/{}/findProjects'.format(object_id), input_params, always_retry=always_retry, **kwargs)

def org_find_apps(object_id, input_params={}, always_retry=True, **kwargs):
    """Call the /org-xxxx/findApps API route.
    Docs: https://wiki.dnanexus.com/API-Specification-v1.0.0/Organizations#API-method%3A-%2Forg-xxxx%2FfindApps
    """
    return DXHTTPRequest('/{}/findApps'.format(object_id), input_params, always_retry=always_retry, **kwargs)

def org_invite(object_id, input_params={}, always_retry=True, **kwargs):
    """Call the /org-xxxx/invite API route.
    Docs: https://wiki.dnanexus.com/API-Specification-v1.0.0/Organizations#API-method%3A-%2Forg-xxxx%2Finvite
    """
    return DXHTTPRequest('/{}/invite'.format(object_id), input_params, always_retry=always_retry, **kwargs)

def org_remove_member(object_id, input_params={}, always_retry=True, **kwargs):
    """Call the /org-xxxx/removeMember API route.
    Docs: https://wiki.dnanexus.com/API-Specification-v1.0.0/Organizations#API-method%3A-%2Forg-xxxx%2FremoveMember
    """
    return DXHTTPRequest('/{}/removeMember'.format(object_id), input_params, always_retry=always_retry, **kwargs)

def org_set_member_access(object_id, input_params={}, always_retry=True, **kwargs):
    """Call the /org-xxxx/setMemberAccess API route.
    Docs: https://wiki.dnanexus.com/API-Specification-v1.0.0/Organizations#API-method%3A-%2Forg-xxxx%2FsetMemberAccess
    """
    return DXHTTPRequest('/{}/setMemberAccess'.format(object_id), input_params, always_retry=always_retry, **kwargs)

def org_update(object_id, input_params={}, always_retry=True, **kwargs):
    """Call the /org-xxxx/update API route.
    Docs: https://wiki.dnanexus.com/API-Specification-v1.0.0/Organizations#API-method%3A-%2Forg-xxxx%2Fupdate
    """
    return DXHTTPRequest('/{}/update'.format(object_id), input_params, always_retry=always_retry, **kwargs)

def org_new(input_params={}, always_retry=True, **kwargs):
    """Call the /org/new API route (adds an idempotency nonce so retry is safe).
    Docs: https://wiki.dnanexus.com/API-Specification-v1.0.0/Organizations#API-method%3A-%2Forg%2Fnew
    """
    nonced_params = Nonce.update_nonce(input_params)
    return DXHTTPRequest('/org/new', nonced_params, always_retry=always_retry, **kwargs)
def project_add_tags(object_id, input_params={}, always_retry=True, **kwargs):
    """Call the /project-xxxx/addTags API route.
    Docs: https://wiki.dnanexus.com/API-Specification-v1.0.0/Projects#API-method%3A-%2Fproject-xxxx%2FaddTags
    """
    return DXHTTPRequest('/{}/addTags'.format(object_id), input_params, always_retry=always_retry, **kwargs)

def project_clone(object_id, input_params={}, always_retry=False, **kwargs):
    """Call the /project-xxxx/clone API route.
    Docs: https://wiki.dnanexus.com/API-Specification-v1.0.0/Cloning#API-method%3A-%2Fclass-xxxx%2Fclone
    """
    # Not retried by default: cloning is not idempotent.
    return DXHTTPRequest('/{}/clone'.format(object_id), input_params, always_retry=always_retry, **kwargs)

def project_decrease_permissions(object_id, input_params={}, always_retry=True, **kwargs):
    """Call the /project-xxxx/decreasePermissions API route.
    Docs: https://wiki.dnanexus.com/API-Specification-v1.0.0/Project-Permissions-and-Sharing#API-method%3A-%2Fproject-xxxx%2FdecreasePermissions
    """
    return DXHTTPRequest('/{}/decreasePermissions'.format(object_id), input_params, always_retry=always_retry, **kwargs)

def project_describe(object_id, input_params={}, always_retry=True, **kwargs):
    """Call the /project-xxxx/describe API route.
    Docs: https://wiki.dnanexus.com/API-Specification-v1.0.0/Projects#API-method%3A-%2Fproject-xxxx%2Fdescribe
    """
    return DXHTTPRequest('/{}/describe'.format(object_id), input_params, always_retry=always_retry, **kwargs)

def project_destroy(object_id, input_params={}, always_retry=True, **kwargs):
    """Call the /project-xxxx/destroy API route.
    Docs: https://wiki.dnanexus.com/API-Specification-v1.0.0/Projects#API-method%3A-%2Fproject-xxxx%2Fdestroy
    """
    return DXHTTPRequest('/{}/destroy'.format(object_id), input_params, always_retry=always_retry, **kwargs)

def project_invite(object_id, input_params={}, always_retry=False, **kwargs):
    """Call the /project-xxxx/invite API route.
    Docs: https://wiki.dnanexus.com/API-Specification-v1.0.0/Project-Permissions-and-Sharing#API-method%3A-%2Fproject-xxxx%2Finvite
    """
    # Not retried by default: an invite may trigger side effects (e.g. email).
    return DXHTTPRequest('/{}/invite'.format(object_id), input_params, always_retry=always_retry, **kwargs)

def project_leave(object_id, input_params={}, always_retry=True, **kwargs):
    """Call the /project-xxxx/leave API route.
    Docs: https://wiki.dnanexus.com/API-Specification-v1.0.0/Project-Permissions-and-Sharing#API-method%3A-%2Fproject-xxxx%2Fleave
    """
    return DXHTTPRequest('/{}/leave'.format(object_id), input_params, always_retry=always_retry, **kwargs)

def project_list_folder(object_id, input_params={}, always_retry=True, **kwargs):
    """Call the /project-xxxx/listFolder API route.
    Docs: https://wiki.dnanexus.com/API-Specification-v1.0.0/Folders-and-Deletion#API-method%3A-%2Fclass-xxxx%2FlistFolder
    """
    return DXHTTPRequest('/{}/listFolder'.format(object_id), input_params, always_retry=always_retry, **kwargs)

def project_move(object_id, input_params={}, always_retry=False, **kwargs):
    """Call the /project-xxxx/move API route.
    Docs: https://wiki.dnanexus.com/API-Specification-v1.0.0/Folders-and-Deletion#API-method%3A-%2Fclass-xxxx%2Fmove
    """
    # Not retried by default: moving is not idempotent.
    return DXHTTPRequest('/{}/move'.format(object_id), input_params, always_retry=always_retry, **kwargs)

def project_new_folder(object_id, input_params={}, always_retry=True, **kwargs):
    """Call the /project-xxxx/newFolder API route.
    Docs: https://wiki.dnanexus.com/API-Specification-v1.0.0/Folders-and-Deletion#API-method%3A-%2Fclass-xxxx%2FnewFolder
    """
    return DXHTTPRequest('/{}/newFolder'.format(object_id), input_params, always_retry=always_retry, **kwargs)

def project_remove_folder(object_id, input_params={}, always_retry=False, **kwargs):
    """Call the /project-xxxx/removeFolder API route.
    Docs: https://wiki.dnanexus.com/API-Specification-v1.0.0/Folders-and-Deletion#API-method%3A-%2Fclass-xxxx%2FremoveFolder
    """
    # Not retried by default: removal is not idempotent.
    return DXHTTPRequest('/{}/removeFolder'.format(object_id), input_params, always_retry=always_retry, **kwargs)

def project_remove_objects(object_id, input_params={}, always_retry=False, **kwargs):
    """Call the /project-xxxx/removeObjects API route.
    Docs: https://wiki.dnanexus.com/API-Specification-v1.0.0/Folders-and-Deletion#API-method%3A-%2Fclass-xxxx%2FremoveObjects
    """
    # Not retried by default: removal is not idempotent.
    return DXHTTPRequest('/{}/removeObjects'.format(object_id), input_params, always_retry=always_retry, **kwargs)

def project_remove_tags(object_id, input_params={}, always_retry=True, **kwargs):
    """Call the /project-xxxx/removeTags API route.
    Docs: https://wiki.dnanexus.com/API-Specification-v1.0.0/Projects#API-method%3A-%2Fproject-xxxx%2FremoveTags
    """
    return DXHTTPRequest('/{}/removeTags'.format(object_id), input_params, always_retry=always_retry, **kwargs)

def project_rename_folder(object_id, input_params={}, always_retry=False, **kwargs):
    """Call the /project-xxxx/renameFolder API route.
    Docs: https://wiki.dnanexus.com/API-Specification-v1.0.0/Folders-and-Deletion#API-method%3A-%2Fclass-xxxx%2FrenameFolder
    """
    # Not retried by default: renaming is not idempotent.
    return DXHTTPRequest('/{}/renameFolder'.format(object_id), input_params, always_retry=always_retry, **kwargs)

def project_set_properties(object_id, input_params={}, always_retry=True, **kwargs):
    """Call the /project-xxxx/setProperties API route.
    Docs: https://wiki.dnanexus.com/API-Specification-v1.0.0/Projects#API-method%3A-%2Fproject-xxxx%2FsetProperties
    """
    return DXHTTPRequest('/{}/setProperties'.format(object_id), input_params, always_retry=always_retry, **kwargs)

def project_transfer(object_id, input_params={}, always_retry=True, **kwargs):
    """Call the /project-xxxx/transfer API route.
    Docs: https://wiki.dnanexus.com/API-Specification-v1.0.0/Project-Permissions-and-Sharing#API-method%3A-%2Fproject-xxxx%2Ftransfer
    """
    return DXHTTPRequest('/{}/transfer'.format(object_id), input_params, always_retry=always_retry, **kwargs)

def project_update(object_id, input_params={}, always_retry=True, **kwargs):
    """Call the /project-xxxx/update API route.
    Docs: https://wiki.dnanexus.com/API-Specification-v1.0.0/Projects#API-method%3A-%2Fproject-xxxx%2Fupdate
    """
    return DXHTTPRequest('/{}/update'.format(object_id), input_params, always_retry=always_retry, **kwargs)

def project_update_sponsorship(object_id, input_params={}, always_retry=True, **kwargs):
    """Call the /project-xxxx/updateSponsorship API route.
    Docs: https://wiki.dnanexus.com/API-Specification-v1.0.0/Projects#API-method%3A-%2Fproject-xxxx%2FupdateSponsorship
    """
    return DXHTTPRequest('/{}/updateSponsorship'.format(object_id), input_params, always_retry=always_retry, **kwargs)

def project_new(input_params={}, always_retry=False, **kwargs):
    """Call the /project/new API route.
    Docs: https://wiki.dnanexus.com/API-Specification-v1.0.0/Projects#API-method%3A-%2Fproject%2Fnew
    """
    # Not retried by default: creation is not idempotent without a nonce.
    return DXHTTPRequest('/project/new', input_params, always_retry=always_retry, **kwargs)
def record_add_tags(object_id, input_params={}, always_retry=True, **kwargs):
    """Call the /record-xxxx/addTags API route.
    Docs: https://wiki.dnanexus.com/API-Specification-v1.0.0/Tags#API-method%3A-%2Fclass-xxxx%2FaddTags
    """
    return DXHTTPRequest('/{}/addTags'.format(object_id), input_params, always_retry=always_retry, **kwargs)

def record_add_types(object_id, input_params={}, always_retry=True, **kwargs):
    """Call the /record-xxxx/addTypes API route.
    Docs: https://wiki.dnanexus.com/API-Specification-v1.0.0/Types#API-method%3A-%2Fclass-xxxx%2FaddTypes
    """
    return DXHTTPRequest('/{}/addTypes'.format(object_id), input_params, always_retry=always_retry, **kwargs)

def record_close(object_id, input_params={}, always_retry=True, **kwargs):
    """Call the /record-xxxx/close API route.
    Docs: https://wiki.dnanexus.com/API-Specification-v1.0.0/Data-Object-Lifecycle#API-method%3A-%2Fclass-xxxx%2Fclose
    """
    return DXHTTPRequest('/{}/close'.format(object_id), input_params, always_retry=always_retry, **kwargs)

def record_describe(object_id, input_params={}, always_retry=True, **kwargs):
    """Call the /record-xxxx/describe API route.
    Docs: https://wiki.dnanexus.com/API-Specification-v1.0.0/Records#API-method%3A-%2Frecord-xxxx%2Fdescribe
    """
    return DXHTTPRequest('/{}/describe'.format(object_id), input_params, always_retry=always_retry, **kwargs)

def record_get_details(object_id, input_params={}, always_retry=True, **kwargs):
    """Call the /record-xxxx/getDetails API route.
    Docs: https://wiki.dnanexus.com/API-Specification-v1.0.0/Details-and-Links#API-method%3A-%2Fclass-xxxx%2FgetDetails
    """
    return DXHTTPRequest('/{}/getDetails'.format(object_id), input_params, always_retry=always_retry, **kwargs)

def record_list_projects(object_id, input_params={}, always_retry=True, **kwargs):
    """Call the /record-xxxx/listProjects API route.
    Docs: https://wiki.dnanexus.com/API-Specification-v1.0.0/Cloning#API-method%3A-%2Fclass-xxxx%2FlistProjects
    """
    return DXHTTPRequest('/{}/listProjects'.format(object_id), input_params, always_retry=always_retry, **kwargs)

def record_remove_tags(object_id, input_params={}, always_retry=True, **kwargs):
    """Call the /record-xxxx/removeTags API route.
    Docs: https://wiki.dnanexus.com/API-Specification-v1.0.0/Tags#API-method%3A-%2Fclass-xxxx%2FremoveTags
    """
    return DXHTTPRequest('/{}/removeTags'.format(object_id), input_params, always_retry=always_retry, **kwargs)

def record_remove_types(object_id, input_params={}, always_retry=True, **kwargs):
    """Call the /record-xxxx/removeTypes API route.
    Docs: https://wiki.dnanexus.com/API-Specification-v1.0.0/Types#API-method%3A-%2Fclass-xxxx%2FremoveTypes
    """
    return DXHTTPRequest('/{}/removeTypes'.format(object_id), input_params, always_retry=always_retry, **kwargs)

def record_rename(object_id, input_params={}, always_retry=True, **kwargs):
    """Call the /record-xxxx/rename API route.
    Docs: https://wiki.dnanexus.com/API-Specification-v1.0.0/Name#API-method%3A-%2Fclass-xxxx%2Frename
    """
    return DXHTTPRequest('/{}/rename'.format(object_id), input_params, always_retry=always_retry, **kwargs)

def record_set_details(object_id, input_params={}, always_retry=True, **kwargs):
    """Call the /record-xxxx/setDetails API route.
    Docs: https://wiki.dnanexus.com/API-Specification-v1.0.0/Details-and-Links#API-method%3A-%2Fclass-xxxx%2FsetDetails
    """
    return DXHTTPRequest('/{}/setDetails'.format(object_id), input_params, always_retry=always_retry, **kwargs)

def record_set_properties(object_id, input_params={}, always_retry=True, **kwargs):
    """Call the /record-xxxx/setProperties API route.
    Docs: https://wiki.dnanexus.com/API-Specification-v1.0.0/Properties#API-method%3A-%2Fclass-xxxx%2FsetProperties
    """
    return DXHTTPRequest('/{}/setProperties'.format(object_id), input_params, always_retry=always_retry, **kwargs)

def record_set_visibility(object_id, input_params={}, always_retry=True, **kwargs):
    """Call the /record-xxxx/setVisibility API route.
    Docs: https://wiki.dnanexus.com/API-Specification-v1.0.0/Visibility#API-method%3A-%2Fclass-xxxx%2FsetVisibility
    """
    return DXHTTPRequest('/{}/setVisibility'.format(object_id), input_params, always_retry=always_retry, **kwargs)

def record_new(input_params={}, always_retry=True, **kwargs):
    """Call the /record/new API route (adds an idempotency nonce so retry is safe).
    Docs: https://wiki.dnanexus.com/API-Specification-v1.0.0/Records#API-method%3A-%2Frecord%2Fnew
    """
    nonced_params = Nonce.update_nonce(input_params)
    return DXHTTPRequest('/record/new', nonced_params, always_retry=always_retry, **kwargs)
def system_describe_data_objects(input_params={}, always_retry=True, **kwargs):
    """Call the /system/describeDataObjects API route.
    Docs: https://wiki.dnanexus.com/API-Specification-v1.0.0/System-Methods#API-method:-/system/describeDataObjects
    """
    return DXHTTPRequest('/system/describeDataObjects', input_params, always_retry=always_retry, **kwargs)

def system_describe_executions(input_params={}, always_retry=True, **kwargs):
    """Call the /system/describeExecutions API route.
    Docs: https://wiki.dnanexus.com/API-Specification-v1.0.0/System-Methods#API-method:-/system/describeExecutions
    """
    return DXHTTPRequest('/system/describeExecutions', input_params, always_retry=always_retry, **kwargs)

def system_describe_projects(input_params={}, always_retry=True, **kwargs):
    """Call the /system/describeProjects API route.
    Docs: https://wiki.dnanexus.com/API-Specification-v1.0.0/System-Methods#API-method:-/system/describeProjects
    """
    return DXHTTPRequest('/system/describeProjects', input_params, always_retry=always_retry, **kwargs)

def system_find_affiliates(input_params={}, always_retry=True, **kwargs):
    """Call the /system/findAffiliates API route."""
    return DXHTTPRequest('/system/findAffiliates', input_params, always_retry=always_retry, **kwargs)

def system_find_apps(input_params={}, always_retry=True, **kwargs):
    """Call the /system/findApps API route.
    Docs: https://wiki.dnanexus.com/API-Specification-v1.0.0/Search#API-method%3A-%2Fsystem%2FfindApps
    """
    return DXHTTPRequest('/system/findApps', input_params, always_retry=always_retry, **kwargs)

def system_find_data_objects(input_params={}, always_retry=True, **kwargs):
    """Call the /system/findDataObjects API route.
    Docs: https://wiki.dnanexus.com/API-Specification-v1.0.0/Search#API-method%3A-%2Fsystem%2FfindDataObjects
    """
    return DXHTTPRequest('/system/findDataObjects', input_params, always_retry=always_retry, **kwargs)

def system_resolve_data_objects(input_params={}, always_retry=True, **kwargs):
    """Call the /system/resolveDataObjects API route.
    Docs: https://wiki.dnanexus.com/API-Specification-v1.0.0/System-Methods#API-method:-/system/resolveDataObjects
    """
    return DXHTTPRequest('/system/resolveDataObjects', input_params, always_retry=always_retry, **kwargs)

def system_find_executions(input_params={}, always_retry=True, **kwargs):
    """Call the /system/findExecutions API route.
    Docs: https://wiki.dnanexus.com/API-Specification-v1.0.0/Search#API-method%3A-%2Fsystem%2FfindExecutions
    """
    return DXHTTPRequest('/system/findExecutions', input_params, always_retry=always_retry, **kwargs)

def system_find_analyses(input_params={}, always_retry=True, **kwargs):
    """Call the /system/findAnalyses API route.
    Docs: https://wiki.dnanexus.com/API-Specification-v1.0.0/Search#API-method%3A-%2Fsystem%2FfindAnalyses
    """
    return DXHTTPRequest('/system/findAnalyses', input_params, always_retry=always_retry, **kwargs)

def system_find_jobs(input_params={}, always_retry=True, **kwargs):
    """Call the /system/findJobs API route.
    Docs: https://wiki.dnanexus.com/API-Specification-v1.0.0/Search#API-method%3A-%2Fsystem%2FfindJobs
    """
    return DXHTTPRequest('/system/findJobs', input_params, always_retry=always_retry, **kwargs)

def system_find_projects(input_params={}, always_retry=True, **kwargs):
    """Call the /system/findProjects API route.
    Docs: https://wiki.dnanexus.com/API-Specification-v1.0.0/Search#API-method%3A-%2Fsystem%2FfindProjects
    """
    return DXHTTPRequest('/system/findProjects', input_params, always_retry=always_retry, **kwargs)

def system_find_users(input_params={}, always_retry=True, **kwargs):
    """Call the /system/findUsers API route.
    Docs: https://wiki.dnanexus.com/API-Specification-v1.0.0/Search#API-method%3A-%2Fsystem%2FfindUsers
    """
    return DXHTTPRequest('/system/findUsers', input_params, always_retry=always_retry, **kwargs)

def system_find_project_members(input_params={}, always_retry=True, **kwargs):
    """Call the /system/findProjectMembers API route.
    Docs: https://wiki.dnanexus.com/API-Specification-v1.0.0/Search#API-method:-/system/findProjectMembers
    """
    return DXHTTPRequest('/system/findProjectMembers', input_params, always_retry=always_retry, **kwargs)

def system_find_orgs(input_params={}, always_retry=True, **kwargs):
    """Call the /system/findOrgs API route.
    Docs: https://wiki.dnanexus.com/API-Specification-v1.0.0/Search#API-method:-/system/findOrgs
    """
    return DXHTTPRequest('/system/findOrgs', input_params, always_retry=always_retry, **kwargs)

def system_global_search(input_params={}, always_retry=True, **kwargs):
    """Call the /system/globalSearch API route.
    Docs: https://wiki.dnanexus.com/API-Specification-v1.0.0/Search#API-method:-/system/globalSearch
    """
    return DXHTTPRequest('/system/globalSearch', input_params, always_retry=always_retry, **kwargs)

def system_greet(input_params={}, always_retry=True, **kwargs):
    """Call the /system/greet API route."""
    return DXHTTPRequest('/system/greet', input_params, always_retry=always_retry, **kwargs)

def system_headers(input_params={}, always_retry=True, **kwargs):
    """Call the /system/headers API route."""
    return DXHTTPRequest('/system/headers', input_params, always_retry=always_retry, **kwargs)

def system_shorten_url(input_params={}, always_retry=True, **kwargs):
    """Call the /system/shortenURL API route."""
    return DXHTTPRequest('/system/shortenURL', input_params, always_retry=always_retry, **kwargs)

def system_whoami(input_params={}, always_retry=True, **kwargs):
    """Call the /system/whoami API route."""
    return DXHTTPRequest('/system/whoami', input_params, always_retry=always_retry, **kwargs)
def user_describe(object_id, input_params={}, always_retry=True, **kwargs):
    """Call the /user-xxxx/describe API route.
    Docs: https://wiki.dnanexus.com/API-Specification-v1.0.0/Users#API-method%3A-%2Fuser-xxxx%2Fdescribe
    """
    return DXHTTPRequest('/{}/describe'.format(object_id), input_params, always_retry=always_retry, **kwargs)

def user_update(object_id, input_params={}, always_retry=False, **kwargs):
    """Call the /user-xxxx/update API route.
    Docs: https://wiki.dnanexus.com/API-Specification-v1.0.0/Users#API-method%3A-%2Fuser-xxxx%2Fupdate
    """
    # Not retried by default.
    return DXHTTPRequest('/{}/update'.format(object_id), input_params, always_retry=always_retry, **kwargs)
def workflow_add_stage(object_id, input_params={}, always_retry=True, **kwargs):
"""
Invokes the /workflow-xxxx/addStage API method.
For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Workflows-and-Analyses#API-method%3A-%2Fworkflow-xxxx%2FaddStage
"""
return DXHTTPRequest('/%s/addStage' % object_id, input_params, always_retry=always_retry, **kwargs)
def workflow_add_tags(object_id, input_params={}, always_retry=True, **kwargs):
"""
Invokes the /workflow-xxxx/addTags API method.
For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Tags#API-method%3A-%2Fclass-xxxx%2FaddTags
"""
return DXHTTPRequest('/%s/addTags' % object_id, input_params, always_retry=always_retry, **kwargs)
def workflow_add_types(object_id, input_params={}, always_retry=True, **kwargs):
"""
Invokes the /workflow-xxxx/addTypes API method.
For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Types#API-method%3A-%2Fclass-xxxx%2FaddTypes
"""
return DXHTTPRequest('/%s/addTypes' % object_id, input_params, always_retry=always_retry, **kwargs)
def workflow_close(object_id, input_params={}, always_retry=True, **kwargs):
"""
Invokes the /workflow-xxxx/close API method.
For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Data-Object-Lifecycle#API-method%3A-%2Fclass-xxxx%2Fclose
"""
return DXHTTPRequest('/%s/close' % object_id, input_params, always_retry=always_retry, **kwargs)
def workflow_describe(object_id, input_params={}, always_retry=True, **kwargs):
"""
Invokes the /workflow-xxxx/describe API method.
For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Workflows-and-Analyses#API-method%3A-%2Fworkflow-xxxx%2Fdescribe
"""
return DXHTTPRequest('/%s/describe' % object_id, input_params, always_retry=always_retry, **kwargs)
def workflow_dry_run(object_id, input_params={}, always_retry=True, **kwargs):
"""
Invokes the /workflow-xxxx/dryRun API method.
For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Workflows-and-Analyses#API-method%3A-%2Fworkflow-xxxx%2FdryRun
"""
return DXHTTPRequest('/%s/dryRun' % object_id, input_params, always_retry=always_retry, **kwargs)
def workflow_get_details(object_id, input_params={}, always_retry=True, **kwargs):
"""
Invokes the /workflow-xxxx/getDetails API method.
For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Details-and-Links#API-method%3A-%2Fclass-xxxx%2FgetDetails
"""
return DXHTTPRequest('/%s/getDetails' % object_id, input_params, always_retry=always_retry, **kwargs)
def workflow_is_stage_compatible(object_id, input_params={}, always_retry=True, **kwargs):
"""
Invokes the /workflow-xxxx/isStageCompatible API method.
For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Workflows-and-Analyses#API-method%3A-%2Fworkflow-xxxx%2FisStageCompatible
"""
return DXHTTPRequest('/%s/isStageCompatible' % object_id, input_params, always_retry=always_retry, **kwargs)
def workflow_list_projects(object_id, input_params={}, always_retry=True, **kwargs):
"""
Invokes the /workflow-xxxx/listProjects API method.
For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Cloning#API-method%3A-%2Fclass-xxxx%2FlistProjects
"""
return DXHTTPRequest('/%s/listProjects' % object_id, input_params, always_retry=always_retry, **kwargs)
def workflow_move_stage(object_id, input_params={}, always_retry=True, **kwargs):
"""
Invokes the /workflow-xxxx/moveStage API method.
For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Workflows-and-Analyses#API-method%3A-%2Fworkflow-xxxx%2FmoveStage
"""
return DXHTTPRequest('/%s/moveStage' % object_id, input_params, always_retry=always_retry, **kwargs)
def workflow_overwrite(object_id, input_params={}, always_retry=True, **kwargs):
"""
Invokes the /workflow-xxxx/overwrite API method.
For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Workflows-and-Analyses#API-method%3A-%2Fworkflow-xxxx%2Foverwrite
"""
return DXHTTPRequest('/%s/overwrite' % object_id, input_params, always_retry=always_retry, **kwargs)
def workflow_remove_stage(object_id, input_params={}, always_retry=True, **kwargs):
"""
Invokes the /workflow-xxxx/removeStage API method.
For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Workflows-and-Analyses#API-method%3A-%2Fworkflow-xxxx%2FremoveStage
"""
return DXHTTPRequest('/%s/removeStage' % object_id, input_params, always_retry=always_retry, **kwargs)
def workflow_remove_tags(object_id, input_params={}, always_retry=True, **kwargs):
"""
Invokes the /workflow-xxxx/removeTags API method.
For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Tags#API-method%3A-%2Fclass-xxxx%2FremoveTags
"""
return DXHTTPRequest('/%s/removeTags' % object_id, input_params, always_retry=always_retry, **kwargs)
def workflow_remove_types(object_id, input_params={}, always_retry=True, **kwargs):
"""
Invokes the /workflow-xxxx/removeTypes API method.
For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Types#API-method%3A-%2Fclass-xxxx%2FremoveTypes
"""
return DXHTTPRequest('/%s/removeTypes' % object_id, input_params, always_retry=always_retry, **kwargs)
def workflow_rename(object_id, input_params={}, always_retry=True, **kwargs):
"""
Invokes the /workflow-xxxx/rename API method.
For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Name#API-method%3A-%2Fclass-xxxx%2Frename
"""
return DXHTTPRequest('/%s/rename' % object_id, input_params, always_retry=always_retry, **kwargs)
def workflow_run(object_id, input_params={}, always_retry=True, **kwargs):
"""
Invokes the /workflow-xxxx/run API method.
For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Workflows-and-Analyses#API-method%3A-%2Fworkflow-xxxx%2Frun
"""
input_params_cp = Nonce.update_nonce(input_params)
return DXHTTPRequest('/%s/run' % object_id, input_params_cp, always_retry=always_retry, **kwargs)
def workflow_set_details(object_id, input_params={}, always_retry=True, **kwargs):
    """
    Invokes the /workflow-xxxx/setDetails API method.

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Details-and-Links#API-method%3A-%2Fclass-xxxx%2FsetDetails
    """
    resource = '/{}/setDetails'.format(object_id)
    return DXHTTPRequest(resource, input_params, always_retry=always_retry,
                         **kwargs)
def workflow_set_properties(object_id, input_params={}, always_retry=True, **kwargs):
    """
    Invokes the /workflow-xxxx/setProperties API method.

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Properties#API-method%3A-%2Fclass-xxxx%2FsetProperties
    """
    resource = '/{}/setProperties'.format(object_id)
    return DXHTTPRequest(resource, input_params, always_retry=always_retry,
                         **kwargs)
def workflow_set_stage_inputs(object_id, input_params={}, always_retry=True, **kwargs):
    """
    Invokes the /workflow-xxxx/setStageInputs API method.

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Workflows-and-Analyses#API-method%3A-%2Fworkflow-xxxx%2FsetStageInputs
    """
    resource = '/{}/setStageInputs'.format(object_id)
    return DXHTTPRequest(resource, input_params, always_retry=always_retry,
                         **kwargs)
def workflow_set_visibility(object_id, input_params={}, always_retry=True, **kwargs):
    """
    Invokes the /workflow-xxxx/setVisibility API method.

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Visibility#API-method%3A-%2Fclass-xxxx%2FsetVisibility
    """
    resource = '/{}/setVisibility'.format(object_id)
    return DXHTTPRequest(resource, input_params, always_retry=always_retry,
                         **kwargs)
def workflow_update(object_id, input_params={}, always_retry=True, **kwargs):
    """
    Invokes the /workflow-xxxx/update API method.

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Workflows-and-Analyses#API-method%3A-%2Fworkflow-xxxx%2Fupdate
    """
    resource = '/{}/update'.format(object_id)
    return DXHTTPRequest(resource, input_params, always_retry=always_retry,
                         **kwargs)
def workflow_update_stage_executable(object_id, input_params={}, always_retry=True, **kwargs):
    """
    Invokes the /workflow-xxxx/updateStageExecutable API method.

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Workflows-and-Analyses#API-method%3A-%2Fworkflow-xxxx%2FupdateStageExecutable
    """
    resource = '/{}/updateStageExecutable'.format(object_id)
    return DXHTTPRequest(resource, input_params, always_retry=always_retry,
                         **kwargs)
def workflow_new(input_params={}, always_retry=True, **kwargs):
    """
    Invokes the /workflow/new API method.

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Workflows-and-Analyses#API-method%3A-%2Fworkflow%2Fnew
    """
    # Attach an idempotency nonce to the (copied) input before dispatching.
    params_with_nonce = Nonce.update_nonce(input_params)
    return DXHTTPRequest('/workflow/new', params_with_nonce,
                         always_retry=always_retry, **kwargs)
| {
"content_hash": "cfd0e9549e3998d1f6d34b2aad7de11a",
"timestamp": "",
"source": "github",
"line_count": 1372,
"max_line_length": 158,
"avg_line_length": 47.96355685131196,
"alnum_prop": 0.7071087742759019,
"repo_name": "jhuttner/dx-toolkit",
"id": "49d7ee2af7ac0cd28502121d41a4257ae9b93574",
"size": "65966",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/python/dxpy/api.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "3198"
},
{
"name": "C",
"bytes": "6957"
},
{
"name": "C++",
"bytes": "1880260"
},
{
"name": "CMake",
"bytes": "26162"
},
{
"name": "Groovy",
"bytes": "8855"
},
{
"name": "Java",
"bytes": "2177401"
},
{
"name": "Makefile",
"bytes": "50221"
},
{
"name": "NSIS",
"bytes": "17861"
},
{
"name": "Perl",
"bytes": "46855"
},
{
"name": "PowerShell",
"bytes": "1442"
},
{
"name": "Python",
"bytes": "2261586"
},
{
"name": "R",
"bytes": "550095"
},
{
"name": "Ruby",
"bytes": "78045"
},
{
"name": "Shell",
"bytes": "58977"
}
],
"symlink_target": ""
} |
import base64
import calendar
import hashlib
import hmac
import json
import time
from libsaas import parsers, http, port
from libsaas.services import base
from . import resources
class Mozscape(base.Resource):
    """Entry point for the Mozscape (Linkscape) API service."""

    # Lifetime, in seconds, of a request signature.
    EXPIRES = 300
    # Clock used when computing signature expiry; overridable in tests.
    timesource = time.gmtime

    def __init__(self, access_id, secret_key):
        """
        Create a Mozscape service.

        :var access_id: Your Mozscape AccessID.
        :vartype access_id: str

        :var secret_key: Your Mozscape Secret Key.
        :vartype secret_key: str
        """
        self.apiroot = 'https://lsapi.seomoz.com/linkscape'
        self.access_id = access_id
        self.secret_key = secret_key

        # Every outgoing request gets the API root prepended and is signed.
        self.add_filter(self.add_api_root)
        self.add_filter(self.sign_request)

    def add_api_root(self, request):
        # Turn the relative URI into an absolute one.
        request.uri = self.apiroot + request.uri

    def sign_request(self, request):
        # Requests flagged `nosign` were already signed elsewhere.
        if getattr(request, 'nosign', False):
            return

        expiry = str(calendar.timegm(self.timesource()) + self.EXPIRES)
        message = port.to_b(self.access_id + '\n' + expiry)
        digest = hmac.new(port.to_b(self.secret_key), message,
                          hashlib.sha1).digest()

        request.params['AccessID'] = self.access_id
        request.params['Expires'] = expiry
        request.params['Signature'] = port.to_u(base64.b64encode(digest))

    @base.apimethod
    def urlmetrics(self, urls, cols):
        """
        Fetch URL metrics for one or more URLs.

        :var urls: The URLs you're interested in.
        :vartype urls: str or list of str

        :var cols: The sum of column constants for metrics you want to have
            fetched, taken from `libsaas.services.mozscape.constants`.
        """
        # Batched lookups take a different request shape.
        if isinstance(urls, list):
            return self.list_urlmetrics(urls, str(cols))

        uri = '/url-metrics/{0}/'.format(http.quote_any(urls))
        return http.Request('GET', uri, {'Cols': str(cols)}), parsers.parse_json

    def list_urlmetrics(self, urls, cols):
        # For url-metrics the URLs are passed as POST body, but the remaining
        # parameters should still be in the URL. Work around this by manually
        # generating the signature and then replacing the body.
        request = http.Request('POST', '')
        self.sign_request(request)
        request.nosign = True

        request.params['Cols'] = cols
        request.uri = '/url-metrics/?' + http.urlencode_any(request.params)
        request.params = json.dumps(urls)

        return request, parsers.parse_json

    @base.resource(resources.Metadata)
    def metadata(self):
        """
        Return the resource responsible for Mozscape Index metadata.
        """
        return resources.Metadata(self)
| {
"content_hash": "361997f5e514aa4b8c9bb2cdf0cf4c3a",
"timestamp": "",
"source": "github",
"line_count": 91,
"max_line_length": 88,
"avg_line_length": 30.406593406593405,
"alnum_prop": 0.6277556920852909,
"repo_name": "ducksboard/libsaas",
"id": "280349895d3964ea7fc92a871bd056a64b57efe4",
"size": "2767",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "libsaas/services/mozscape/service.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "954078"
}
],
"symlink_target": ""
} |
from django import forms
class QuestionForm(forms.Form):
    """Form for submitting a question, optionally anonymously."""

    question_text = forms.CharField(
        label='Ask something!',
        max_length=300,
        required=True,
        widget=forms.Textarea,
    )
    anonymous = forms.BooleanField(
        label='Ask Anonymously',
        required=False,
        widget=forms.CheckboxInput,
    )

    def clean_question_text(self):
        """Strip surrounding whitespace from the submitted question."""
        text = self.cleaned_data.get('question_text')
        return text.strip() if text else text
| {
"content_hash": "2a406bdd22582e77ee6899a413953494",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 62,
"avg_line_length": 26.142857142857142,
"alnum_prop": 0.6229508196721312,
"repo_name": "shakib609/AskFmClone",
"id": "291a014034b7c1b32c57398a8959f5dae1323310",
"size": "549",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "apps/askfm/forms.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "2283"
},
{
"name": "HTML",
"bytes": "21162"
},
{
"name": "JavaScript",
"bytes": "1012"
},
{
"name": "Python",
"bytes": "32091"
},
{
"name": "Shell",
"bytes": "158"
}
],
"symlink_target": ""
} |
"""Multimodal block environments for the XArm."""
import collections
import math
from typing import Dict, List, Optional, Union
import gin
from gym import spaces
from gym.envs import registration
from ibc.environments.block_pushing import block_pushing
from ibc.environments.block_pushing import metrics as block_pushing_metrics
from ibc.environments.utils import utils_pybullet
from ibc.environments.utils.pose3d import Pose3d
from ibc.environments.utils.utils_pybullet import ObjState
from ibc.environments.utils.utils_pybullet import XarmState
import numpy as np
from scipy.spatial import transform
import pybullet
import pybullet_utils.bullet_client as bullet_client
# pytype: skip-file

# URDF models for the second block and second target zone (the first
# block/zone paths come from the base `block_pushing` module).
BLOCK2_URDF_PATH = 'third_party/py/ibc/environments/assets/block2.urdf'
ZONE2_URDF_PATH = 'third_party/py/ibc/environments/assets/zone2.urdf'

# When resetting multiple targets, they should all be this far apart.
MIN_BLOCK_DIST = 0.1
MIN_TARGET_DIST = 0.12
# pylint: enable=line-too-long
# Maximum rejection-sampling attempts when placing blocks/targets.
NUM_RESET_ATTEMPTS = 1000
@gin.configurable
def build_env_name(task, shared_memory, use_image_obs):
  """Construct the env name from parameters."""
  del task  # Unused; accepted for configuration compatibility.
  parts = ['BlockPushMultimodal']
  if use_image_obs:
    parts.append('Rgb')
  env_name = ''.join(parts)
  if shared_memory:
    env_name = 'Shared' + env_name
  return env_name + '-v0'
@gin.configurable
class BlockPushMultimodal(block_pushing.BlockPush):
  """2 blocks, 2 targets.

  Multimodal variant of the block-pushing environment: two blocks must each
  end up in a *different* target zone for the episode to count as a success.
  """

  def get_metrics(self, num_episodes):
    # Track average success over a rolling buffer of `num_episodes` episodes;
    # the success metric is also returned separately for convenience.
    metrics = [block_pushing_metrics.AverageSuccessMetric(
        self, buffer_size=num_episodes)]
    success_metric = metrics[-1]
    return metrics, success_metric

  def __init__(self,
               control_frequency=10.0,
               task=block_pushing.BlockTaskVariant.PUSH,
               image_size=None,
               shared_memory=False,
               seed=None,
               goal_dist_tolerance=0.04):
    """Creates an env instance.

    Args:
      control_frequency: Control frequency for the arm. Each env step will
        advance the simulation by 1/control_frequency seconds.
      task: enum for which task, see BlockTaskVariant enum.
      image_size: Optional image size (height, width). If None, no image
        observations will be used.
      shared_memory: If True `pybullet.SHARED_MEMORY` is used to connect to
        pybullet. Useful to debug.
      seed: Optional seed for the environment.
      goal_dist_tolerance: float, how far away from the goal to terminate.
    """
    # Must be initialized before super().__init__, which triggers scene setup.
    self._target_ids = None
    self._target_poses = None
    super(BlockPushMultimodal, self).__init__(
        control_frequency=control_frequency,
        task=task,
        image_size=image_size,
        shared_memory=shared_memory,
        seed=seed,
        goal_dist_tolerance=goal_dist_tolerance)

  @property
  def target_poses(self):
    # Poses of both target zones (list of Pose3d), or None before reset.
    return self._target_poses

  def get_goal_translation(self):
    """Return the translation component of the goal (2D)."""
    if self._target_poses:
      return [i.translation for i in self._target_poses]
    else:
      return None

  def _setup_pybullet_scene(self):
    # Builds the simulation: workspace + robot, two target zones, two blocks.
    self._pybullet_client = bullet_client.BulletClient(self._connection_mode)

    # Temporarily disable rendering to speed up loading URDFs.
    pybullet.configureDebugVisualizer(pybullet.COV_ENABLE_RENDERING, 0)

    self._setup_workspace_and_robot()

    self._target_ids = [
        utils_pybullet.load_urdf(self._pybullet_client, i, useFixedBase=True)
        for i in [block_pushing.ZONE_URDF_PATH, ZONE2_URDF_PATH]]
    self._block_ids = []
    for i in [block_pushing.BLOCK_URDF_PATH, BLOCK2_URDF_PATH]:
      self._block_ids.append(utils_pybullet.load_urdf(
          self._pybullet_client, i, useFixedBase=False))

    # Re-enable rendering.
    pybullet.configureDebugVisualizer(pybullet.COV_ENABLE_RENDERING, 1)

    self.step_simulation_to_stabilize()

  def _reset_block_poses(self, workspace_center_x):
    """Resets block poses."""

    # Helper for choosing random block position.
    def _reset_block_pose(idx, avoid=None):

      def _get_random_translation():
        block_x = workspace_center_x + self._rng.uniform(low=-0.1, high=0.1)
        block_y = -0.2 + self._rng.uniform(low=-0.15, high=0.15)
        block_translation = np.array([block_x, block_y, 0])
        return block_translation

      if avoid is None:
        block_translation = _get_random_translation()
      else:
        # Reject targets too close to `avoid`.
        # NOTE(review): only the x components are compared
        # (translation[0] vs avoid[0]); confirm whether the full 2D
        # distance was intended here.
        for _ in range(NUM_RESET_ATTEMPTS):
          block_translation = _get_random_translation()
          dist = np.linalg.norm(block_translation[0] - avoid[0])
          # print('block inner try_idx %d, dist %.3f' % (try_idx, dist))
          if dist > MIN_BLOCK_DIST:
            break
      # NOTE(review): uniform(math.pi) passes pi as `low` with the default
      # high; likely intended uniform(0, math.pi) — confirm.
      block_sampled_angle = self._rng.uniform(math.pi)
      block_rotation = transform.Rotation.from_rotvec(
          [0, 0, block_sampled_angle])

      self._pybullet_client.resetBasePositionAndOrientation(
          self._block_ids[idx], block_translation.tolist(),
          block_rotation.as_quat().tolist())
      return block_translation

    # Reject targets too close to `avoid`.
    for _ in range(NUM_RESET_ATTEMPTS):
      # Reset first block.
      b0_translation = _reset_block_pose(0)
      # Reset second block away from first block.
      b1_translation = _reset_block_pose(1, avoid=b0_translation)
      # NOTE(review): x-component-only distance again — see note above.
      dist = np.linalg.norm(b0_translation[0] - b1_translation[0])
      if dist > MIN_BLOCK_DIST:
        break
    else:
      raise ValueError('could not find matching block')
    assert dist > MIN_BLOCK_DIST

  def _reset_target_poses(self, workspace_center_x):
    """Resets target poses."""

    def _reset_target_pose(idx, avoid=None):

      def _get_random_translation():
        # Choose x,y randomly.
        target_x = workspace_center_x + self._rng.uniform(low=-0.10, high=0.10)
        target_y = 0.2 + self._rng.uniform(low=-0.15, high=0.15)
        target_translation = np.array([target_x, target_y, 0.020])
        return target_translation

      if avoid is None:
        target_translation = _get_random_translation()
      else:
        # Reject targets too close to `avoid`.
        # NOTE(review): x-component-only distance comparison — confirm.
        for _ in range(NUM_RESET_ATTEMPTS):
          target_translation = _get_random_translation()
          dist = np.linalg.norm(target_translation[0] - avoid[0])
          # print('target inner try_idx %d, dist %.3f' % (try_idx, dist))
          if dist > MIN_TARGET_DIST:
            break
      # Targets face roughly "down" (pi) with +/- 30 degrees of jitter.
      target_sampled_angle = math.pi + self._rng.uniform(
          low=-math.pi / 6, high=math.pi / 6)
      target_rotation = transform.Rotation.from_rotvec(
          [0, 0, target_sampled_angle])

      self._pybullet_client.resetBasePositionAndOrientation(
          self._target_ids[idx], target_translation.tolist(),
          target_rotation.as_quat().tolist())
      self._target_poses[idx] = Pose3d(rotation=target_rotation,
                                       translation=target_translation)

    if self._target_poses is None:
      self._target_poses = [None for _ in range(len(self._target_ids))]

    for _ in range(NUM_RESET_ATTEMPTS):
      # Choose the first target.
      _reset_target_pose(0)
      _reset_target_pose(1, avoid=self._target_poses[0].translation)
      dist = np.linalg.norm(self._target_poses[0].translation[0] -
                            self._target_poses[1].translation[0])
      if dist > MIN_TARGET_DIST:
        break
    else:
      raise ValueError('could not find matching target')
    assert dist > MIN_TARGET_DIST

  def reset(self, reset_poses=True):
    # If reset_poses is False, only the cached target poses and the
    # observation are refreshed (used after state playback).
    workspace_center_x = 0.4

    if reset_poses:
      self._pybullet_client.restoreState(self._saved_state)

      rotation = transform.Rotation.from_rotvec([0, math.pi, 0])
      translation = np.array([0.3, -0.4, block_pushing.EFFECTOR_HEIGHT])
      starting_pose = Pose3d(rotation=rotation, translation=translation)
      self._set_robot_target_effector_pose(starting_pose)

      # TODO(oars): Seems like restoreState doesn't clear JointMotorControl.

      # Reset block poses.
      self._reset_block_poses(workspace_center_x)

      # Reset target poses.
      self._reset_target_poses(workspace_center_x)
    else:
      self._target_poses = [
          self._get_target_pose(idx) for idx in self._target_ids]

    if reset_poses:
      self.step_simulation_to_stabilize()

    state = self._compute_state()
    self._previous_state = state
    return state

  def _get_target_pose(self, idx):
    # Read the current pose of target body `idx` back from the simulator.
    target_translation, target_orientation_quat = (
        self._pybullet_client.getBasePositionAndOrientation(idx))
    target_rotation = transform.Rotation.from_quat(target_orientation_quat)
    target_translation = np.array(target_translation)
    return Pose3d(rotation=target_rotation, translation=target_translation)

  def _compute_reach_target(self, state):
    # Point 5cm behind the block along the block->target direction, i.e. the
    # side of the block from which pushing moves it toward the target.
    xy_block = state['block_translation']
    xy_target = state['target_translation']

    xy_block_to_target = xy_target - xy_block
    xy_dir_block_to_target = (
        xy_block_to_target) / np.linalg.norm(xy_block_to_target)
    self.reach_target_translation = (xy_block + -1
                                     * xy_dir_block_to_target * 0.05)

  def _compute_state(self):
    # Build the observation dict: 2D translations and yaw angles for both
    # blocks and both targets, plus effector positions (and optionally RGB).
    effector_pose = self._robot.forward_kinematics()

    def _get_block_pose(idx):
      block_position_and_orientation = self._pybullet_client.getBasePositionAndOrientation(
          self._block_ids[idx])
      block_pose = Pose3d(
          rotation=transform.Rotation.from_quat(
              block_position_and_orientation[1]),
          translation=block_position_and_orientation[0])
      return block_pose

    block_poses = [_get_block_pose(i) for i in range(len(self._block_ids))]

    def _yaw_from_pose(pose):
      # Yaw = rotation about z, the last of the 'xyz' Euler angles.
      return np.array([pose.rotation.as_euler('xyz', degrees=False)[-1]])

    obs = collections.OrderedDict(
        block_translation=block_poses[0].translation[0:2],
        block_orientation=_yaw_from_pose(block_poses[0]),
        block2_translation=block_poses[1].translation[0:2],
        block2_orientation=_yaw_from_pose(block_poses[1]),
        effector_translation=effector_pose.translation[0:2],
        effector_target_translation=self._target_effector_pose.translation[0:2],
        target_translation=self._target_poses[0].translation[0:2],
        target_orientation=_yaw_from_pose(self._target_poses[0]),
        target2_translation=self._target_poses[1].translation[0:2],
        target2_orientation=_yaw_from_pose(self._target_poses[1]))
    if self._image_size is not None:
      obs['rgb'] = self._render_camera(self._image_size)
    return obs

  def step(self, action):
    self._step_robot_and_sim(action)

    state = self._compute_state()

    # TODO(oars, peteflorence): Fix calculation for when the block is within the
    # insert object.
    done = False
    reward = self._get_reward(state)
    if reward > 0.:
      # Terminate the episode if both blocks are close enough to the targets.
      done = True

    return state, reward, done, {}

  def _get_reward(self, state):
    # Reward is 1. if both blocks are inside targets, but not the same target.
    targets = ['target', 'target2']

    def _block_target_dist(block, target):
      return np.linalg.norm(state['%s_translation' % block]
                            - state['%s_translation' % target])

    def _closest_target(block):
      # Distances to all targets.
      dists = [_block_target_dist(block, t) for t in targets]
      # Which is closest.
      closest_target = targets[np.argmin(dists)]
      closest_dist = np.min(dists)
      # Is it in the closest target?
      in_target = closest_dist < self.goal_dist_tolerance
      return closest_target, in_target

    b0_closest_target, b0_in_target = _closest_target('block')
    b1_closest_target, b1_in_target = _closest_target('block2')
    reward = 0.
    if b0_in_target and b1_in_target and (
        b0_closest_target != b1_closest_target):
      reward = 1.
    return reward

  def _compute_goal_distance(self, state):
    # Mean, over targets, of the distance to the nearest block.
    blocks = ['block', 'block2']

    def _target_block_dist(target, block):
      return np.linalg.norm(state['%s_translation' % block]
                            - state['%s_translation' % target])

    def _closest_block_dist(target):
      dists = [_target_block_dist(target, b) for b in blocks]
      closest_dist = np.min(dists)
      return closest_dist

    t0_closest_dist = _closest_block_dist('target')
    t1_closest_dist = _closest_block_dist('target2')
    return np.mean([t0_closest_dist, t1_closest_dist])

  @property
  def succeeded(self):
    # Success iff the current state earns a positive reward.
    state = self._compute_state()
    reward = self._get_reward(state)
    if reward > 0:
      return True
    return False

  def _create_observation_space(self, image_size):
    pi2 = math.pi * 2

    # TODO(oars, peteflorence): We can consider simplifying the obs specs,
    # especially the observations so that they go frim 0 to 2pi, with no
    # negatives.
    # NOTE(review): the effector Boxes pass no `shape`; the shape is taken
    # from the low/high bound arrays — confirm those are 2-vectors.
    obs_dict = collections.OrderedDict(
        block_translation=spaces.Box(low=-5, high=5, shape=(2,)),  # x,y
        block_orientation=spaces.Box(low=-pi2, high=pi2, shape=(1,)),  # phi
        block2_translation=spaces.Box(low=-5, high=5, shape=(2,)),  # x,y
        block2_orientation=spaces.Box(low=-pi2, high=pi2, shape=(1,)),  # phi
        effector_translation=spaces.Box(
            low=block_pushing.WORKSPACE_BOUNDS[0] - 0.1,
            high=block_pushing.WORKSPACE_BOUNDS[1] + 0.1,
        ),  # x,y
        effector_target_translation=spaces.Box(
            low=block_pushing.WORKSPACE_BOUNDS[0] - 0.1,
            high=block_pushing.WORKSPACE_BOUNDS[1] + 0.1,
        ),  # x,y
        target_translation=spaces.Box(low=-5, high=5, shape=(2,)),  # x,y
        target_orientation=spaces.Box(
            low=-pi2,
            high=pi2,
            shape=(1,),
        ),  # theta
        target2_translation=spaces.Box(low=-5, high=5, shape=(2,)),  # x,y
        target2_orientation=spaces.Box(
            low=-pi2,
            high=pi2,
            shape=(1,),
        ),  # theta
    )
    if image_size is not None:
      obs_dict['rgb'] = spaces.Box(
          low=0,
          high=255,
          shape=(image_size[0], image_size[1], 3),
          dtype=np.uint8)
    return spaces.Dict(obs_dict)

  def get_pybullet_state(self):
    """Save pybullet state of the scene.

    Returns:
      dict containing 'robots', 'robot_end_effectors', 'targets', 'objects',
      each containing a list of ObjState.
    """
    # TODO(tompson): This is very brittle.
    # Restoring state requires that block objects are created in the same order
    # as was done at save time and a lot of hparams are consistent (e.g.
    # control_frequence, etc).
    state: Dict[str, List[ObjState]] = {}

    state['robots'] = [
        XarmState.get_bullet_state(
            self._pybullet_client, self.robot.xarm,
            target_effector_pose=self._target_effector_pose,
            goal_translation=None)]

    state['robot_end_effectors'] = []
    if self.robot.end_effector:
      state['robot_end_effectors'].append(
          ObjState.get_bullet_state(
              self._pybullet_client, self.robot.end_effector))

    state['targets'] = []
    if self._target_ids:
      for target_id in self._target_ids:
        state['targets'].append(ObjState.get_bullet_state(
            self._pybullet_client, target_id))

    state['objects'] = []
    for obj_id in self.get_obj_ids():
      state['objects'].append(ObjState.get_bullet_state(
          self._pybullet_client, obj_id))

    return state

  def set_pybullet_state(
      self, state):
    """Restore pyullet state.

    WARNING: py_environment wrapper assumes environments aren't reset in their
    constructor and will often reset the environment unintentionally. It is
    always recommeneded that you call env.reset on the tfagents wrapper before
    playback (replaying pybullet_state).

    Args:
      state: dict containing 'robots', 'robot_end_effectors', 'targets',
        'objects', each containing a list of ObjState.
    """
    assert isinstance(state['robots'][0], XarmState)
    xarm_state: XarmState = state['robots'][0]
    xarm_state.set_bullet_state(self._pybullet_client, self.robot.xarm)
    self._set_robot_target_effector_pose(xarm_state.target_effector_pose)

    def _set_state_safe(obj_state, obj_id):
      # Apply a saved state only when both sides exist; assert on mismatch.
      if obj_state is not None:
        assert obj_id is not None, 'Cannot set state for missing object.'
        obj_state.set_bullet_state(self._pybullet_client, obj_id)
      else:
        assert obj_id is None, f'No state found for obj_id {obj_id}'

    robot_end_effectors = state['robot_end_effectors']
    _set_state_safe(
        None if not robot_end_effectors else robot_end_effectors[0],
        self.robot.end_effector)

    for target_state, target_id in zip(state['targets'], self._target_ids):
      _set_state_safe(target_state, target_id)

    obj_ids = self.get_obj_ids()
    assert len(state['objects']) == len(obj_ids), 'State length mismatch'
    for obj_state, obj_id in zip(state['objects'], obj_ids):
      _set_state_safe(obj_state, obj_id)

    # Refresh cached target poses/observation without re-randomizing the scene.
    self.reset(reset_poses=False)
# Drop any stale entry first (e.g. on module reload) so `register` below
# does not raise about a duplicate environment id.
if 'BlockPushMultimodal-v0' in registration.registry.env_specs:
  del registration.registry.env_specs['BlockPushMultimodal-v0']

# Standard state-observation variant.
registration.register(
    id='BlockPushMultimodal-v0',
    entry_point=BlockPushMultimodal,
    max_episode_steps=200)

# Variant connecting to a shared-memory pybullet server (for debugging).
registration.register(
    id='SharedBlockPushMultimodal-v0',
    entry_point=BlockPushMultimodal,
    kwargs=dict(shared_memory=True),
    max_episode_steps=200)

# Variant that adds RGB image observations.
registration.register(
    id='BlockPushMultimodalRgb-v0',
    entry_point=BlockPushMultimodal,
    max_episode_steps=200,
    kwargs=dict(
        image_size=(block_pushing.IMAGE_HEIGHT, block_pushing.IMAGE_WIDTH)))
| {
"content_hash": "a38f6008d4c110e579df93e160ef0606",
"timestamp": "",
"source": "github",
"line_count": 476,
"max_line_length": 91,
"avg_line_length": 37.32142857142857,
"alnum_prop": 0.650661412890515,
"repo_name": "google-research/ibc",
"id": "75dc66a3e971a8c199eae1dd5c6e6bd6d25c866e",
"size": "18366",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "environments/block_pushing/block_pushing_multimodal.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "390694"
},
{
"name": "Shell",
"bytes": "8343"
}
],
"symlink_target": ""
} |
"""
Tests of ``jedi.api.Interpreter``.
"""
from ..helpers import TestCase
import jedi
from jedi._compatibility import is_py33
class TestInterpreterAPI(TestCase):
    """Completion tests for ``jedi.Interpreter`` over live objects."""

    def check_interpreter_complete(self, source, namespace, completions,
                                   **kwds):
        # Compare completion names irrespective of ordering.
        script = jedi.Interpreter(source, [namespace], **kwds)
        actual = sorted(completion.name for completion in script.completions())
        self.assertEqual(actual, sorted(completions))

    def test_complete_raw_function(self):
        from os.path import join
        self.check_interpreter_complete('join().up', locals(), ['upper'])

    def test_complete_raw_function_different_name(self):
        from os.path import join as pjoin
        self.check_interpreter_complete('pjoin().up', locals(), ['upper'])

    def test_complete_raw_module(self):
        import os
        self.check_interpreter_complete('os.path.join().up', locals(),
                                        ['upper'])

    def test_complete_raw_instance(self):
        import datetime
        dt = datetime.datetime(2013, 1, 1)
        expected = ['time', 'timetz', 'timetuple']
        if is_py33:
            expected += ['timestamp']
        self.check_interpreter_complete('(dt - dt).ti', locals(), expected)

    def test_list(self):
        array = ['haha', 1]
        self.check_interpreter_complete('array[0].uppe', locals(), ['upper'])
        self.check_interpreter_complete('array[0].real', locals(), [])

        # something different, no index given, still just return the right
        self.check_interpreter_complete('array[int].real', locals(), ['real'])
        self.check_interpreter_complete('array[int()].real', locals(),
                                        ['real'])
        # inexistent index
        self.check_interpreter_complete('array[2].upper', locals(), ['upper'])

    def test_slice(self):
        class Foo():
            bar = []
        baz = 'xbarx'
        self.check_interpreter_complete('getattr(Foo, baz[1:-1]).append',
                                        locals(), ['append'])

    def test_getitem_side_effects(self):
        class Foo():
            def __getitem__(self, index):
                # possible side effects here, should therefore not call this.
                return index

        foo = Foo()
        self.check_interpreter_complete('foo[0].', locals(), [])
| {
"content_hash": "deb0c3fe5320ceab4e42e20d633f831e",
"timestamp": "",
"source": "github",
"line_count": 82,
"max_line_length": 77,
"avg_line_length": 37.548780487804876,
"alnum_prop": 0.44624878207210134,
"repo_name": "turiphro/dockerfiles",
"id": "36d85ecbae4216230de0462e25d46de1705f420e",
"size": "3079",
"binary": false,
"copies": "10",
"ref": "refs/heads/master",
"path": "devbox/etc/.vim/bundle/jedi-vim/jedi/test/test_api/test_interpreter.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Erlang",
"bytes": "2606"
},
{
"name": "JavaScript",
"bytes": "1064"
},
{
"name": "Makefile",
"bytes": "8491"
},
{
"name": "Python",
"bytes": "576671"
},
{
"name": "Shell",
"bytes": "2746"
},
{
"name": "VimL",
"bytes": "1652283"
}
],
"symlink_target": ""
} |
"""Tracker server for hosting .track torrent files.
"""
__license__ = "MIT"
__docformat__ = 'reStructuredText'
import socket
import socketserver
import threading
import sys
import os
import os.path
import urllib.parse
from ipaddress import IPv4Address,AddressValueError
import fcntl
import trackerfile
import apiutils
import sillycfg
# Module-level lock intended to guard concurrent .track file access across
# handler threads.
# NOTE(review): defined but never acquired in the visible handlers, which
# use fcntl.lockf on the files instead — confirm whether this is dead code.
flock = threading.Lock()
class TrackerServerHandler(socketserver.BaseRequestHandler):
"""The request handler for TrackerServer.
"""
def handle(self):
"""Convert peer requests into api_* methods.
This method is called when data is received. It interprets the
command-and-arguments structure dictated by the API into a method
to which the interpreted arguments are passed. Arguments are decoded
using :func:`apiutils.arg_decode` before being passed on, but they
remain strings.
"""
#get (MAX_MESSAGE_LENGTH + 1) bytes
data = str(self.request.recv(self.server.MAX_MESSAGE_LENGTH+1),
*apiutils.encoding_defaults)
#check if data is <= MAX_MESSAGE_LENGTH
if len(data) > self.server.MAX_MESSAGE_LENGTH:
print("Request too long")
# this is out-of-spec, but necessary
return self.exception( 'RequestTooLong', "Maximum message length " \
"is {}".format(self.server.MAX_MESSAGE_LENGTH) )
print("\nReceived {}".format(data))
#Retrieve command and args from message
match = apiutils.re_apicommand.match( data )
if not match:
print("Bad Request: {!r}".format(data))
# this is out-of-spec, but necessary
return self.exception( 'BadRequest', "Failed to parse request" )
command, args = match.group('command','args')
command = command.lower()
#parse arguments
args = args.split()
args = map( apiutils.arg_decode, args )
#find the desired method
api_method = getattr( self, 'api_{}'.format(command),None )
if not api_method:
print("Bad method: {}".format(command) )
#this is out-of-spec, but necessary
return self.exception( 'BadRequest', "No such method {!r}".format(
command) )
#try calling the method with arguments
try:
api_method( *args )
except TypeError as err:
if 'positional arguments' in str(err):
print("Bad Request: {}".format(err.args[0]))
return self.exception('BadRequest', err.args[0])
def api_createtracker(self, fname, fsize, descrip, md5, ip, port):
"""Implements the createtracker API command.
All arguments are expected to be strings, but *fsize* and *port* should
be castable to :class:`int` and *ip* should be castable to
:class:`~ipaddress.IPv4Address`.
"""
fname,descrip,md5 = map( str, (fname,descrip,md5) )
try:
fsize,port = int(fsize),int(port)
except ValueError:
print("Either fsize ({!r}) or port ({!r}) is not a valid " \
"integer".format(fsize,port))
self.request.sendall( b"<createtracker fail>" )
return
try:
ip = IPv4Address(ip)
except AddressValueError:
print("Malformed IP Address: {!r}".format(ip))
self.request.sendall( b"<createtracker fail>" )
return
tfname = "{}.track".format( fname )
tfpath = os.path.join( self.server.torrents_dir, tfname )
#check if .track file already exists
if os.path.exists( tfpath ):
print("Couldn't create tracker, already exists.")
self.request.sendall( b"<createtracker ferr>" )
return
#create a new trackerfile
try:
tf = trackerfile.trackerfile( fname, fsize, descrip, md5 )
except Exception as err:
print(err)
self.request.sendall( b"<createtracker fail>" )
return
print("Created new trackerfile instance for fname={!r}".format(fname))
#add creator as peer
try:
tf.updatePeer( ip, port, 0, fsize-1 )
except Exception as err:
print(err)
self.request.sendall( b"<createtracker fail>" )
return
print("Added {} (creator) to trackerfile".format( ip ))
#write tracker to file
with open(tfpath, 'w') as fl:
fcntl.lockf( fl, fcntl.LOCK_EX )
tf.writeTo( fl )
fcntl.lockf( fl, fcntl.LOCK_UN )
print("Wrote trackerfile to disk.")
self.request.sendall( b"<createtracker succ>" )
return
def api_updatetracker(self, fname, start_bytes, end_bytes, ip, port):
"""Implements the updatetracker API command.
All arguments are expected to be strings, but *start_bytes*,
*end_bytes*, and *port* should be castable to :class:`int` and
*ip* should be castable to :class:`~ipaddress.IPv4Address`.
"""
fname = str(fname)
try:
start_bytes,end_bytes,port = map(int, (start_bytes,end_bytes,port))
except ValueError:
print("Either start_bytes ({!r}), end_bytes ({!r}), or port ({!r})"\
" is not a valid integer".format(start_bytes,end_bytes,port))
self.request.sendall( b"<updatetracker fail>" )
return
try:
ip = IPv4Address(ip)
except AddressValueError:
print("Malformed IP Address: {!r}".format(ip))
self.request.sendall( b"<updatetracker fail>" )
return
tfname = "{}.track".format( fname )
tfpath = os.path.join( self.server.torrents_dir, tfname )
#check if .track file exists
if not os.path.exists( tfpath ):
print("Can't update tracker file, doesn't exist")
self.request.sendall( b"<updatetracker ferr>" )
return
#create trackerfile from existing tracker
try:
tf = trackerfile.trackerfile.fromPath( tfpath )
except Exception as err:
print(err)
self.request.sendall( b"<updatetracker fail>" )
return
#clean trackerfile
tf.clean()
#add peer peer
try:
tf.updatePeer( ip, port, start_bytes, end_bytes )
except Exception as err:
print(err)
self.request.sendall( b"<updatetracker fail>" )
return
print("Added {} (creator) to trackerfile".format( ip ))
#write tracker to file
with open(tfpath, 'w') as fl:
fcntl.lockf( fl, fcntl.LOCK_EX )
tf.writeTo( fl )
fcntl.lockf( fl, fcntl.LOCK_UN )
print("Wrote trackerfile to disk.")
self.request.sendall( b"<updatetracker succ>" )
return
def api_req(self, *_):
    """Implements the so-called "list" API command (<REQ LIST>).

    The method expects no arguments, but will accept them for compatibility.
    """
    dirname = self.server.torrents_dir
    try:
        entries = os.listdir(dirname)
    except Exception as err:
        print(err)
        # ! this is out-of-spec, but necessary
        self.exception(type(err).__name__, str(err))
        return
    # Only .track files are advertised to clients.
    tracklist = [entry for entry in entries if entry.endswith('.track')]
    self.request.sendall(bytes("<REP LIST {}>\n".format(len(tracklist)),
                               *apiutils.encoding_defaults))
    # One metadata line per tracker: index, filename, size, md5.
    for index, tfname in enumerate(tracklist):
        tf = trackerfile.trackerfile.fromPath(os.path.join(dirname, tfname))
        self.request.sendall(bytes("<{} {} {} {}>\n".format(index, tf.filename,
                                                            tf.filesize, tf.md5),
                                   *apiutils.encoding_defaults))
    self.request.sendall(b"<REP LIST END>\n")
    print("Successfully send REP response to {0[0]}:{0[1]}.".format(
        self.request.getpeername()))
    return
def api_get(self, track_fname):
    """Implements the GET API command.

    *track_fname* should be the name of a .track file in the torrents
    directory given in the server config file.
    """
    track_fname = str(track_fname)
    track_path = os.path.join(self.server.torrents_dir, track_fname)
    # Guard: the requested tracker file must exist on disk.
    if not os.path.exists(track_path):
        print("Can't get tracker file, doesn't exist")
        self.exception("FileNotFound", "No such file {!r}".format(
            track_fname))
        return
    # Parse the tracker file; report any parse failure to the client.
    try:
        tracker = trackerfile.trackerfile.fromPath(track_path)
    except Exception as exc:
        print(exc)
        return self.exception(type(exc).__name__, str(exc))
    # If cleaning removed stale peers, write the pruned file back to disk
    # under an exclusive lock before serving it.
    if tracker.clean():
        with open(track_path, 'w') as handle:
            fcntl.lockf(handle, fcntl.LOCK_EX)
            tracker.writeTo(handle)
            fcntl.lockf(handle, fcntl.LOCK_UN)
    # Stream the tracker file body between BEGIN/END markers.
    self.request.sendall(b"<REP GET BEGIN>\n")
    tracker.writeToSocket(self.request)
    self.request.sendall(bytes("<REP GET END {}>\n".format(tracker.md5),
                               *apiutils.encoding_defaults))
    print("Sent REP response for {0!r} to {1[0]}:{1[1]}".format(
        track_fname, self.request.getpeername()))
def api_hello(self, *_):
    """ Implements the out-of-spec API hello message.

    This API command takes no arguments (but accepts them) and simply
    responds HELLO back. Used for helping the peer determine connectivity
    and its public IP address.
    """
    sock = self.request
    sock.sendall(b"<HELLO>\n")
    print("Sent HELLO response to {0[0]}:{0[1]}".format(sock.getpeername()))
def exception(self, exceptionType, exceptionInfo=''):
    """Send an <EXCEPTION> response for *exceptionType* to the client.

    *exceptionInfo*, when non-empty, is URL-quoted and included as the
    response body on its own line.
    """
    kind = exceptionType.replace(' ', '')
    # Quote the detail text so it survives transport as a single token.
    body = "{}\n".format(urllib.parse.quote_plus(exceptionInfo)) if exceptionInfo else ''
    response = "<EXCEPTION {}>\n{}<EXCEPTION END>\n".format(kind, body)
    self.request.sendall(bytes(response, *apiutils.encoding_defaults))
class TrackerServer(socketserver.ThreadingMixIn, socketserver.TCPServer):
    """The socket server for handling incoming requests.

    Unlike the TCPServer constructor, this takes a *server_ip* string
    instead of a tuple address because we will read the port for the
    address from the config file.

    Arguments:
        server_ip (str): The IP to bind to and listen to.
        RequestHandlerClass (:class:`~socketserver.BaseRequestHandler`):
            Should be :class:`~.TrackerServerHandler`.
        bind_and_activate (bool, optional): automatically invokes server
            binding and activation procedures.
        config_file (str, optional): Path to server configuration file.
    """
    # Allow quick restarts without "address already in use" errors.
    allow_reuse_address = True
    # NOTE(review): after __init__ this holds the parsed ServerConfig
    # object, not the path string -- the name is misleading, but __main__
    # reads srv.config_file.listenPort, so confirm callers before renaming.
    config_file = None
    # Upper bound for one request message; presumably consumed by the
    # request handler -- not referenced inside this class itself.
    MAX_MESSAGE_LENGTH = 4096
    # Name-mangled backing attribute for the torrents_dir property.
    __torrents_dir = None

    def __init__(self, server_ip, RequestHandlerClass,
                 bind_and_activate=True,
                 config_file='./serverThreadConfig.cfg'):
        """TrackerServer initializer."""
        # Parse the config first: it supplies both the shared folder and
        # the listen port needed before the socket can be bound.
        self.config_file = sillycfg.ServerConfig.fromFile( config_file )
        self.torrents_dir = self.config_file.sharedFolder
        server_port = self.config_file.listenPort
        server_address = (server_ip,server_port)
        print("Server will bind to {}:{}".format(*server_address))
        # Binds (and, by default, activates) the listening socket.
        super(TrackerServer, self).__init__(server_address, RequestHandlerClass,
                                            bind_and_activate)

    @property
    def torrents_dir(self):
        # Absolute path of the directory holding .track files.
        return self.__torrents_dir

    @torrents_dir.setter
    def torrents_dir(self,val):
        # Normalize to an absolute path and ensure the directory exists
        # (dirmaker creates it; falsy return means creation failed).
        val = os.path.abspath(val)
        if not ( sillycfg.dirmaker(val) ):
            raise RuntimeError("Failed to make torrents directory")
        self.__torrents_dir = val
#
# Executable form for testing
#
if __name__ == '__main__':
    # Optional first CLI argument: IPv4 address to bind; anything that is
    # not a valid IPv4 address is silently ignored in favor of localhost.
    srv_ip = "localhost"
    if len(sys.argv) > 1:
        try:
            srv_ip = str(IPv4Address(sys.argv[1]))
        except AddressValueError:
            pass
    srv = TrackerServer(srv_ip, TrackerServerHandler)
    print("Listening on port {}".format(srv.config_file.listenPort))
    try:
        srv.serve_forever()
    except KeyboardInterrupt:
        # Ctrl-C is the normal way to stop the test server.
        print("\n" + "=" * 40)
        print("Bye, have a wonderful time! (Tracker server shutting down)")
    finally:
        srv.shutdown()
        print("Tracker server has shut down")
| {
"content_hash": "837dd5de6459f8b935379c9511e4bc32",
"timestamp": "",
"source": "github",
"line_count": 409,
"max_line_length": 80,
"avg_line_length": 34.520782396088016,
"alnum_prop": 0.5358028188965224,
"repo_name": "skgrush/py-mstorrent",
"id": "c8a2a81e4650de0d415d5d668f821e1076d3da41",
"size": "14166",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "server.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "617"
},
{
"name": "Python",
"bytes": "90161"
}
],
"symlink_target": ""
} |
from models import User
from templates import TextTemplate
from utilities import send_message
def send_campaign():
    """Broadcast a campaign message to a fixed list of recipient ids.

    NOTE(review): the long "SUBSCRIPTIONS" announcement that used to be
    built here was dead code -- it was immediately overwritten by the
    debug message below, and the ``User.query.all()`` fan-out was
    commented out.  The dead code has been removed; runtime behavior is
    unchanged.  Confirm whether the debug message text and the hard-coded
    recipient list are still intended before shipping.
    """
    print("IN CAMPAIGN")
    message = TextTemplate(text="FUCKING TEST")
    # Hard-coded debug recipients instead of querying all users.
    users = ["1708022462556195", "1145959985508112"]
    for user in users:
        send_message(message, id=user)
| {
"content_hash": "0a181e05872630f5d6f0085f9ce0ceb4",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 128,
"avg_line_length": 64.34615384615384,
"alnum_prop": 0.5845786013150029,
"repo_name": "mayukh18/BlindChat",
"id": "8cedb6c30c4a274e9b70b889cc519816f07bef9a",
"size": "1673",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "modules/campaign.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "41"
},
{
"name": "HTML",
"bytes": "3075"
},
{
"name": "Python",
"bytes": "59076"
}
],
"symlink_target": ""
} |
"""Home Assistant command line scripts."""
from __future__ import annotations
import argparse
import asyncio
from collections.abc import Sequence
import importlib
import logging
import os
import sys
from homeassistant import runner
from homeassistant.bootstrap import async_mount_local_lib_path
from homeassistant.config import get_default_config_dir
from homeassistant.requirements import pip_kwargs
from homeassistant.util.package import install_package, is_installed, is_virtual_env
# mypy: allow-untyped-defs, no-warn-return-any
def run(args: list) -> int:
    """Run a script."""
    # Discover runnable scripts: sub-packages and .py modules that live
    # alongside this file (excluding the package init and caches).
    path = os.path.dirname(__file__)
    scripts = []
    for entry in os.listdir(path):
        if entry == "__pycache__":
            continue
        if os.path.isdir(os.path.join(path, entry)):
            scripts.append(entry)
        elif entry.endswith(".py") and entry != "__init__.py":
            scripts.append(entry[:-3])

    # Guard clauses: missing or unknown script name.
    if not args:
        print("Please specify a script to run.")
        print("Available scripts:", ", ".join(scripts))
        return 1

    if args[0] not in scripts:
        print("Invalid script specified.")
        print("Available scripts:", ", ".join(scripts))
        return 1

    script = importlib.import_module(f"homeassistant.scripts.{args[0]}")

    config_dir = extract_config_dir()
    loop = asyncio.get_event_loop()
    # Outside a virtualenv, user-site deps live under the config dir.
    if not is_virtual_env():
        loop.run_until_complete(async_mount_local_lib_path(config_dir))

    # Install any requirements the target script declares.
    _pip_kwargs = pip_kwargs(config_dir)
    logging.basicConfig(stream=sys.stdout, level=logging.INFO)
    for req in getattr(script, "REQUIREMENTS", []):
        if is_installed(req):
            continue
        if not install_package(req, **_pip_kwargs):
            print("Aborting script, could not install dependency", req)
            return 1

    asyncio.set_event_loop_policy(runner.HassEventLoopPolicy(False))

    return script.run(args[1:])  # type: ignore
def extract_config_dir(args: Sequence[str] | None = None) -> str:
    """Extract the config dir from the arguments or get the default."""
    parser = argparse.ArgumentParser(add_help=False)
    parser.add_argument("-c", "--config", default=None)
    known_args = parser.parse_known_args(args)[0]
    # A relative --config value is anchored at the current working directory.
    if known_args.config:
        return os.path.join(os.getcwd(), known_args.config)
    return get_default_config_dir()
| {
"content_hash": "bfd561578806830cb43e6861c468f825",
"timestamp": "",
"source": "github",
"line_count": 79,
"max_line_length": 84,
"avg_line_length": 29.962025316455698,
"alnum_prop": 0.6556822982678496,
"repo_name": "kennedyshead/home-assistant",
"id": "b31fc7181731f0462e1090890025bcaa92c79d40",
"size": "2367",
"binary": false,
"copies": "1",
"ref": "refs/heads/dev",
"path": "homeassistant/scripts/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "1795"
},
{
"name": "Python",
"bytes": "33970989"
},
{
"name": "Shell",
"bytes": "4900"
}
],
"symlink_target": ""
} |
import os
from harmony.settings.common import *

# Production-safe baseline; the environment blocks below may override these.
DEBUG = False
ALLOWED_HOSTS = ['.fas.harvard.edu'] # Required when Debug=False
FORCE_SCRIPT_NAME = None
STATIC_URL = '/static/'

# Configuration specific to shared hosting PROD/DEV environments,
# selected by the SERVER_NAME environment variable set by the web server.
# PRODUCTION
if os.environ.get('SERVER_NAME') == 'harmonylab.fas.harvard.edu':
    FORCE_SCRIPT_NAME = '/'
    STATIC_URL = '/static/'
    DEBUG = False
# DEVELOPMENT
elif os.environ.get('SERVER_NAME') == 'sites.dev.fas.harvard.edu':
    FORCE_SCRIPT_NAME = '/~harmonylab/'
    STATIC_URL = '/~harmonylab/static/'
    DEBUG = True

# Update the requirejs configuration to use the modified STATIC_URL
# (requirejs and ROOT_DIR presumably come from the wildcard import of
# harmony.settings.common above -- confirm against that module).
REQUIREJS_DEBUG, REQUIREJS_CONFIG = requirejs.configure(ROOT_DIR, STATIC_URL)

# Configuration common to both PROD/DEV
CONFIG_DIR = os.path.join(ROOT_DIR, 'config')

# These are sensitive values that should be retrieved from separate configuration files.
# Note that these config files should *NEVER* be stored in version control.
# Both reads happen at import time and will raise if the files are missing.
SECRET_KEY = None
with open(os.path.join(CONFIG_DIR, 'django_secret.txt')) as f:
    SECRET_KEY = f.read().strip()

LTI_OAUTH_CREDENTIALS = {}
# NOTE(review): split(':', 2) allows up to three parts per line; a secret
# containing a second ':' yields a 3-tuple and makes dict() raise.
# Confirm the credential file format is strictly "key:secret".
with open(os.path.join(CONFIG_DIR, 'lti_oauth_credentials.txt')) as f:
    LTI_OAUTH_CREDENTIALS = dict([tuple(x.strip().split(':', 2)) for x in f.readlines()])
| {
"content_hash": "6debd7f42b06cad843f682a5e3a69a75",
"timestamp": "",
"source": "github",
"line_count": 35,
"max_line_length": 89,
"avg_line_length": 36.542857142857144,
"alnum_prop": 0.7185301016419078,
"repo_name": "Harvard-ATG/HarmonyLab",
"id": "6a71ec809890947c3cf7a276f84b88eedd55536f",
"size": "1308",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "harmony/settings/sharedhosting.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "111347"
},
{
"name": "HTML",
"bytes": "16403"
},
{
"name": "JavaScript",
"bytes": "2344772"
},
{
"name": "LilyPond",
"bytes": "83523"
},
{
"name": "Python",
"bytes": "75106"
},
{
"name": "Shell",
"bytes": "9331"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
import copy
import sys
from functools import update_wrapper
from django.utils.six.moves import zip
import django.db.models.manager # Imported to register signal handler.
from django.conf import settings
from django.core.exceptions import (ObjectDoesNotExist,
MultipleObjectsReturned, FieldError, ValidationError, NON_FIELD_ERRORS)
from django.db.models.fields import AutoField, FieldDoesNotExist
from django.db.models.fields.related import (ForeignObjectRel, ManyToOneRel,
OneToOneField, add_lazy_relation)
from django.db import (router, transaction, DatabaseError,
DEFAULT_DB_ALIAS)
from django.db.models.query import Q
from django.db.models.query_utils import DeferredAttribute, deferred_class_factory
from django.db.models.deletion import Collector
from django.db.models.options import Options
from django.db.models import signals
from django.db.models.loading import register_models, get_model
from django.utils.translation import ugettext_lazy as _
from django.utils.functional import curry
from django.utils.encoding import force_str, force_text
from django.utils import six
from django.utils.text import get_text_list, capfirst
def subclass_exception(name, parents, module, attached_to=None):
    """
    Create exception subclass. Used by ModelBase below.

    If 'attached_to' is supplied, the exception will be created in a way that
    allows it to be pickled, assuming the returned exception class will be added
    as an attribute to the 'attached_to' class.
    """
    namespace = {'__module__': module}
    if attached_to is not None:
        def __reduce__(self):
            # Exceptions keep their state in self.args rather than in
            # __dict__, so pickling round-trips through the class attribute
            # on the owner plus the args tuple.
            return (unpickle_inner_exception, (attached_to, name), self.args)

        def __setstate__(self, args):
            self.args = args

        namespace['__reduce__'] = __reduce__
        namespace['__setstate__'] = __setstate__
    return type(name, parents, namespace)
class ModelBase(type):
    """
    Metaclass for all models.

    Builds the Options (_meta), attaches fields/managers via
    contribute_to_class, wires up inheritance (abstract, proxy and
    multi-table), and registers the finished class with the app cache.
    """
    def __new__(cls, name, bases, attrs):
        super_new = super(ModelBase, cls).__new__

        # six.with_metaclass() inserts an extra class called 'NewBase' in the
        # inheritance tree: Model -> NewBase -> object. But the initialization
        # should be executed only once for a given model class.

        # attrs will never be empty for classes declared in the standard way
        # (ie. with the `class` keyword). This is quite robust.
        if name == 'NewBase' and attrs == {}:
            return super_new(cls, name, bases, attrs)

        # Also ensure initialization is only performed for subclasses of Model
        # (excluding Model class itself).
        parents = [b for b in bases if isinstance(b, ModelBase) and
                   not (b.__name__ == 'NewBase' and b.__mro__ == (b, object))]
        if not parents:
            return super_new(cls, name, bases, attrs)

        # Create the class.
        module = attrs.pop('__module__')
        new_class = super_new(cls, name, bases, {'__module__': module})
        attr_meta = attrs.pop('Meta', None)
        abstract = getattr(attr_meta, 'abstract', False)
        # An inner Meta declared on this class wins; otherwise an inherited
        # Meta (if any) is used.
        if not attr_meta:
            meta = getattr(new_class, 'Meta', None)
        else:
            meta = attr_meta
        base_meta = getattr(new_class, '_meta', None)

        if getattr(meta, 'app_label', None) is None:
            # Figure out the app_label by looking one level up.
            # For 'django.contrib.sites.models', this would be 'sites'.
            model_module = sys.modules[new_class.__module__]
            kwargs = {"app_label": model_module.__name__.split('.')[-2]}
        else:
            kwargs = {}

        new_class.add_to_class('_meta', Options(meta, **kwargs))
        if not abstract:
            # Concrete models get per-class DoesNotExist /
            # MultipleObjectsReturned exceptions that inherit from the
            # corresponding exceptions of any concrete parents.
            new_class.add_to_class('DoesNotExist', subclass_exception(str('DoesNotExist'),
                    tuple(x.DoesNotExist
                          for x in parents if hasattr(x, '_meta') and not x._meta.abstract)
                    or (ObjectDoesNotExist,),
                    module, attached_to=new_class))
            new_class.add_to_class('MultipleObjectsReturned', subclass_exception(str('MultipleObjectsReturned'),
                    tuple(x.MultipleObjectsReturned
                          for x in parents if hasattr(x, '_meta') and not x._meta.abstract)
                    or (MultipleObjectsReturned,),
                    module, attached_to=new_class))
            if base_meta and not base_meta.abstract:
                # Non-abstract child classes inherit some attributes from their
                # non-abstract parent (unless an ABC comes before it in the
                # method resolution order).
                if not hasattr(meta, 'ordering'):
                    new_class._meta.ordering = base_meta.ordering
                if not hasattr(meta, 'get_latest_by'):
                    new_class._meta.get_latest_by = base_meta.get_latest_by

        is_proxy = new_class._meta.proxy

        # If the model is a proxy, ensure that the base class
        # hasn't been swapped out.
        if is_proxy and base_meta and base_meta.swapped:
            raise TypeError("%s cannot proxy the swapped model '%s'." % (name, base_meta.swapped))

        if getattr(new_class, '_default_manager', None):
            if not is_proxy:
                # Multi-table inheritance doesn't inherit default manager from
                # parents.
                new_class._default_manager = None
                new_class._base_manager = None
            else:
                # Proxy classes do inherit parent's default manager, if none is
                # set explicitly.
                new_class._default_manager = new_class._default_manager._copy_to_model(new_class)
                new_class._base_manager = new_class._base_manager._copy_to_model(new_class)

        # Bail out early if we have already created this class.
        m = get_model(new_class._meta.app_label, name,
                      seed_cache=False, only_installed=False)
        if m is not None:
            return m

        # Add all attributes to the class.
        for obj_name, obj in attrs.items():
            new_class.add_to_class(obj_name, obj)

        # All the fields of any type declared on this model
        new_fields = new_class._meta.local_fields + \
                     new_class._meta.local_many_to_many + \
                     new_class._meta.virtual_fields
        field_names = set([f.name for f in new_fields])

        # Basic setup for proxy models.
        if is_proxy:
            base = None
            for parent in [cls for cls in parents if hasattr(cls, '_meta')]:
                if parent._meta.abstract:
                    if parent._meta.fields:
                        raise TypeError("Abstract base class containing model fields not permitted for proxy model '%s'." % name)
                    else:
                        continue
                if base is not None:
                    raise TypeError("Proxy model '%s' has more than one non-abstract model base class." % name)
                else:
                    base = parent
            if base is None:
                raise TypeError("Proxy model '%s' has no non-abstract model base class." % name)
            if (new_class._meta.local_fields or
                    new_class._meta.local_many_to_many):
                raise FieldError("Proxy model '%s' contains model fields." % name)
            new_class._meta.setup_proxy(base)
            new_class._meta.concrete_model = base._meta.concrete_model
        else:
            new_class._meta.concrete_model = new_class

        # Do the appropriate setup for any model parents.
        o2o_map = dict([(f.rel.to, f) for f in new_class._meta.local_fields
                        if isinstance(f, OneToOneField)])

        for base in parents:
            original_base = base
            if not hasattr(base, '_meta'):
                # Things without _meta aren't functional models, so they're
                # uninteresting parents.
                continue

            parent_fields = base._meta.local_fields + base._meta.local_many_to_many
            # Check for clashes between locally declared fields and those
            # on the base classes (we cannot handle shadowed fields at the
            # moment).
            for field in parent_fields:
                if field.name in field_names:
                    raise FieldError('Local field %r in class %r clashes '
                                     'with field of similar name from '
                                     'base class %r' %
                                     (field.name, name, base.__name__))
            if not base._meta.abstract:
                # Concrete classes...
                base = base._meta.concrete_model
                if base in o2o_map:
                    field = o2o_map[base]
                elif not is_proxy:
                    # Multi-table inheritance: implicit parent link field.
                    attr_name = '%s_ptr' % base._meta.model_name
                    field = OneToOneField(base, name=attr_name,
                                          auto_created=True, parent_link=True)
                    new_class.add_to_class(attr_name, field)
                else:
                    field = None
                new_class._meta.parents[base] = field
            else:
                # .. and abstract ones.
                for field in parent_fields:
                    new_class.add_to_class(field.name, copy.deepcopy(field))

                # Pass any non-abstract parent classes onto child.
                new_class._meta.parents.update(base._meta.parents)

            # Inherit managers from the abstract base classes.
            new_class.copy_managers(base._meta.abstract_managers)

            # Proxy models inherit the non-abstract managers from their base,
            # unless they have redefined any of them.
            if is_proxy:
                new_class.copy_managers(original_base._meta.concrete_managers)

            # Inherit virtual fields (like GenericForeignKey) from the parent
            # class
            for field in base._meta.virtual_fields:
                if base._meta.abstract and field.name in field_names:
                    raise FieldError('Local field %r in class %r clashes '\
                                     'with field of similar name from '\
                                     'abstract base class %r' % \
                                     (field.name, name, base.__name__))
                new_class.add_to_class(field.name, copy.deepcopy(field))

        if abstract:
            # Abstract base models can't be instantiated and don't appear in
            # the list of models for an app. We do the final setup for them a
            # little differently from normal models.
            attr_meta.abstract = False
            new_class.Meta = attr_meta
            return new_class

        new_class._prepare()
        register_models(new_class._meta.app_label, new_class)

        # Because of the way imports happen (recursively), we may or may not be
        # the first time this model tries to register with the framework. There
        # should only be one class for each model, so we always return the
        # registered version.
        return get_model(new_class._meta.app_label, name,
                         seed_cache=False, only_installed=False)

    def copy_managers(cls, base_managers):
        # This is in-place sorting of an Options attribute, but that's fine.
        base_managers.sort()
        for _, mgr_name, manager in base_managers:
            val = getattr(cls, mgr_name, None)
            # Only copy managers that are missing or identical (i.e. not
            # overridden locally).
            if not val or val is manager:
                new_manager = manager._copy_to_model(cls)
                cls.add_to_class(mgr_name, new_manager)

    def add_to_class(cls, name, value):
        # Objects that know how to attach themselves (fields, managers,
        # Options, ...) do so via contribute_to_class; everything else is a
        # plain attribute assignment.
        if hasattr(value, 'contribute_to_class'):
            value.contribute_to_class(cls, name)
        else:
            setattr(cls, name, value)

    def _prepare(cls):
        """
        Creates some methods once self._meta has been populated.
        """
        opts = cls._meta
        opts._prepare(cls)

        if opts.order_with_respect_to:
            cls.get_next_in_order = curry(cls._get_next_or_previous_in_order, is_next=True)
            cls.get_previous_in_order = curry(cls._get_next_or_previous_in_order, is_next=False)

            # defer creating accessors on the foreign class until we are
            # certain it has been created
            def make_foreign_order_accessors(field, model, cls):
                setattr(
                    field.rel.to,
                    'get_%s_order' % cls.__name__.lower(),
                    curry(method_get_order, cls)
                )
                setattr(
                    field.rel.to,
                    'set_%s_order' % cls.__name__.lower(),
                    curry(method_set_order, cls)
                )
            add_lazy_relation(
                cls,
                opts.order_with_respect_to,
                opts.order_with_respect_to.rel.to,
                make_foreign_order_accessors
            )

        # Give the class a docstring -- its definition.
        if cls.__doc__ is None:
            cls.__doc__ = "%s(%s)" % (cls.__name__, ", ".join([f.attname for f in opts.fields]))

        if hasattr(cls, 'get_absolute_url'):
            cls.get_absolute_url = update_wrapper(curry(get_absolute_url, opts, cls.get_absolute_url),
                                                  cls.get_absolute_url)

        signals.class_prepared.send(sender=cls)
class ModelState(object):
    """Per-instance bookkeeping attached to every model instance."""

    def __init__(self, db=None):
        # Alias of the database this instance came from (None if unknown/new).
        self.db = db
        # True until the instance has been saved at least once.  Uniqueness
        # validation treats such instances as new, as-yet-unsaved objects --
        # necessary for models with explicit (non-auto) primary keys.  This
        # impacts validation only; it has no effect on the actual save.
        self.adding = True
class Model(six.with_metaclass(ModelBase)):
_deferred = False
def __init__(self, *args, **kwargs):
    """Populate field attributes from positional and/or keyword arguments,
    firing the pre_init and post_init signals around the work."""
    signals.pre_init.send(sender=self.__class__, args=args, kwargs=kwargs)

    # Set up the storage for instance state
    self._state = ModelState()

    # There is a rather weird disparity here; if kwargs, it's set, then args
    # overrides it. It should be one or the other; don't duplicate the work
    # The reason for the kwargs check is that standard iterator passes in by
    # args, and instantiation for iteration is 33% faster.
    args_len = len(args)
    if args_len > len(self._meta.concrete_fields):
        # Daft, but matches old exception sans the err msg.
        raise IndexError("Number of args exceeds number of fields")

    if not kwargs:
        fields_iter = iter(self._meta.concrete_fields)
        # The ordering of the zip calls matter - zip throws StopIteration
        # when an iter throws it. So if the first iter throws it, the second
        # is *not* consumed. We rely on this, so don't change the order
        # without changing the logic.
        for val, field in zip(args, fields_iter):
            setattr(self, field.attname, val)
    else:
        # Slower, kwargs-ready version.
        fields_iter = iter(self._meta.fields)
        for val, field in zip(args, fields_iter):
            setattr(self, field.attname, val)
            # A field given positionally must not also be given by keyword.
            kwargs.pop(field.name, None)
            # Maintain compatibility with existing calls.
            if isinstance(field.rel, ManyToOneRel):
                kwargs.pop(field.attname, None)

    # Now we're left with the unprocessed fields that *must* come from
    # keywords, or default.
    for field in fields_iter:
        is_related_object = False
        # This slightly odd construct is so that we can access any
        # data-descriptor object (DeferredAttribute) without triggering its
        # __get__ method.
        if (field.attname not in kwargs and
                (isinstance(self.__class__.__dict__.get(field.attname), DeferredAttribute)
                 or field.column is None)):
            # This field will be populated on request.
            continue
        if kwargs:
            if isinstance(field.rel, ForeignObjectRel):
                try:
                    # Assume object instance was passed in.
                    rel_obj = kwargs.pop(field.name)
                    is_related_object = True
                except KeyError:
                    try:
                        # Object instance wasn't passed in -- must be an ID.
                        val = kwargs.pop(field.attname)
                    except KeyError:
                        val = field.get_default()
                else:
                    # Object instance was passed in. Special case: You can
                    # pass in "None" for related objects if it's allowed.
                    if rel_obj is None and field.null:
                        val = None
            else:
                try:
                    val = kwargs.pop(field.attname)
                except KeyError:
                    # This is done with an exception rather than the
                    # default argument on pop because we don't want
                    # get_default() to be evaluated, and then not used.
                    # Refs #12057.
                    val = field.get_default()
        else:
            val = field.get_default()
        if is_related_object:
            # If we are passed a related instance, set it using the
            # field.name instead of field.attname (e.g. "user" instead of
            # "user_id") so that the object gets properly cached (and type
            # checked) by the RelatedObjectDescriptor.
            setattr(self, field.name, rel_obj)
        else:
            setattr(self, field.attname, val)

    if kwargs:
        # Any leftover keywords may target plain properties on the class;
        # anything else is an error.
        for prop in list(kwargs):
            try:
                if isinstance(getattr(self.__class__, prop), property):
                    setattr(self, prop, kwargs.pop(prop))
            except AttributeError:
                pass
        if kwargs:
            raise TypeError("'%s' is an invalid keyword argument for this function" % list(kwargs)[0])
    super(Model, self).__init__()
    signals.post_init.send(sender=self.__class__, instance=self)
def __repr__(self):
    """Return '<ClassName: str(self)>', tolerating malformed unicode."""
    try:
        described = six.text_type(self)
    except (UnicodeEncodeError, UnicodeDecodeError):
        # A model's __str__/__unicode__ may choke on bad data; a repr
        # should never raise, so degrade gracefully.
        described = '[Bad Unicode data]'
    return force_str('<%s: %s>' % (self.__class__.__name__, described))
def __str__(self):
    # On Python 2, a model that defines __unicode__ has its text
    # representation encoded to UTF-8 bytes (py2 __str__ returns bytes).
    if not six.PY3 and hasattr(self, '__unicode__'):
        # Guard against @python_2_unicode_compatible applied to a class
        # that never defined __str__, which would alias __unicode__ back
        # to this very method and recurse.
        if type(self).__unicode__ == Model.__str__:
            klass_name = type(self).__name__
            raise RuntimeError("%s.__unicode__ is aliased to __str__. Did"
                               " you apply @python_2_unicode_compatible"
                               " without defining __str__?" % klass_name)
        return force_text(self).encode('utf-8')
    # Default representation when no custom __str__/__unicode__ exists.
    return '%s object' % self.__class__.__name__
def __eq__(self, other):
    """Instances are equal iff they share the class and the primary key."""
    if not isinstance(other, self.__class__):
        return False
    return self._get_pk_val() == other._get_pk_val()
def __ne__(self, other):
    """Inverse of __eq__ (required explicitly on Python 2)."""
    is_equal = self.__eq__(other)
    return not is_equal
def __hash__(self):
    """Hash on the primary-key value, mirroring __eq__."""
    pk_value = self._get_pk_val()
    return hash(pk_value)
def __reduce__(self):
    """
    Provides pickling support. Normally, this just dispatches to Python's
    standard handling. However, for models with deferred field loading, we
    need to do things manually, as they're dynamically created classes and
    only module-level classes can be pickled by the default path.
    """
    if not self._deferred:
        return super(Model, self).__reduce__()
    data = self.__dict__
    # Collect the names of fields that are still deferred so they can be
    # re-deferred when the dynamic class is rebuilt during unpickling.
    defers = []
    for field in self._meta.fields:
        if isinstance(self.__class__.__dict__.get(field.attname),
                      DeferredAttribute):
            defers.append(field.attname)
    # Rebuild from the concrete model the deferred class proxies for.
    model = self._meta.proxy_for_model
    return (model_unpickle, (model, defers), data)
def _get_pk_val(self, meta=None):
    """Return the primary-key value, using *meta* (default self._meta)."""
    opts = meta or self._meta
    return getattr(self, opts.pk.attname)
def _set_pk_val(self, value):
    """Assign *value* to the primary-key attribute."""
    return setattr(self, self._meta.pk.attname, value)

# 'pk' is the canonical alias for whatever field is the primary key.
pk = property(_get_pk_val, _set_pk_val)
def serializable_value(self, field_name):
    """
    Return the serializable value of the field named *field_name*.

    For a foreign key this is the raw id value rather than the related
    object; for a name with no matching Field, the plain attribute value
    is returned.  Used by serializers and form output -- ordinary code
    should just access the attribute directly.
    """
    try:
        field = self._meta.get_field_by_name(field_name)[0]
    except FieldDoesNotExist:
        # Not a model field (e.g. a property): hand back the attribute.
        return getattr(self, field_name)
    # field.attname is the column attribute ("user_id" rather than "user").
    return getattr(self, field.attname)
def save(self, force_insert=False, force_update=False, using=None,
         update_fields=None):
    """
    Saves the current instance. Override this in a subclass if you want to
    control the saving process.

    The 'force_insert' and 'force_update' parameters can be used to insist
    that the "save" must be an SQL insert or update (or equivalent for
    non-SQL backends), respectively. Normally, they should not be set.
    """
    using = using or router.db_for_write(self.__class__, instance=self)
    if force_insert and (force_update or update_fields):
        raise ValueError("Cannot force both insert and updating in model saving.")
    if update_fields is not None:
        # If update_fields is empty, skip the save. We do also check for
        # no-op saves later on for inheritance cases. This bailout is
        # still needed for skipping signal sending.
        if len(update_fields) == 0:
            return

        update_fields = frozenset(update_fields)
        # Validate that every requested name is a local, non-pk,
        # non-m2m field (by name or by attname).
        field_names = set()

        for field in self._meta.fields:
            if not field.primary_key:
                field_names.add(field.name)

                if field.name != field.attname:
                    field_names.add(field.attname)

        non_model_fields = update_fields.difference(field_names)

        if non_model_fields:
            raise ValueError("The following fields do not exist in this "
                             "model or are m2m fields: %s"
                             % ', '.join(non_model_fields))

    # If saving to the same database, and this model is deferred, then
    # automatically do a "update_fields" save on the loaded fields.
    elif not force_insert and self._deferred and using == self._state.db:
        field_names = set()
        for field in self._meta.concrete_fields:
            if not field.primary_key and not hasattr(field, 'through'):
                field_names.add(field.attname)
        # Fields still deferred (never loaded) must not be written back.
        deferred_fields = [
            f.attname for f in self._meta.fields
            if f.attname not in self.__dict__
               and isinstance(self.__class__.__dict__[f.attname],
                              DeferredAttribute)]

        loaded_fields = field_names.difference(deferred_fields)
        if loaded_fields:
            update_fields = frozenset(loaded_fields)

    self.save_base(using=using, force_insert=force_insert,
                   force_update=force_update, update_fields=update_fields)
# Flag consumed by e.g. the template system to refuse calling save there.
save.alters_data = True
def save_base(self, raw=False, force_insert=False,
              force_update=False, using=None, update_fields=None):
    """
    Handles the parts of saving which should be done only once per save,
    yet need to be done in raw saves, too. This includes some sanity
    checks and signal sending.

    The 'raw' argument is telling save_base not to save any parent
    models and not to do any changes to the values before save. This
    is used by fixture loading.
    """
    using = using or router.db_for_write(self.__class__, instance=self)
    assert not (force_insert and (force_update or update_fields))
    assert update_fields is None or len(update_fields) > 0
    cls = origin = self.__class__
    # Skip proxies, but keep the origin as the proxy model.
    if cls._meta.proxy:
        cls = cls._meta.concrete_model
    meta = cls._meta
    # Auto-created (e.g. intermediary m2m) models emit no save signals.
    if not meta.auto_created:
        signals.pre_save.send(sender=origin, instance=self, raw=raw, using=using,
                              update_fields=update_fields)
    # Parent rows and this table's row are written in one transaction.
    with transaction.commit_on_success_unless_managed(using=using, savepoint=False):
        if not raw:
            self._save_parents(cls, using, update_fields)
        updated = self._save_table(raw, cls, force_insert, force_update, using, update_fields)
    # Store the database on which the object was saved
    self._state.db = using
    # Once saved, this is no longer a to-be-added instance.
    self._state.adding = False

    # Signal that the save is complete
    if not meta.auto_created:
        signals.post_save.send(sender=origin, instance=self, created=(not updated),
                               update_fields=update_fields, raw=raw, using=using)

# Flag consumed by e.g. the template system to refuse calling this there.
save_base.alters_data = True
    def _save_parents(self, cls, using, update_fields):
        """
        Saves all the parents of cls using values from self.

        Recurses up the multi-table inheritance chain so that the topmost
        parent row is written first and its primary key is available when
        the child rows referencing it are saved.
        """
        meta = cls._meta
        for parent, field in meta.parents.items():
            # Make sure the link fields are synced between parent and self.
            if (field and getattr(self, parent._meta.pk.attname) is None
                    and getattr(self, field.attname) is not None):
                setattr(self, parent._meta.pk.attname, getattr(self, field.attname))
            self._save_parents(cls=parent, using=using, update_fields=update_fields)
            self._save_table(cls=parent, using=using, update_fields=update_fields)
            # Set the parent's PK value to self.
            if field:
                setattr(self, field.attname, self._get_pk_val(parent._meta))
                # Since we didn't have an instance of the parent handy set
                # attname directly, bypassing the descriptor. Invalidate
                # the related object cache, in case it's been accidentally
                # populated. A fresh instance will be re-built from the
                # database if necessary.
                cache_name = field.get_cache_name()
                if hasattr(self, cache_name):
                    delattr(self, cache_name)
    def _save_table(self, raw=False, cls=None, force_insert=False,
                    force_update=False, using=None, update_fields=None):
        """
        Does the heavy-lifting involved in saving. Updates or inserts the data
        for a single table.

        Returns True when an UPDATE touched an existing row, False when an
        INSERT was performed instead.
        """
        meta = cls._meta
        non_pks = [f for f in meta.local_concrete_fields if not f.primary_key]
        if update_fields:
            # Restrict the columns written to the requested fields only.
            non_pks = [f for f in non_pks
                       if f.name in update_fields or f.attname in update_fields]
        pk_val = self._get_pk_val(meta)
        pk_set = pk_val is not None
        if not pk_set and (force_update or update_fields):
            raise ValueError("Cannot force an update in save() with no primary key.")
        updated = False
        # If possible, try an UPDATE. If that doesn't update anything, do an INSERT.
        if pk_set and not force_insert:
            base_qs = cls._base_manager.using(using)
            # Raw (fixture) saves use attribute values verbatim; normal saves
            # let each field pre-process its value via pre_save().
            values = [(f, None, (getattr(self, f.attname) if raw else f.pre_save(self, False)))
                      for f in non_pks]
            updated = self._do_update(base_qs, using, pk_val, values, update_fields)
            if force_update and not updated:
                raise DatabaseError("Forced update did not affect any rows.")
            if update_fields and not updated:
                raise DatabaseError("Save with update_fields did not affect any rows.")
        if not updated:
            if meta.order_with_respect_to:
                # If this is a model with an order_with_respect_to
                # autopopulate the _order field
                field = meta.order_with_respect_to
                order_value = cls._base_manager.using(using).filter(
                    **{field.name: getattr(self, field.attname)}).count()
                self._order = order_value
            fields = meta.local_concrete_fields
            if not pk_set:
                # Let the database assign the auto PK value.
                fields = [f for f in fields if not isinstance(f, AutoField)]
            update_pk = bool(meta.has_auto_field and not pk_set)
            result = self._do_insert(cls._base_manager, using, fields, update_pk, raw)
            if update_pk:
                setattr(self, meta.pk.attname, result)
        return updated
def _do_update(self, base_qs, using, pk_val, values, update_fields):
"""
This method will try to update the model. If the model was updated (in
the sense that an update query was done and a matching row was found
from the DB) the method will return True.
"""
if not values:
# We can end up here when saving a model in inheritance chain where
# update_fields doesn't target any field in current model. In that
# case we just say the update succeeded. Another case ending up here
# is a model with just PK - in that case check that the PK still
# exists.
return update_fields is not None or base_qs.filter(pk=pk_val).exists()
else:
return base_qs.filter(pk=pk_val)._update(values) > 0
def _do_insert(self, manager, using, fields, update_pk, raw):
"""
Do an INSERT. If update_pk is defined then this method should return
the new pk for the model.
"""
return manager._insert([self], fields=fields, return_id=update_pk,
using=using, raw=raw)
    def delete(self, using=None):
        """
        Deletes this instance from the database chosen by the router (or
        ``using``), along with any dependent objects gathered by Collector.
        Requires a saved instance (non-None primary key).
        """
        using = using or router.db_for_write(self.__class__, instance=self)
        assert self._get_pk_val() is not None, "%s object can't be deleted because its %s attribute is set to None." % (self._meta.object_name, self._meta.pk.attname)
        collector = Collector(using=using)
        collector.collect([self])
        collector.delete()
    delete.alters_data = True
def _get_FIELD_display(self, field):
value = getattr(self, field.attname)
return force_text(dict(field.flatchoices).get(value, value), strings_only=True)
    def _get_next_or_previous_by_FIELD(self, field, is_next, **kwargs):
        """
        Curried implementation behind get_next_by_FOO/get_previous_by_FOO.
        Orders by (field, pk) so that rows with equal field values are still
        deterministically ordered, using the pk as a tie-breaker.
        """
        if not self.pk:
            raise ValueError("get_next/get_previous cannot be used on unsaved objects.")
        op = 'gt' if is_next else 'lt'
        order = '' if is_next else '-'
        param = force_text(getattr(self, field.attname))
        # Strictly greater/less on the field, OR an equal field value with a
        # greater/less pk (the tie-break case).
        q = Q(**{'%s__%s' % (field.name, op): param})
        q = q | Q(**{field.name: param, 'pk__%s' % op: self.pk})
        qs = self.__class__._default_manager.using(self._state.db).filter(**kwargs).filter(q).order_by('%s%s' % (order, field.name), '%spk' % order)
        try:
            return qs[0]
        except IndexError:
            raise self.DoesNotExist("%s matching query does not exist." % self.__class__._meta.object_name)
    def _get_next_or_previous_in_order(self, is_next):
        """
        Returns the neighbouring object in an order_with_respect_to
        sequence, caching the result on the instance under a per-direction
        attribute name.
        """
        cachename = "__%s_order_cache" % is_next
        if not hasattr(self, cachename):
            op = 'gt' if is_next else 'lt'
            order = '_order' if is_next else '-_order'
            order_field = self._meta.order_with_respect_to
            # Restrict to peers sharing this object's "with respect to"
            # value, then compare _order against our own _order via a
            # nested values() subquery on our pk.
            obj = self._default_manager.filter(**{
                order_field.name: getattr(self, order_field.attname)
            }).filter(**{
                '_order__%s' % op: self._default_manager.values('_order').filter(**{
                    self._meta.pk.name: self.pk
                })
            }).order_by(order)[:1].get()
            setattr(self, cachename, obj)
        return getattr(self, cachename)
    def prepare_database_save(self, unused):
        # When this instance is the value assigned to a related field being
        # saved, persist only its primary key.
        return self.pk
    def clean(self):
        """
        Hook for doing any extra model-wide validation after clean() has been
        called on every field by self.clean_fields. Any ValidationError raised
        by this method will not be associated with a particular field; it will
        have a special-case association with the field defined by NON_FIELD_ERRORS.
        """
        pass
def validate_unique(self, exclude=None):
"""
Checks unique constraints on the model and raises ``ValidationError``
if any failed.
"""
unique_checks, date_checks = self._get_unique_checks(exclude=exclude)
errors = self._perform_unique_checks(unique_checks)
date_errors = self._perform_date_checks(date_checks)
for k, v in date_errors.items():
errors.setdefault(k, []).extend(v)
if errors:
raise ValidationError(errors)
    def _get_unique_checks(self, exclude=None):
        """
        Gather a list of checks to perform. Since validate_unique could be
        called from a ModelForm, some fields may have been excluded; we can't
        perform a unique check on a model that is missing fields involved
        in that check.
        Fields that did not validate should also be excluded, but they need
        to be passed in via the exclude argument.

        Returns a (unique_checks, date_checks) pair: unique_checks holds
        (model_class, field_name_tuple) entries, date_checks holds
        (model_class, lookup_type, field_name, unique_for_field_name).
        """
        if exclude is None:
            exclude = []
        unique_checks = []
        # unique_together constraints from this model and every MTI parent.
        unique_togethers = [(self.__class__, self._meta.unique_together)]
        for parent_class in self._meta.parents.keys():
            if parent_class._meta.unique_together:
                unique_togethers.append((parent_class, parent_class._meta.unique_together))
        for model_class, unique_together in unique_togethers:
            for check in unique_together:
                for name in check:
                    # If this is an excluded field, don't add this check.
                    if name in exclude:
                        break
                else:
                    # for/else: reached only when no field was excluded.
                    unique_checks.append((model_class, tuple(check)))
        # These are checks for the unique_for_<date/year/month>.
        date_checks = []
        # Gather a list of checks for fields declared as unique and add them to
        # the list of checks.
        fields_with_class = [(self.__class__, self._meta.local_fields)]
        for parent_class in self._meta.parents.keys():
            fields_with_class.append((parent_class, parent_class._meta.local_fields))
        for model_class, fields in fields_with_class:
            for f in fields:
                name = f.name
                if name in exclude:
                    continue
                if f.unique:
                    unique_checks.append((model_class, (name,)))
                if f.unique_for_date and f.unique_for_date not in exclude:
                    date_checks.append((model_class, 'date', name, f.unique_for_date))
                if f.unique_for_year and f.unique_for_year not in exclude:
                    date_checks.append((model_class, 'year', name, f.unique_for_year))
                if f.unique_for_month and f.unique_for_month not in exclude:
                    date_checks.append((model_class, 'month', name, f.unique_for_month))
        return unique_checks, date_checks
    def _perform_unique_checks(self, unique_checks):
        """
        Runs the (model_class, field_names) checks produced by
        _get_unique_checks and returns a dict mapping a field name (or
        NON_FIELD_ERRORS for unique_together) to a list of error messages.
        """
        errors = {}
        for model_class, unique_check in unique_checks:
            # Try to look up an existing object with the same values as this
            # object's values for all the unique field.
            lookup_kwargs = {}
            for field_name in unique_check:
                f = self._meta.get_field(field_name)
                lookup_value = getattr(self, f.attname)
                if lookup_value is None:
                    # no value, skip the lookup
                    continue
                if f.primary_key and not self._state.adding:
                    # no need to check for unique primary key when editing
                    continue
                lookup_kwargs[str(field_name)] = lookup_value
            # some fields were skipped, no reason to do the check
            if len(unique_check) != len(lookup_kwargs):
                continue
            qs = model_class._default_manager.filter(**lookup_kwargs)
            # Exclude the current object from the query if we are editing an
            # instance (as opposed to creating a new one)
            # Note that we need to use the pk as defined by model_class, not
            # self.pk. These can be different fields because model inheritance
            # allows single model to have effectively multiple primary keys.
            # Refs #17615.
            model_class_pk = self._get_pk_val(model_class._meta)
            if not self._state.adding and model_class_pk is not None:
                qs = qs.exclude(pk=model_class_pk)
            if qs.exists():
                if len(unique_check) == 1:
                    key = unique_check[0]
                else:
                    key = NON_FIELD_ERRORS
                errors.setdefault(key, []).append(self.unique_error_message(model_class, unique_check))
        return errors
    def _perform_date_checks(self, date_checks):
        """
        Runs unique_for_date/year/month checks and returns a dict mapping
        the checked field's name to a list of error messages.
        """
        errors = {}
        for model_class, lookup_type, field, unique_for in date_checks:
            lookup_kwargs = {}
            # there's a ticket to add a date lookup, we can remove this special
            # case if that makes it's way in
            date = getattr(self, unique_for)
            if date is None:
                # No date to compare against; nothing to check.
                continue
            if lookup_type == 'date':
                lookup_kwargs['%s__day' % unique_for] = date.day
                lookup_kwargs['%s__month' % unique_for] = date.month
                lookup_kwargs['%s__year' % unique_for] = date.year
            else:
                lookup_kwargs['%s__%s' % (unique_for, lookup_type)] = getattr(date, lookup_type)
            lookup_kwargs[field] = getattr(self, field)
            qs = model_class._default_manager.filter(**lookup_kwargs)
            # Exclude the current object from the query if we are editing an
            # instance (as opposed to creating a new one)
            if not self._state.adding and self.pk is not None:
                qs = qs.exclude(pk=self.pk)
            if qs.exists():
                errors.setdefault(field, []).append(
                    self.date_error_message(lookup_type, field, unique_for)
                )
        return errors
    def date_error_message(self, lookup_type, field, unique_for):
        """
        Builds the translated user-facing message for a failed
        unique_for_date/year/month check; lookup_type is one of 'date',
        'year' or 'month'.
        """
        opts = self._meta
        return _("%(field_name)s must be unique for %(date_field)s %(lookup)s.") % {
            'field_name': six.text_type(capfirst(opts.get_field(field).verbose_name)),
            'date_field': six.text_type(capfirst(opts.get_field(unique_for).verbose_name)),
            'lookup': lookup_type,
        }
    def unique_error_message(self, model_class, unique_check):
        """
        Builds the user-facing message for a failed uniqueness check.
        ``unique_check`` is a tuple of field names: length one for a plain
        unique field, longer for a unique_together constraint.
        """
        opts = model_class._meta
        model_name = capfirst(opts.verbose_name)
        # A unique field
        if len(unique_check) == 1:
            field_name = unique_check[0]
            field = opts.get_field(field_name)
            field_label = capfirst(field.verbose_name)
            # Insert the error into the error dict, very sneaky
            return field.error_messages['unique'] % {
                'model_name': six.text_type(model_name),
                'field_label': six.text_type(field_label)
            }
        # unique_together
        else:
            field_labels = [capfirst(opts.get_field(f).verbose_name) for f in unique_check]
            field_labels = get_text_list(field_labels, _('and'))
            return _("%(model_name)s with this %(field_label)s already exists.") % {
                'model_name': six.text_type(model_name),
                'field_label': six.text_type(field_labels)
            }
def full_clean(self, exclude=None):
"""
Calls clean_fields, clean, and validate_unique, on the model,
and raises a ``ValidationError`` for any errors that occurred.
"""
errors = {}
if exclude is None:
exclude = []
try:
self.clean_fields(exclude=exclude)
except ValidationError as e:
errors = e.update_error_dict(errors)
# Form.clean() is run even if other validation fails, so do the
# same with Model.clean() for consistency.
try:
self.clean()
except ValidationError as e:
errors = e.update_error_dict(errors)
# Run unique checks, but only for fields that passed validation.
for name in errors.keys():
if name != NON_FIELD_ERRORS and name not in exclude:
exclude.append(name)
try:
self.validate_unique(exclude=exclude)
except ValidationError as e:
errors = e.update_error_dict(errors)
if errors:
raise ValidationError(errors)
    def clean_fields(self, exclude=None):
        """
        Cleans all fields and raises a ValidationError containing message_dict
        of all validation errors if any occur. Fields named in ``exclude``
        are skipped entirely.
        """
        if exclude is None:
            exclude = []
        errors = {}
        for f in self._meta.fields:
            if f.name in exclude:
                continue
            # Skip validation for empty fields with blank=True. The developer
            # is responsible for making sure they have a valid value.
            raw_value = getattr(self, f.attname)
            if f.blank and raw_value in f.empty_values:
                continue
            try:
                # f.clean() validates and normalizes; the cleaned value is
                # written back onto the instance.
                setattr(self, f.attname, f.clean(raw_value, self))
            except ValidationError as e:
                errors[f.name] = e.messages
        if errors:
            raise ValidationError(errors)
############################################
# HELPER FUNCTIONS (CURRIED MODEL METHODS) #
############################################
# ORDERING METHODS #########################
def method_set_order(ordered_obj, self, id_list, using=None):
    """
    Curried implementation of set_RELATED_order: rewrites the _order column
    of the ordered_obj rows related to self so it follows the sequence of
    primary keys given in id_list.
    """
    if using is None:
        using = DEFAULT_DB_ALIAS
    rel_val = getattr(self, ordered_obj._meta.order_with_respect_to.rel.field_name)
    order_name = ordered_obj._meta.order_with_respect_to.name
    # FIXME: It would be nice if there was an "update many" version of update
    # for situations like this.
    with transaction.commit_on_success_unless_managed(using=using):
        # One UPDATE per id; filtering on order_name keeps the write scoped
        # to rows actually related to self.
        for i, j in enumerate(id_list):
            ordered_obj.objects.filter(**{'pk': j, order_name: rel_val}).update(_order=i)
def method_get_order(ordered_obj, self):
    """
    Curried implementation of get_RELATED_order: returns the primary keys
    of the ordered_obj rows related to self, in their stored order.
    """
    with_respect_to = ordered_obj._meta.order_with_respect_to
    rel_val = getattr(self, with_respect_to.rel.field_name)
    pk_name = ordered_obj._meta.pk.name
    related_rows = ordered_obj.objects.filter(**{with_respect_to.name: rel_val})
    return [row[pk_name] for row in related_rows.values(pk_name)]
##############################################
# HELPER FUNCTIONS (CURRIED MODEL FUNCTIONS) #
##############################################
def get_absolute_url(opts, func, self, *args, **kwargs):
    """
    Curried implementation of Model.get_absolute_url: honours the
    settings.ABSOLUTE_URL_OVERRIDES mapping ("app_label.model_name" ->
    callable), falling back to the model's own implementation (func).
    """
    return settings.ABSOLUTE_URL_OVERRIDES.get('%s.%s' % (opts.app_label, opts.model_name), func)(self, *args, **kwargs)
########
# MISC #
########
class Empty(object):
    # Deliberately featureless class; consumers create bare instances of it
    # (its exact usage is not visible in this chunk of the file).
    pass
def model_unpickle(model, attrs):
    """
    Used to unpickle Model subclasses with deferred fields.

    Rebuilds the deferred class for (model, attrs) and returns an
    uninitialised instance of it; pickle restores the instance state
    afterwards.
    """
    cls = deferred_class_factory(model, attrs)
    return cls.__new__(cls)
model_unpickle.__safe_for_unpickle__ = True
def unpickle_inner_exception(klass, exception_name):
    """
    Re-create an instance of an exception class that lives as an attribute
    of ``klass`` (e.g. SomeModel.DoesNotExist) without running its
    __init__; pickle restores the state afterwards.
    """
    exception_cls = getattr(klass, exception_name)
    return exception_cls.__new__(exception_cls)
| {
"content_hash": "cf83eb506cc4a13bf3290da25a0f0eff",
"timestamp": "",
"source": "github",
"line_count": 1024,
"max_line_length": 166,
"avg_line_length": 43.7890625,
"alnum_prop": 0.5698037466547725,
"repo_name": "eltonsantos/django",
"id": "7a8eece462d7e4c48e5f1d777e7034c6935d23d7",
"size": "44840",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "django/db/models/base.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
from __future__ import print_function
import collections
import contextlib
import six
import sys
__all__ = ['generate', 'switch', 'guard']
class UniqueNameGenerator(object):
    """
    Generate unique names with an optional prefix.

    Each distinct key keeps its own monotonically increasing counter, so
    successive calls with the same key produce "key_0", "key_1", ... with
    the prefix prepended.

    Args:
        prefix(str): The generated name prefix. All generated name will be
            started with this prefix.
    """

    def __init__(self, prefix=None):
        # Per-key serial counters; unseen keys implicitly start at 0.
        self.ids = collections.defaultdict(int)
        self.prefix = "" if prefix is None else prefix

    def __call__(self, key):
        """
        Generate the next unique name for ``key``.

        Args:
            key(str): The key of return string.

        Returns(str): A unique string with the prefix
        """
        serial = self.ids[key]
        self.ids[key] = serial + 1
        return self.prefix + "_".join([key, str(serial)])
# Module-level default generator shared by generate()/switch()/guard().
generator = UniqueNameGenerator()
def generate(key):
    """Generate a unique name for ``key`` using the module-level generator."""
    return generator(key)
def switch(new_generator=None):
    """
    Swap the module-level name generator.

    Installs ``new_generator`` (or a fresh unprefixed UniqueNameGenerator
    when None) as the global generator, and returns the generator that was
    previously active so callers can restore it later.
    """
    global generator
    previous = generator
    generator = UniqueNameGenerator() if new_generator is None else new_generator
    return previous
@contextlib.contextmanager
def guard(new_generator=None):
    """
    Context manager that scopes the module-level name generator.

    Inside the ``with`` block the global generator is replaced by
    ``new_generator`` — a generator object, a str/bytes prefix used to build
    a new UniqueNameGenerator, or None for a fresh unprefixed one. The
    previous generator is restored on exit.
    """
    if isinstance(new_generator, six.string_types):
        new_generator = UniqueNameGenerator(new_generator)
    elif isinstance(new_generator, six.binary_type):
        new_generator = UniqueNameGenerator(new_generator.decode())
    old = switch(new_generator)
    try:
        yield
    finally:
        # Bug fix: restore the previous generator even when the with-block
        # raises; otherwise one failing block would leave its temporary
        # generator installed globally for the rest of the process.
        switch(old)
| {
"content_hash": "5be6fc730fa0ee8909ef9763c13ee21f",
"timestamp": "",
"source": "github",
"line_count": 65,
"max_line_length": 74,
"avg_line_length": 22.984615384615385,
"alnum_prop": 0.6258366800535475,
"repo_name": "reyoung/Paddle",
"id": "b9957a699e597898bee75ce0e7283f7224293f0c",
"size": "2107",
"binary": false,
"copies": "2",
"ref": "refs/heads/develop",
"path": "python/paddle/fluid/unique_name.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "274815"
},
{
"name": "C++",
"bytes": "9634861"
},
{
"name": "CMake",
"bytes": "321482"
},
{
"name": "Cuda",
"bytes": "1290076"
},
{
"name": "Dockerfile",
"bytes": "8631"
},
{
"name": "Go",
"bytes": "109508"
},
{
"name": "Perl",
"bytes": "11456"
},
{
"name": "Python",
"bytes": "4853892"
},
{
"name": "Shell",
"bytes": "170766"
}
],
"symlink_target": ""
} |
"""
Test suite for pptx.oxml.__init__.py module, primarily XML parser-related.
"""
from __future__ import print_function, unicode_literals
import pytest
from lxml import etree
from pptx.oxml import (
oxml_parser, parse_xml, register_element_cls
)
from pptx.oxml.ns import qn
from pptx.oxml.xmlchemy import BaseOxmlElement
from ..unitutil.mock import function_mock, loose_mock, var_mock
class DescribeOxmlParser(object):
    # The configured oxml parser is expected to discard whitespace-only
    # text between elements, so serializing a parsed fixture must equal the
    # whitespace-stripped bytes.
    def it_strips_whitespace_between_elements(self, foo, stripped_xml_bytes):
        xml_bytes = etree.tostring(foo)
        assert xml_bytes == stripped_xml_bytes
class DescribeParseXml(object):
    # parse_xml() must delegate to etree.fromstring using the
    # module-configured oxml parser (both mocked here).
    def it_uses_oxml_configured_parser_to_parse_xml(
            self, mock_xml_bytes, fromstring, mock_oxml_parser):
        element = parse_xml(mock_xml_bytes)
        fromstring.assert_called_once_with(mock_xml_bytes, mock_oxml_parser)
        assert element is fromstring.return_value
    def it_prefers_to_parse_bytes(self, xml_bytes):
        # bytes input must parse without raising
        parse_xml(xml_bytes)
    def but_accepts_unicode_providing_there_is_no_encoding_declaration(self):
        non_enc_decl = '<?xml version="1.0" standalone="yes"?>'
        enc_decl = '<?xml version="1.0" encoding="UTF-8" standalone="yes"?>'
        xml_body = '<foo><bar>føøbår</bar></foo>'
        # unicode body by itself doesn't raise
        parse_xml(xml_body)
        # adding XML decl without encoding attr doesn't raise either
        xml_text = '%s\n%s' % (non_enc_decl, xml_body)
        parse_xml(xml_text)
        # but adding encoding in the declaration raises ValueError
        xml_text = '%s\n%s' % (enc_decl, xml_body)
        with pytest.raises(ValueError):
            parse_xml(xml_text)
class DescribeRegisterCustomElementClass(object):
    # After registering CustElmCls for 'a:foo', parsing yields CustElmCls
    # instances for that tag while unregistered tags ('a:bar') still map to
    # plain lxml _Element objects.
    def it_determines_cust_elm_class_constructed_for_specified_tag(
            self, xml_bytes):
        register_element_cls('a:foo', CustElmCls)
        foo = etree.fromstring(xml_bytes, oxml_parser)
        assert type(foo) is CustElmCls
        assert type(foo.find(qn('a:bar'))) is etree._Element
# ===========================================================================
# fixtures
# ===========================================================================
class CustElmCls(BaseOxmlElement):
    # Custom element class registered for 'a:foo' in the tests above.
    pass
@pytest.fixture
def foo(xml_bytes):
    # An element tree parsed from the fixture XML with the library parser.
    return etree.fromstring(xml_bytes, oxml_parser)
@pytest.fixture
def fromstring(request):
    # Mock of etree.fromstring as referenced inside pptx.oxml.
    return function_mock(request, 'pptx.oxml.etree.fromstring')
@pytest.fixture
def mock_oxml_parser(request):
    # Mock standing in for the module-level oxml_parser object.
    return var_mock(request, 'pptx.oxml.oxml_parser')
@pytest.fixture
def mock_xml_bytes(request):
    # Loose mock used where the XML content itself is irrelevant.
    return loose_mock(request, 'xml_bytes')
@pytest.fixture
def stripped_xml_bytes():
    # Expected serialization of xml_bytes once inter-element whitespace has
    # been removed by the parser.
    return (
        '<a:foo xmlns:a="http://schemas.openxmlformats.org/drawingml/2006/ma'
        'in"><a:bar>foobar</a:bar></a:foo>'
    ).encode('utf-8')
@pytest.fixture
def xml_bytes(xml_text):
    # UTF-8 encoded form of the xml_text fixture.
    return xml_text.encode('utf-8')
@pytest.fixture
def xml_text():
    # Pretty-printed XML (whitespace between elements) used as parser input.
    return (
        '<?xml version="1.0" encoding="UTF-8" standalone="yes"?>\n'
        '<a:foo xmlns:a="http://schemas.openxmlformats.org/drawingml/2006/ma'
        'in">\n'
        '  <a:bar>foobar</a:bar>\n'
        '</a:foo>\n'
    )
| {
"content_hash": "ddda3f2a7b1bc71f93a534c154f11c53",
"timestamp": "",
"source": "github",
"line_count": 112,
"max_line_length": 77,
"avg_line_length": 28.607142857142858,
"alnum_prop": 0.630461922596754,
"repo_name": "kevingu1003/python-pptx",
"id": "28c5f9bc51b660a5570df4db52f0eca81a70d458",
"size": "3226",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "tests/oxml/test___init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Cucumber",
"bytes": "62746"
},
{
"name": "Makefile",
"bytes": "2051"
},
{
"name": "PLpgSQL",
"bytes": "48599"
},
{
"name": "Python",
"bytes": "1409851"
}
],
"symlink_target": ""
} |
# Periodicity regression script: build a model for a minute-frequency ('T')
# series with cycle parameters (60, 'T', 200) via the shared test helper.
import tests.periodicities.period_test as per
per.buildModel((60 , 'T' , 200));
| {
"content_hash": "dd31180970b43340631f5a650af97279",
"timestamp": "",
"source": "github",
"line_count": 4,
"max_line_length": 45,
"avg_line_length": 20.5,
"alnum_prop": 0.7073170731707317,
"repo_name": "antoinecarme/pyaf",
"id": "d3f9a449ae8d60bcc388d2c4d71341684c7561c3",
"size": "82",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/periodicities/Minute/Cycle_Minute_200_T_60.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "6773299"
},
{
"name": "Procfile",
"bytes": "24"
},
{
"name": "Python",
"bytes": "54209093"
},
{
"name": "R",
"bytes": "807"
},
{
"name": "Shell",
"bytes": "3619"
}
],
"symlink_target": ""
} |
import os
import twisted
import six
from twisted.trial import unittest
from twisted.protocols.policies import WrappingFactory
from twisted.python.filepath import FilePath
from twisted.internet import reactor, defer, error
from twisted.web import server, static, util, resource
from twisted.web.test.test_webclient import ForeverTakingResource, \
NoLengthResource, HostHeaderResource, \
PayloadResource, BrokenDownloadResource
from twisted.protocols.ftp import FTPRealm, FTPFactory
from twisted.cred import portal, checkers, credentials
from twisted.protocols.ftp import FTPClient, ConnectionLost
from w3lib.url import path_to_file_uri
from scrapy import twisted_version
from scrapy.core.downloader.handlers import DownloadHandlers
from scrapy.core.downloader.handlers.file import FileDownloadHandler
from scrapy.core.downloader.handlers.http import HTTPDownloadHandler, HttpDownloadHandler
from scrapy.core.downloader.handlers.http10 import HTTP10DownloadHandler
from scrapy.core.downloader.handlers.http11 import HTTP11DownloadHandler
from scrapy.core.downloader.handlers.s3 import S3DownloadHandler
from scrapy.core.downloader.handlers.ftp import FTPDownloadHandler
from scrapy.spiders import Spider
from scrapy.http import Request
from scrapy.settings import Settings
from scrapy.utils.test import get_crawler
from scrapy.exceptions import NotConfigured
from tests.mockserver import MockServer
from tests.spiders import SingleRequestSpider
class DummyDH(object):
    # Minimal stand-in download handler: accepts the crawler and does
    # nothing. Loaded successfully by LoadTestCase.test_enabled_handler.
    def __init__(self, crawler):
        pass
class OffDH(object):
    # Download handler that refuses configuration; DownloadHandlers should
    # record it under _notconfigured instead of _handlers.
    def __init__(self, crawler):
        raise NotConfigured
class LoadTestCase(unittest.TestCase):
    # Verifies DownloadHandlers' lazy-loading bookkeeping: schemes are
    # registered up front in _schemes, while handler instances are created
    # on first use and land in either _handlers or _notconfigured.
    def test_enabled_handler(self):
        handlers = {'scheme': 'tests.test_downloader_handlers.DummyDH'}
        crawler = get_crawler(settings_dict={'DOWNLOAD_HANDLERS': handlers})
        dh = DownloadHandlers(crawler)
        self.assertIn('scheme', dh._schemes)
        for scheme in handlers:  # force load handlers
            dh._get_handler(scheme)
        self.assertIn('scheme', dh._handlers)
        self.assertNotIn('scheme', dh._notconfigured)
    def test_not_configured_handler(self):
        # OffDH raises NotConfigured from __init__, so it must end up in
        # _notconfigured rather than _handlers.
        handlers = {'scheme': 'tests.test_downloader_handlers.OffDH'}
        crawler = get_crawler(settings_dict={'DOWNLOAD_HANDLERS': handlers})
        dh = DownloadHandlers(crawler)
        self.assertIn('scheme', dh._schemes)
        for scheme in handlers:  # force load handlers
            dh._get_handler(scheme)
        self.assertNotIn('scheme', dh._handlers)
        self.assertIn('scheme', dh._notconfigured)
    def test_disabled_handler(self):
        # Mapping a scheme to None disables it entirely (not even listed
        # in _schemes).
        handlers = {'scheme': None}
        crawler = get_crawler(settings_dict={'DOWNLOAD_HANDLERS': handlers})
        dh = DownloadHandlers(crawler)
        self.assertNotIn('scheme', dh._schemes)
        for scheme in handlers:  # force load handlers
            dh._get_handler(scheme)
        self.assertNotIn('scheme', dh._handlers)
        self.assertIn('scheme', dh._notconfigured)
class FileTestCase(unittest.TestCase):
    # Tests for the file:// download handler.
    def setUp(self):
        # The trailing '^' exercises percent-escaping in file URIs (%5E).
        self.tmpname = self.mktemp()
        fd = open(self.tmpname + '^', 'w')
        fd.write('0123456789')
        fd.close()
        self.download_request = FileDownloadHandler(Settings()).download_request
    def test_download(self):
        def _test(response):
            self.assertEquals(response.url, request.url)
            self.assertEquals(response.status, 200)
            self.assertEquals(response.body, '0123456789')
        request = Request(path_to_file_uri(self.tmpname + '^'))
        assert request.url.upper().endswith('%5E')
        return self.download_request(request, Spider('foo')).addCallback(_test)
    def test_non_existent(self):
        # A missing file must surface as an IOError failure.
        request = Request('file://%s' % self.mktemp())
        d = self.download_request(request, Spider('foo'))
        return self.assertFailure(d, IOError)
class HttpTestCase(unittest.TestCase):
    """Exercise a download handler against a local twisted.web server.

    Subclasses override ``download_handler_cls`` to run the same suite
    against the HTTP/1.0 and HTTP/1.1 implementations.
    """
    download_handler_cls = HTTPDownloadHandler

    def setUp(self):
        # Serve a small static tree plus special-purpose resources used by
        # individual tests (redirect, hanging responses, payload echo...).
        name = self.mktemp()
        os.mkdir(name)
        FilePath(name).child("file").setContent("0123456789")
        r = static.File(name)
        r.putChild("redirect", util.Redirect("/file"))
        r.putChild("wait", ForeverTakingResource())
        r.putChild("hang-after-headers", ForeverTakingResource(write=True))
        r.putChild("nolength", NoLengthResource())
        r.putChild("host", HostHeaderResource())
        r.putChild("payload", PayloadResource())
        r.putChild("broken", BrokenDownloadResource())
        self.site = server.Site(r, timeout=None)
        self.wrapper = WrappingFactory(self.site)
        self.port = reactor.listenTCP(0, self.wrapper, interface='127.0.0.1')
        self.portno = self.port.getHost().port
        self.download_handler = self.download_handler_cls(Settings())
        self.download_request = self.download_handler.download_request

    @defer.inlineCallbacks
    def tearDown(self):
        yield self.port.stopListening()
        if hasattr(self.download_handler, 'close'):
            yield self.download_handler.close()

    def getURL(self, path):
        return "http://127.0.0.1:%d/%s" % (self.portno, path)

    def test_download(self):
        request = Request(self.getURL('file'))
        d = self.download_request(request, Spider('foo'))
        d.addCallback(lambda r: r.body)
        d.addCallback(self.assertEquals, "0123456789")
        return d

    def test_download_head(self):
        # HEAD responses must carry an empty body.
        request = Request(self.getURL('file'), method='HEAD')
        d = self.download_request(request, Spider('foo'))
        d.addCallback(lambda r: r.body)
        d.addCallback(self.assertEquals, '')
        return d

    def test_redirect_status(self):
        # The handler itself must not follow redirects; the 302 is returned.
        request = Request(self.getURL('redirect'))
        d = self.download_request(request, Spider('foo'))
        d.addCallback(lambda r: r.status)
        d.addCallback(self.assertEquals, 302)
        return d

    def test_redirect_status_head(self):
        request = Request(self.getURL('redirect'), method='HEAD')
        d = self.download_request(request, Spider('foo'))
        d.addCallback(lambda r: r.status)
        d.addCallback(self.assertEquals, 302)
        return d

    @defer.inlineCallbacks
    def test_timeout_download_from_spider(self):
        spider = Spider('foo')
        meta = {'download_timeout': 0.2}
        # client connects but no data is received
        request = Request(self.getURL('wait'), meta=meta)
        d = self.download_request(request, spider)
        yield self.assertFailure(d, defer.TimeoutError, error.TimeoutError)
        # client connects, server send headers and some body bytes but hangs
        request = Request(self.getURL('hang-after-headers'), meta=meta)
        d = self.download_request(request, spider)
        yield self.assertFailure(d, defer.TimeoutError, error.TimeoutError)

    def test_host_header_not_in_request_headers(self):
        # When no Host header is set on the Request, the handler fills it in
        # from the URL without mutating the original request headers.
        def _test(response):
            self.assertEquals(response.body, '127.0.0.1:%d' % self.portno)
            self.assertEquals(request.headers, {})
        request = Request(self.getURL('host'))
        return self.download_request(request, Spider('foo')).addCallback(_test)

    def test_host_header_seted_in_request_headers(self):
        # An explicit Host header must be sent to the server verbatim.
        # (Bug fix: removed four unreachable lines that followed the return
        # statement here — they could never execute.)
        def _test(response):
            self.assertEquals(response.body, 'example.com')
            self.assertEquals(request.headers.get('Host'), 'example.com')
        request = Request(self.getURL('host'), headers={'Host': 'example.com'})
        return self.download_request(request, Spider('foo')).addCallback(_test)

    def test_payload(self):
        body = '1'*100  # PayloadResource requires body length to be 100
        request = Request(self.getURL('payload'), method='POST', body=body)
        d = self.download_request(request, Spider('foo'))
        d.addCallback(lambda r: r.body)
        d.addCallback(self.assertEquals, body)
        return d
class DeprecatedHttpTestCase(HttpTestCase):
    """Runs the suite against the deprecated HttpDownloadHandler (HTTP 1.0)."""
    download_handler_cls = HttpDownloadHandler
class Http10TestCase(HttpTestCase):
    """HTTP 1.0 test case"""
    # Same suite as HttpTestCase, run against the HTTP/1.0 handler.
    download_handler_cls = HTTP10DownloadHandler
class Http11TestCase(HttpTestCase):
    """HTTP 1.1 test case"""
    download_handler_cls = HTTP11DownloadHandler
    if twisted_version < (11, 1, 0):
        skip = 'HTTP1.1 not supported in twisted < 11.1.0'
    def test_download_without_maxsize_limit(self):
        request = Request(self.getURL('file'))
        d = self.download_request(request, Spider('foo'))
        d.addCallback(lambda r: r.body)
        d.addCallback(self.assertEquals, "0123456789")
        return d
    @defer.inlineCallbacks
    def test_download_with_maxsize(self):
        request = Request(self.getURL('file'))
        # 10 is minimal size for this request and the limit is only counted on
        # response body. (regardless of headers)
        d = self.download_request(request, Spider('foo', download_maxsize=10))
        d.addCallback(lambda r: r.body)
        d.addCallback(self.assertEquals, "0123456789")
        yield d
        # one byte below the body size must abort the download
        d = self.download_request(request, Spider('foo', download_maxsize=9))
        yield self.assertFailure(d, defer.CancelledError, error.ConnectionAborted)
    @defer.inlineCallbacks
    def test_download_with_maxsize_per_req(self):
        # Request.meta['download_maxsize'] limits individual requests.
        meta = {'download_maxsize': 2}
        request = Request(self.getURL('file'), meta=meta)
        d = self.download_request(request, Spider('foo'))
        yield self.assertFailure(d, defer.CancelledError, error.ConnectionAborted)
    @defer.inlineCallbacks
    def test_download_with_small_maxsize_per_spider(self):
        request = Request(self.getURL('file'))
        d = self.download_request(request, Spider('foo', download_maxsize=2))
        yield self.assertFailure(d, defer.CancelledError, error.ConnectionAborted)
    def test_download_with_large_maxsize_per_spider(self):
        # A limit above the body size must not interfere with the download.
        request = Request(self.getURL('file'))
        d = self.download_request(request, Spider('foo', download_maxsize=100))
        d.addCallback(lambda r: r.body)
        d.addCallback(self.assertEquals, "0123456789")
        return d
class Http11MockServerTestCase(unittest.TestCase):
    """HTTP 1.1 test case with MockServer"""
    if twisted_version < (11, 1, 0):
        skip = 'HTTP1.1 not supported in twisted < 11.1.0'

    def setUp(self):
        # Starts the external mock HTTP server (serves on localhost:8998).
        self.mockserver = MockServer()
        self.mockserver.__enter__()

    def tearDown(self):
        self.mockserver.__exit__(None, None, None)

    @defer.inlineCallbacks
    def test_download_with_content_length(self):
        crawler = get_crawler(SingleRequestSpider)
        # http://localhost:8998/partial set Content-Length to 1024, use download_maxsize= 1000 to avoid
        # download it
        yield crawler.crawl(seed=Request(url='http://localhost:8998/partial', meta={'download_maxsize': 1000}))
        failure = crawler.spider.meta['failure']
        self.assertIsInstance(failure.value, defer.CancelledError)

    @defer.inlineCallbacks
    def test_download(self):
        crawler = get_crawler(SingleRequestSpider)
        yield crawler.crawl(seed=Request(url='http://localhost:8998'))
        failure = crawler.spider.meta.get('failure')
        self.assertIsNone(failure)
        reason = crawler.spider.meta['close_reason']
        # Bug fix: assertTrue(reason, 'finished') treated 'finished' as the
        # failure *message* and only checked truthiness; compare for real.
        self.assertEqual(reason, 'finished')

    @defer.inlineCallbacks
    def test_download_gzip_response(self):
        if six.PY2 and twisted_version > (12, 3, 0):
            crawler = get_crawler(SingleRequestSpider)
            body = '1'*100  # PayloadResource requires body length to be 100
            request = Request('http://localhost:8998/payload', method='POST', body=body, meta={'download_maxsize': 50})
            yield crawler.crawl(seed=request)
            failure = crawler.spider.meta['failure']
            # download_maxsize < 100, hence the CancelledError
            self.assertIsInstance(failure.value, defer.CancelledError)
            request.headers.setdefault('Accept-Encoding', 'gzip,deflate')
            request = request.replace(url='http://localhost:8998/xpayload')
            yield crawler.crawl(seed=request)
            # download_maxsize = 50 is enough for the gzipped response
            failure = crawler.spider.meta.get('failure')
            self.assertIsNone(failure)
            reason = crawler.spider.meta['close_reason']
            # Bug fix: was assertTrue(reason, 'finished') — see test_download.
            self.assertEqual(reason, 'finished')
        else:
            raise unittest.SkipTest("xpayload and payload endpoint only enabled for twisted > 12.3.0 and python 2.x")
class UriResource(resource.Resource):
    """Return the full uri that was requested"""

    def getChild(self, path, request):
        # Serve every sub-path with this same resource.
        return self

    def render(self, request):
        # Echo the raw request URI back as the response body, so tests can
        # see exactly what the server (acting as a proxy) was asked for.
        return request.uri
class HttpProxyTestCase(unittest.TestCase):
    """Check that requests carrying a ``proxy`` meta key are routed through
    the proxy.

    The local test server echoes the requested URI (UriResource), so the
    response body reveals what the fake proxy was asked for.
    """
    download_handler_cls = HTTPDownloadHandler

    def setUp(self):
        site = server.Site(UriResource(), timeout=None)
        wrapper = WrappingFactory(site)
        # Port 0: let the OS pick a free port, then read it back.
        self.port = reactor.listenTCP(0, wrapper, interface='127.0.0.1')
        self.portno = self.port.getHost().port
        self.download_handler = self.download_handler_cls(Settings())
        self.download_request = self.download_handler.download_request

    @defer.inlineCallbacks
    def tearDown(self):
        yield self.port.stopListening()
        if hasattr(self.download_handler, 'close'):
            yield self.download_handler.close()

    def getURL(self, path):
        return "http://127.0.0.1:%d/%s" % (self.portno, path)

    def test_download_with_proxy(self):
        def _test(response):
            # MODERNIZED: assertEquals is a deprecated alias (removed in
            # Python 3.12); use assertEqual throughout this class.
            self.assertEqual(response.status, 200)
            self.assertEqual(response.url, request.url)
            self.assertEqual(response.body, 'http://example.com')

        http_proxy = self.getURL('')
        request = Request('http://example.com', meta={'proxy': http_proxy})
        return self.download_request(request, Spider('foo')).addCallback(_test)

    def test_download_with_proxy_https_noconnect(self):
        def _test(response):
            self.assertEqual(response.status, 200)
            self.assertEqual(response.url, request.url)
            self.assertEqual(response.body, 'https://example.com')

        # '?noconnect' asks the handler to proxy HTTPS without a CONNECT tunnel.
        http_proxy = '%s?noconnect' % self.getURL('')
        request = Request('https://example.com', meta={'proxy': http_proxy})
        return self.download_request(request, Spider('foo')).addCallback(_test)

    def test_download_without_proxy(self):
        def _test(response):
            self.assertEqual(response.status, 200)
            self.assertEqual(response.url, request.url)
            self.assertEqual(response.body, '/path/to/resource')

        request = Request(self.getURL('path/to/resource'))
        return self.download_request(request, Spider('foo')).addCallback(_test)
class DeprecatedHttpProxyTestCase(unittest.TestCase):
    """Old deprecated reference to http10 downloader handler"""
    # Re-runs nothing by itself; exists to ensure the legacy class name
    # still constructs. NOTE(review): inherits from TestCase directly, not
    # from HttpProxyTestCase -- confirm that is intentional.
    download_handler_cls = HttpDownloadHandler
class Http10ProxyTestCase(HttpProxyTestCase):
    # Same proxy tests as HttpProxyTestCase, using the HTTP/1.0 handler.
    download_handler_cls = HTTP10DownloadHandler
class Http11ProxyTestCase(HttpProxyTestCase):
    # Same proxy tests as HttpProxyTestCase, using the HTTP/1.1 handler.
    download_handler_cls = HTTP11DownloadHandler
    if twisted_version < (11, 1, 0):
        skip = 'HTTP1.1 not supported in twisted < 11.1.0'
class HttpDownloadHandlerMock(object):
    """Stand-in download handler: performs no I/O and simply hands the
    request back, so callers can inspect what would have been fetched."""

    def __init__(self, settings):
        # Settings are accepted only for interface compatibility.
        pass

    def download_request(self, request, spider):
        # Echo the request unchanged instead of downloading anything.
        return request
class S3TestCase(unittest.TestCase):
    """Verify S3 request signing against the worked examples in the AWS S3
    Developer Guide (2006-03-01); each test reproduces one guide example."""
    download_handler_cls = S3DownloadHandler
    try:
        # can't instance without settings, but ignore that
        download_handler_cls({})
    except NotConfigured:
        skip = 'missing boto library'
    except KeyError: pass

    # test use same example keys than amazon developer guide
    # http://s3.amazonaws.com/awsdocs/S3/20060301/s3-dg-20060301.pdf
    # and the tests described here are the examples from that manual
    AWS_ACCESS_KEY_ID = '0PN5J17HBGZHT7JJ3X82'
    AWS_SECRET_ACCESS_KEY = 'uV3F3YluFJax1cknvbcGwgjvx4QpvB+leU8dUj2o'

    def setUp(self):
        # Route the S3 handler through the mock HTTP handler so no real
        # network traffic happens; only the signed request is inspected.
        s3reqh = S3DownloadHandler(Settings(), self.AWS_ACCESS_KEY_ID, \
                self.AWS_SECRET_ACCESS_KEY, \
                httpdownloadhandler=HttpDownloadHandlerMock)
        self.download_request = s3reqh.download_request
        self.spider = Spider('foo')

    def test_request_signing1(self):
        # gets an object from the johnsmith bucket.
        req = Request('s3://johnsmith/photos/puppy.jpg',
                headers={'Date': 'Tue, 27 Mar 2007 19:36:42 +0000'})
        httpreq = self.download_request(req, self.spider)
        self.assertEqual(httpreq.headers['Authorization'], \
                'AWS 0PN5J17HBGZHT7JJ3X82:xXjDGYUmKxnwqr5KXNPGldn5LbA=')

    def test_request_signing2(self):
        # puts an object into the johnsmith bucket.
        req = Request('s3://johnsmith/photos/puppy.jpg', method='PUT', headers={
            'Content-Type': 'image/jpeg',
            'Date': 'Tue, 27 Mar 2007 21:15:45 +0000',
            'Content-Length': '94328',
            })
        httpreq = self.download_request(req, self.spider)
        self.assertEqual(httpreq.headers['Authorization'], \
                'AWS 0PN5J17HBGZHT7JJ3X82:hcicpDDvL9SsO6AkvxqmIWkmOuQ=')

    def test_request_signing3(self):
        # lists the content of the johnsmith bucket.
        req = Request('s3://johnsmith/?prefix=photos&max-keys=50&marker=puppy', \
                method='GET', headers={
                    'User-Agent': 'Mozilla/5.0',
                    'Date': 'Tue, 27 Mar 2007 19:42:41 +0000',
                    })
        httpreq = self.download_request(req, self.spider)
        self.assertEqual(httpreq.headers['Authorization'], \
                'AWS 0PN5J17HBGZHT7JJ3X82:jsRt/rhG+Vtp88HrYL706QhE4w4=')

    def test_request_signing4(self):
        # fetches the access control policy sub-resource for the 'johnsmith' bucket.
        req = Request('s3://johnsmith/?acl', \
                method='GET', headers={'Date': 'Tue, 27 Mar 2007 19:44:46 +0000'})
        httpreq = self.download_request(req, self.spider)
        self.assertEqual(httpreq.headers['Authorization'], \
                'AWS 0PN5J17HBGZHT7JJ3X82:thdUi9VAkzhkniLj96JIrOPGi0g=')

    def test_request_signing5(self):
        # deletes an object from the 'johnsmith' bucket using the
        # path-style and Date alternative.
        req = Request('s3://johnsmith/photos/puppy.jpg', \
                method='DELETE', headers={
                    'Date': 'Tue, 27 Mar 2007 21:20:27 +0000',
                    'x-amz-date': 'Tue, 27 Mar 2007 21:20:26 +0000',
                    })
        httpreq = self.download_request(req, self.spider)
        self.assertEqual(httpreq.headers['Authorization'], \
                'AWS 0PN5J17HBGZHT7JJ3X82:k3nL7gH3+PadhTEVn5Ip83xlYzk=')

    def test_request_signing6(self):
        # uploads an object to a CNAME style virtual hosted bucket with metadata.
        req = Request('s3://static.johnsmith.net:8080/db-backup.dat.gz', \
                method='PUT', headers={
                    'User-Agent': 'curl/7.15.5',
                    'Host': 'static.johnsmith.net:8080',
                    'Date': 'Tue, 27 Mar 2007 21:06:08 +0000',
                    'x-amz-acl': 'public-read',
                    'content-type': 'application/x-download',
                    'Content-MD5': '4gJE4saaMU4BqNR0kLY+lw==',
                    'X-Amz-Meta-ReviewedBy': 'joe@johnsmith.net,jane@johnsmith.net',
                    'X-Amz-Meta-FileChecksum': '0x02661779',
                    'X-Amz-Meta-ChecksumAlgorithm': 'crc32',
                    'Content-Disposition': 'attachment; filename=database.dat',
                    'Content-Encoding': 'gzip',
                    'Content-Length': '5913339',
                    })
        httpreq = self.download_request(req, self.spider)
        self.assertEqual(httpreq.headers['Authorization'], \
                'AWS 0PN5J17HBGZHT7JJ3X82:C0FlOtU8Ylb9KDTpZqYkZPX91iI=')

    def test_request_signing7(self):
        # ensure that spaces are quoted properly before signing
        req = Request(
                ("s3://johnsmith/photos/my puppy.jpg"
                 "?response-content-disposition=my puppy.jpg"),
                method='GET',
                headers={
                    'Date': 'Tue, 27 Mar 2007 19:42:41 +0000',
                    })
        httpreq = self.download_request(req, self.spider)
        self.assertEqual(
            httpreq.headers['Authorization'],
            'AWS 0PN5J17HBGZHT7JJ3X82:+CfvG8EZ3YccOrRVMXNaK2eKZmM=')
class FTPTestCase(unittest.TestCase):
    """FTP download-handler tests against an in-process twisted FTP server."""
    username = "scrapy"
    password = "passwd"
    if twisted_version < (10, 2, 0):
        skip = "Twisted pre 10.2.0 doesn't allow to set home path other than /home"

    def setUp(self):
        # setup dirs and test file: one home directory per FTP user,
        # seeded with two known files.
        self.directory = self.mktemp()
        os.mkdir(self.directory)
        userdir = os.path.join(self.directory, self.username)
        os.mkdir(userdir)
        fp = FilePath(userdir)
        fp.child('file.txt').setContent("I have the power!")
        fp.child('file with spaces.txt').setContent("Moooooooooo power!")

        # setup server: password-checked portal listening on a free port.
        realm = FTPRealm(anonymousRoot=self.directory, userHome=self.directory)
        p = portal.Portal(realm)
        users_checker = checkers.InMemoryUsernamePasswordDatabaseDontUse()
        users_checker.addUser(self.username, self.password)
        p.registerChecker(users_checker, credentials.IUsernamePassword)
        self.factory = FTPFactory(portal=p)
        self.port = reactor.listenTCP(0, self.factory, interface="127.0.0.1")
        self.portNum = self.port.getHost().port
        self.download_handler = FTPDownloadHandler(Settings())
        self.addCleanup(self.port.stopListening)

    def _add_test_callbacks(self, deferred, callback=None, errback=None):
        # Always drop the FTP control connection first so the reactor is
        # left clean, then chain the test's own callback/errback.
        def _clean(data):
            self.download_handler.client.transport.loseConnection()
            return data

        deferred.addCallback(_clean)
        if callback:
            deferred.addCallback(callback)
        if errback:
            deferred.addErrback(errback)
        return deferred

    def test_ftp_download_success(self):
        """An existing file downloads with status 200 and its full body."""
        request = Request(url="ftp://127.0.0.1:%s/file.txt" % self.portNum,
                meta={"ftp_user": self.username, "ftp_password": self.password})
        d = self.download_handler.download_request(request, None)

        def _test(r):
            self.assertEqual(r.status, 200)
            self.assertEqual(r.body, 'I have the power!')
            self.assertEqual(r.headers, {'Local Filename': [''], 'Size': ['17']})
        return self._add_test_callbacks(d, _test)

    def test_ftp_download_path_with_spaces(self):
        """Paths containing spaces are handled correctly."""
        request = Request(
            url="ftp://127.0.0.1:%s/file with spaces.txt" % self.portNum,
            meta={"ftp_user": self.username, "ftp_password": self.password}
        )
        d = self.download_handler.download_request(request, None)

        def _test(r):
            self.assertEqual(r.status, 200)
            self.assertEqual(r.body, 'Moooooooooo power!')
            self.assertEqual(r.headers, {'Local Filename': [''], 'Size': ['18']})
        return self._add_test_callbacks(d, _test)

    def test_ftp_download_notexist(self):
        """A missing remote file maps to HTTP-style status 404."""
        request = Request(url="ftp://127.0.0.1:%s/notexist.txt" % self.portNum,
                meta={"ftp_user": self.username, "ftp_password": self.password})
        d = self.download_handler.download_request(request, None)

        def _test(r):
            self.assertEqual(r.status, 404)
        return self._add_test_callbacks(d, _test)

    def test_ftp_local_filename(self):
        """With ftp_local_filename set, the body is the local path and the
        file contents are written to disk instead."""
        local_fname = "/tmp/file.txt"
        request = Request(url="ftp://127.0.0.1:%s/file.txt" % self.portNum,
                meta={"ftp_user": self.username, "ftp_password": self.password, "ftp_local_filename": local_fname})
        d = self.download_handler.download_request(request, None)

        def _test(r):
            self.assertEqual(r.body, local_fname)
            self.assertEqual(r.headers, {'Local Filename': ['/tmp/file.txt'], 'Size': ['17']})
            self.assertTrue(os.path.exists(local_fname))
            with open(local_fname) as f:
                self.assertEqual(f.read(), "I have the power!")
            os.remove(local_fname)
        return self._add_test_callbacks(d, _test)

    def test_invalid_credentials(self):
        """Bad credentials surface as a ConnectionLost failure."""
        request = Request(url="ftp://127.0.0.1:%s/file.txt" % self.portNum,
                meta={"ftp_user": self.username, "ftp_password": 'invalid'})
        d = self.download_handler.download_request(request, None)

        def _test(r):
            self.assertEqual(r.type, ConnectionLost)
        return self._add_test_callbacks(d, errback=_test)
| {
"content_hash": "dfbdd669235e7221bf23c177e38d1a13",
"timestamp": "",
"source": "github",
"line_count": 610,
"max_line_length": 119,
"avg_line_length": 40.97704918032787,
"alnum_prop": 0.6443831012962074,
"repo_name": "yarikoptic/scrapy",
"id": "d2a349b40fa863abeba79d67419c3ec224498457",
"size": "24996",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tests/test_downloader_handlers.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Groff",
"bytes": "2008"
},
{
"name": "HTML",
"bytes": "1809"
},
{
"name": "Python",
"bytes": "1314012"
},
{
"name": "Shell",
"bytes": "258"
}
],
"symlink_target": ""
} |
from django.contrib import admin
from oscar.core.loading import get_model
from treebeard.admin import TreeAdmin
# Resolve the catalogue models through Oscar's dynamic model loader so that
# projects which fork/override these models get their own classes here
# instead of the defaults.
AttributeOption = get_model('catalogue', 'AttributeOption')
AttributeOptionGroup = get_model('catalogue', 'AttributeOptionGroup')
Category = get_model('catalogue', 'Category')
Option = get_model('catalogue', 'Option')
Product = get_model('catalogue', 'Product')
ProductAttribute = get_model('catalogue', 'ProductAttribute')
ProductAttributeValue = get_model('catalogue', 'ProductAttributeValue')
ProductCategory = get_model('catalogue', 'ProductCategory')
ProductClass = get_model('catalogue', 'ProductClass')
ProductImage = get_model('catalogue', 'ProductImage')
ProductRecommendation = get_model('catalogue', 'ProductRecommendation')
class AttributeInline(admin.TabularInline):
    # Inline editor for a product's attribute values on the Product page.
    model = ProductAttributeValue
class ProductRecommendationInline(admin.TabularInline):
    # Recommendations made *by* this product; fk_name disambiguates the
    # two Product foreign keys on the through model.
    model = ProductRecommendation
    fk_name = 'primary'
class CategoryInline(admin.TabularInline):
    # Product <-> category assignments; show one empty extra row.
    model = ProductCategory
    extra = 1
class ProductAttributeInline(admin.TabularInline):
    # Attribute definitions edited inline on the ProductClass page.
    model = ProductAttribute
    extra = 2
class ProductClassAdmin(admin.ModelAdmin):
    # Product types, with their attribute definitions editable inline.
    list_display = ('name', 'requires_shipping', 'track_stock')
    inlines = [ProductAttributeInline]
class ProductAdmin(admin.ModelAdmin):
    list_display = ('get_title', 'upc', 'get_product_class', 'structure',
                    'attribute_summary', 'date_created')
    # Auto-fill the slug from the title while typing in the admin form.
    prepopulated_fields = {"slug": ("title",)}
    inlines = [AttributeInline, CategoryInline, ProductRecommendationInline]
class ProductAttributeAdmin(admin.ModelAdmin):
    list_display = ('name', 'code', 'product_class', 'type')
    # Auto-fill the machine-readable code from the display name.
    prepopulated_fields = {"code": ("name", )}
class OptionAdmin(admin.ModelAdmin):
    # Default admin behaviour is sufficient for options.
    pass
class ProductAttributeValueAdmin(admin.ModelAdmin):
    # Concrete attribute values, listed with their owning product.
    list_display = ('product', 'attribute', 'value')
class AttributeOptionInline(admin.TabularInline):
    # Individual options edited inline on the AttributeOptionGroup page.
    model = AttributeOption
class AttributeOptionGroupAdmin(admin.ModelAdmin):
    list_display = ('name', 'option_summary')
    inlines = [AttributeOptionInline, ]
class CategoryAdmin(TreeAdmin):
    # treebeard's TreeAdmin renders the category hierarchy as a tree.
    pass
# Register every catalogue model with the admin site; models registered
# without a dedicated ModelAdmin fall back to the default options.
admin.site.register(ProductClass, ProductClassAdmin)
admin.site.register(Product, ProductAdmin)
admin.site.register(ProductAttribute, ProductAttributeAdmin)
admin.site.register(ProductAttributeValue, ProductAttributeValueAdmin)
admin.site.register(AttributeOptionGroup, AttributeOptionGroupAdmin)
admin.site.register(Option, OptionAdmin)
admin.site.register(ProductImage)
admin.site.register(Category, CategoryAdmin)
admin.site.register(ProductCategory)
| {
"content_hash": "45b918a1ff67953e4c294e0529646157",
"timestamp": "",
"source": "github",
"line_count": 83,
"max_line_length": 76,
"avg_line_length": 31.337349397590362,
"alnum_prop": 0.7627835447904652,
"repo_name": "kapt/django-oscar",
"id": "130054aaa1a211eba14e5df86a89990c79986240",
"size": "2601",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "oscar/apps/catalogue/admin.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "1013938"
},
{
"name": "JavaScript",
"bytes": "926045"
},
{
"name": "Python",
"bytes": "5840384"
},
{
"name": "Shell",
"bytes": "6015"
},
{
"name": "XSLT",
"bytes": "49764"
}
],
"symlink_target": ""
} |
from __future__ import print_function
from __future__ import unicode_literals
from builtins import str
import logging, sys, signal, subprocess, types, time, os, codecs, platform

# With the future/builtins import above, ``str`` is the unicode text type
# on both Python 2 and 3, so this tuple covers "any string".
string_types = str,

# NOTE(review): ``global`` at module level is a no-op in Python; these
# statements only document which names below act as module-wide settings.
global logger
global stdout
global stderr
global timing
global log_command

logger = None  # default log destination consulted by system2() when unset
stdout = False  # echo command output to the console as well
stderr = False  # NOTE(review): not referenced anywhere in this module
timing = True # print execution time of each command in the log, just after the return code
log_command = True # outputs the command being executed to the log (before command output)

# Sentinel distinguishing "argument not supplied" from an explicit None.
_sentinel = object()
def quote_command(cmd):
    """Wrap *cmd* in double quotes on Windows when needed.

    Prevents the cmd.exe "The input line is too long" failure. On
    non-Windows platforms the command is returned untouched, since extra
    quoting would break the shell invocation there.
    """
    if os.name not in ("nt", "dos"):
        return cmd
    import re
    # Remove already-quoted segments, then inspect what remains: if the
    # leftover text is non-empty and not itself fully quoted, the whole
    # command line needs one more level of quoting.
    leftover = re.sub(r'" \s* [^"\s] [^"]* \"', '', cmd, flags=re.VERBOSE)
    needs_wrapping = (
        not cmd
        or (leftover and not (leftover.startswith('"') and leftover.endswith('"')))
    )
    return '"%s"' % cmd if needs_wrapping else cmd
def system3(cmd):
    """Run *cmd* through the shell, teeing combined stdout/stderr.

    stdout and stderr are merged and echoed to the console by ``tee``
    while also being captured to a temporary file.

    Returns a ``(exit_status, output_lines)`` tuple.
    """
    import tempfile
    with tempfile.NamedTemporaryFile(mode="w+") as capture:
        # ``tee`` prints to the console and writes the same bytes to our
        # temporary file, which we then read back.
        status = os.system("%s 2>&1 | tee %s" % (cmd, capture.name))
        capture.flush()
        capture.seek(0)
        captured_lines = capture.readlines()
    return status, captured_lines
def system2(cmd, cwd=None, logger=_sentinel, stdout=_sentinel, log_command=_sentinel, timing=_sentinel):
    """Run *cmd* through the shell, tee-ing its combined stdout/stderr.

    A simple replacement for os.system()/subprocess.Popen() that simulates
    Unix tee(): the command's merged stdout/stderr can be echoed to the
    console and simultaneously logged.

    *logger* may be:
      * a string   - treated as a filename, opened for append (UTF-8)
      * a handle   - any object with a callable ``write()``
      * a function - called once per message
      * None       - logging disabled
    If *logger*, *stdout*, *log_command* or *timing* are left at their
    sentinel defaults, the module-level globals of the same name are used.

    Returns ``(returncode, output_lines_as_list)``.
    """
    # BUG FIX: time.clock() was removed in Python 3.8, which made the
    # timing code raise AttributeError there. Prefer perf_counter (3.3+)
    # and only fall back to clock on legacy interpreters. The `or` keeps
    # time.clock from being evaluated eagerly on Pythons that lack it.
    _clock = getattr(time, "perf_counter", None) or time.clock
    t = _clock()
    output = []
    # Resolve sentinel defaults from the module-level configuration globals.
    if log_command is _sentinel: log_command = globals().get('log_command')
    if timing is _sentinel: timing = globals().get('timing')
    if logger is _sentinel:  # default to python native logger if logger parameter is not used
        logger = globals().get('logger')
    if stdout is _sentinel:
        stdout = globals().get('stdout')
    f = sys.stdout
    # `ascii` is too narrow to carry arbitrary command output; prefer UTF-8,
    # which can encode any Unicode text.
    ascii_aliases = ('ascii', 'ANSI_X3.4-1968')
    if not hasattr(f, 'encoding') or not f.encoding or f.encoding in ascii_aliases:
        encoding = 'utf_8'
    else:
        encoding = f.encoding

    def filelogger(msg):
        # Append one line to the file-like object currently bound to ``f``.
        try:
            msg = msg + '\n'  # we'll use the same endline on all platforms, you like it or not
            try:
                f.write(msg)
            except TypeError:
                # Binary-mode handles need bytes rather than text.
                f.write(msg.encode("utf-8"))
        except Exception as e:
            import traceback
            print(' ****** ERROR: Exception: %s\nencoding = %s' % (e, encoding))
            traceback.print_exc(file=sys.stderr)
            sys.exit(-1)
        pass

    def nop(msg):
        pass

    # Pick the logging strategy based on the type of *logger*.
    if not logger:
        mylogger = nop
    elif isinstance(logger, string_types):
        f = codecs.open(logger, "a+b", 'utf_8')
        mylogger = filelogger
    elif isinstance(logger, (types.FunctionType, types.MethodType, types.BuiltinFunctionType)):
        mylogger = logger
    else:
        method_write = getattr(logger, "write", None)
        # if we can call write() we'll accept it :D
        if hasattr(method_write, '__call__'):  # this should work for filehandles
            f = logger
            mylogger = filelogger
        else:
            sys.exit("tee() does not support this type of logger=%s" % type(logger))

    if cwd is not None and not os.path.isdir(cwd):
        os.makedirs(cwd)  # this throws exception if fails
    # NOTE: quote_command() is deliberately NOT applied here: it wrapped the
    # whole command in quotes on Windows, which broke Sumatra there, and
    # "The input line is too long" has not been observed since.
    # cmd = quote_command(cmd)
    p = subprocess.Popen(cmd, cwd=cwd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, close_fds=(platform.system() == 'Linux'))
    if(log_command):
        mylogger("Running: %s" % cmd)
    try:
        while True:
            try:
                line = p.stdout.readline()
                line = line.decode(encoding)
            except Exception as e:
                # Keep going on decode errors: log them and drop the line.
                logging.error(e)
                logging.error("The output of the command could not be decoded as %s\ncmd: %s\n line ignored: %s" %\
                    (encoding, cmd, repr(line)))
                pass
            output.append(line)
            if not line:
                break
            mylogger(line.rstrip('\n\r'))  # endlines are re-added by logging anyway
            if(stdout):
                print(line, end="")
                sys.stdout.flush()
        returncode = p.wait()
    except KeyboardInterrupt:
        # Popen.returncode:
        # "A negative value -N indicates that the child was terminated by signal N (Unix only)."
        # see https://docs.python.org/2/library/subprocess.html#subprocess.Popen.returncode
        returncode = -signal.SIGINT

    if(log_command):
        if(timing):
            def secondsToStr(t):
                from functools import reduce
                return "%02d:%02d:%02d" % reduce(lambda ll,b : divmod(ll[0],b) + ll[1:], [(t*1000,),1000,60,60])[:3]
            mylogger("Returned: %d (execution time %s)\n" % (returncode, secondsToStr(_clock()-t)))
        else:
            mylogger("Returned: %d\n" % (returncode))
    if not returncode == 0:  # running a tool that returns non-zero? this deserves a warning
        logging.warning("Returned: %d from: %s\nOutput %s" % (returncode, cmd, ''.join(output)))
    return(returncode, output)
def system(cmd, cwd=None, logger=None, stdout=None, log_command=_sentinel, timing=_sentinel):
    """Like system2(), but returns only the exit code (not a tuple)."""
    returncode, _output = system2(
        cmd, cwd=cwd, logger=logger, stdout=stdout,
        log_command=log_command, timing=timing)
    return returncode
if __name__ == '__main__':
    # Self-demonstration: exercise each supported logger type in turn.
    import colorer
    import tempfile, os
    logging.basicConfig(level=logging.NOTSET,
                        format='%(message)s')

    # default (stdout)
    print("#1")
    system("python --version")

    # function/method
    print("#2")
    system("python --version", logger=logging.error)

    # function (this is the same as default)
    print("#3")
    system("python --version", logger=print)

    # handler
    print("#4")
    f = tempfile.NamedTemporaryFile()
    system("python --version", logger=f)
    f.close()

    # test with string (filename)
    print("#5")
    (f, fname) = tempfile.mkstemp()
    system("python --version", logger=fname)
    os.close(f)
    os.unlink(fname)

    print("#6")
    # NOTE(review): system() passes logger/stdout explicitly (defaulting to
    # None), so system2() never consults these module globals on its behalf
    # -- rebinding them here appears to have no effect; confirm intent.
    stdout = False
    logger = None
    system("echo test")

    print("#7")
    stdout = True
    system("echo test2")
| {
"content_hash": "a700be8bcf1614181191ec4bb7b9d03b",
"timestamp": "",
"source": "github",
"line_count": 216,
"max_line_length": 154,
"avg_line_length": 39.76851851851852,
"alnum_prop": 0.5443538998835855,
"repo_name": "babsey/sumatra",
"id": "a2123083f30d18e407eb74263f146d72fa55507c",
"size": "8679",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sumatra/tee.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "CSS",
"bytes": "8168"
},
{
"name": "HTML",
"bytes": "104980"
},
{
"name": "JavaScript",
"bytes": "4287"
},
{
"name": "Python",
"bytes": "574995"
},
{
"name": "R",
"bytes": "3325"
},
{
"name": "Shell",
"bytes": "35759"
},
{
"name": "TeX",
"bytes": "7153"
}
],
"symlink_target": ""
} |
"""
Copyright 2015 SmartBear Software
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Ref: https://github.com/swagger-api/swagger-codegen
"""
from pprint import pformat
from six import iteritems
class SimpleStorage100Device(object):
    """
    NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    """

    def __init__(self):
        """
        SimpleStorage100Device - a model defined in Swagger

        :param dict swaggerTypes: The key is attribute name
                                  and the value is attribute type.
        :param dict attributeMap: The key is attribute name
                                  and the value is json key in definition.
        """
        # Maps attribute name -> swagger type name (used by to_dict()).
        self.swagger_types = {
            'name': 'str',
            'oem': 'ResourceOem',
            'status': 'ResourceStatus'
        }

        # Maps attribute name -> JSON key in the Redfish payload.
        self.attribute_map = {
            'name': 'Name',
            'oem': 'Oem',
            'status': 'Status'
        }

        self._name = None
        self._oem = None
        self._status = None

    @property
    def name(self):
        """
        Gets the name of this SimpleStorage100Device.
        The name of the resource or array element.

        :return: The name of this SimpleStorage100Device.
        :rtype: str
        """
        return self._name

    @name.setter
    def name(self, name):
        """
        Sets the name of this SimpleStorage100Device.
        The name of the resource or array element.

        :param name: The name of this SimpleStorage100Device.
        :type: str
        """
        self._name = name

    @property
    def oem(self):
        """
        Gets the oem of this SimpleStorage100Device.

        :return: The oem of this SimpleStorage100Device.
        :rtype: ResourceOem
        """
        return self._oem

    @oem.setter
    def oem(self, oem):
        """
        Sets the oem of this SimpleStorage100Device.

        :param oem: The oem of this SimpleStorage100Device.
        :type: ResourceOem
        """
        self._oem = oem

    @property
    def status(self):
        """
        Gets the status of this SimpleStorage100Device.

        :return: The status of this SimpleStorage100Device.
        :rtype: ResourceStatus
        """
        return self._status

    @status.setter
    def status(self, status):
        """
        Sets the status of this SimpleStorage100Device.

        :param status: The status of this SimpleStorage100Device.
        :type: ResourceStatus
        """
        self._status = status

    def to_dict(self):
        """
        Returns the model properties as a dict
        """
        result = {}

        # Plain .items() iterates identically on Python 2 and 3 here, so the
        # six.iteritems indirection is unnecessary.
        for attr, _ in self.swagger_types.items():
            value = getattr(self, attr)
            if isinstance(value, list):
                # Recurse into lists of nested models.
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            else:
                result[attr] = value

        return result

    def to_str(self):
        """
        Returns the string representation of the model
        """
        return pformat(self.to_dict())

    def __repr__(self):
        """
        For `print` and `pprint`
        """
        return self.to_str()

    def __eq__(self, other):
        """
        Returns true if both objects are equal
        """
        # BUG FIX: comparing against an object without a __dict__ (e.g. an
        # int) used to raise AttributeError. Return NotImplemented for
        # foreign types so Python falls back to its default comparison.
        if not isinstance(other, SimpleStorage100Device):
            return NotImplemented
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """
        Returns true if both objects are not equal
        """
        return not self == other
| {
"content_hash": "db548de4699cea9720065ac87454cf0e",
"timestamp": "",
"source": "github",
"line_count": 162,
"max_line_length": 77,
"avg_line_length": 26.074074074074073,
"alnum_prop": 0.5577651515151515,
"repo_name": "jlongever/redfish-client-python",
"id": "58aea6b2092cea8aeb131e266951331de6ccc3c6",
"size": "4241",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "on_http_redfish_1_0/models/simple_storage_1_0_0_device.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "939832"
}
],
"symlink_target": ""
} |
from django.template import Library
import math

# Template-filter registry for this module; Django discovers it by name.
register = Library()
@register.filter
def absolute(x):
    """Template filter: absolute value of ``x``."""
    return abs(x)
@register.filter
def subtract(x, y):
    """Template filter: ``x - y``."""
    return x - y
@register.filter
def multiply(x, y):
    """Template filter: ``x * y``."""
    return x * y
@register.filter
def divide(x, y):
    """Template filter: true division ``x / y`` (raises on ``y == 0``)."""
    return x / y
@register.filter
def idivide(x, y):
    """Template filter: floor division ``x // y``."""
    return x // y
@register.filter
def log_ratio(x, y, z=math.e):
    """Template filter: ratio of the signed z-th roots of ``x`` and ``y``.

    NOTE(review): despite the name, the inner helper computes
    ``pow(n, 1/z)`` -- a z-th *root*, not a logarithm -- confirm intent.
    Returns 0 when ``x == 0``; raises ZeroDivisionError when ``y == 0``
    (the helper maps 0 to 0).
    """
    if x == 0:
        return 0

    def log(n):
        # Signed root: preserves the sign of negative inputs.
        if n > 0:
            return pow(n, 1 / z)
        return -pow(-n, 1 / z)

    return log(x) / log(y)
@register.filter
def modulo(x, y):
    """Template filter: remainder ``x % y``."""
    return x % y
| {
"content_hash": "15c8ae6d90b422908868a8b0cd6f81cb",
"timestamp": "",
"source": "github",
"line_count": 49,
"max_line_length": 35,
"avg_line_length": 12.46938775510204,
"alnum_prop": 0.5810147299509002,
"repo_name": "dezede/dezede",
"id": "7c04efa58ff49157a6ad04d398e01e2591e61e67",
"size": "611",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "common/templatetags/math_tags.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "10100"
},
{
"name": "HTML",
"bytes": "205803"
},
{
"name": "JavaScript",
"bytes": "53836"
},
{
"name": "Less",
"bytes": "21716"
},
{
"name": "Python",
"bytes": "818952"
},
{
"name": "Shell",
"bytes": "433"
},
{
"name": "TeX",
"bytes": "5922"
}
],
"symlink_target": ""
} |
import sys
# Make the in-tree h2o package (three levels up) importable before any
# site-packages copy.
sys.path.insert(1, "../../../")
import h2o
def binop_star(ip,port):
    """Exercise elementwise multiplication ('*') on H2O frames.

    Covers frame*scalar (both operand orders), vec*vec and frame*frame,
    asserting that result dimensions are preserved and that column sums
    match precomputed reference values for the iris dataset.
    """
    iris = h2o.import_file(path=h2o.locate("smalldata/iris/iris_wheader.csv"))
    rows, cols = iris.dim
    iris.show()

    #frame/scaler
    res = iris * 99
    res_rows, res_cols = res.dim
    assert res_rows == rows and res_cols == cols, "dimension mismatch"
    # Reference sums are the original iris column sums scaled by 99
    # (the class column is excluded via cols-1).
    for x, y in zip([res[c].sum() for c in range(cols-1)], [86773.5, 45351.9, 55816.2, 17800.2]):
        assert abs(x - y) < 1e-7, "unexpected column sums."
    res = 5 * iris
    res_rows, res_cols = res.dim
    assert res_rows == rows and res_cols == cols, "dimension mismatch"

    #frame/vec
    # NOTE(review): mixed frame/vec dimension-mismatch checks are disabled;
    # re-enable if/when the backend raises on them again.
    #try:
    #    res = iris * iris[0]
    #    res.show()
    #    assert False, "expected error. objects of different dimensions not supported."
    #except EnvironmentError:
    #    pass
    #try:
    #    res = iris[2] * iris
    #    res.show()
    #    assert False, "expected error. objects of different dimensions not supported."
    #except EnvironmentError:
    #    pass

    #vec/vec
    res = iris[0] * iris[1]
    res.show()
    assert abs(res.sum() - 2670.98) < 1e-2, "expected different column sum"
    res = iris[0] * iris[1] * iris[2] * iris[3]
    res.show()
    assert abs(res.sum() - 16560.42) < 1e-2, "expected different sum"

    # frame/frame
    res = iris * iris
    res_rows, res_cols = res.dim
    assert res_rows == rows and res_cols == cols, "dimension mismatch"
    res = iris[0:2] * iris[1:3]
    res_rows, res_cols = res.dim
    assert res_rows == rows and res_cols == 2, "dimension mismatch"
    #try:
    #    res = iris * iris[0:3]
    #    res.show()
    #    assert False, "expected error. frames are different dimensions."
    #except EnvironmentError:
    #    pass
# Delegate to the h2o test harness when executed as a script.
if __name__ == "__main__":
    h2o.run_test(sys.argv, binop_star)
| {
"content_hash": "9d5cefb1b1646fdf0ef8260ae1d29a57",
"timestamp": "",
"source": "github",
"line_count": 65,
"max_line_length": 97,
"avg_line_length": 28.2,
"alnum_prop": 0.5859247135842881,
"repo_name": "PawarPawan/h2o-v3",
"id": "0ee89400aebba6deff9b8fe725d1f144bd77d7a0",
"size": "1833",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "h2o-py/tests/testdir_munging/binop/pyunit_binop2_star.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "5090"
},
{
"name": "CSS",
"bytes": "163561"
},
{
"name": "CoffeeScript",
"bytes": "261942"
},
{
"name": "Emacs Lisp",
"bytes": "8914"
},
{
"name": "Groovy",
"bytes": "78"
},
{
"name": "HTML",
"bytes": "140122"
},
{
"name": "Java",
"bytes": "5407730"
},
{
"name": "JavaScript",
"bytes": "88331"
},
{
"name": "Makefile",
"bytes": "31513"
},
{
"name": "Python",
"bytes": "2009340"
},
{
"name": "R",
"bytes": "1818630"
},
{
"name": "Rebol",
"bytes": "3997"
},
{
"name": "Ruby",
"bytes": "299"
},
{
"name": "Scala",
"bytes": "16336"
},
{
"name": "Shell",
"bytes": "44607"
},
{
"name": "TeX",
"bytes": "469926"
}
],
"symlink_target": ""
} |
import chainer
import chainer.testing
import chainer.utils
import mpi4py.MPI
import numpy
import pytest

from chainermn.communicators.naive_communicator import NaiveCommunicator
from chainermn.communicators.pure_nccl_communicator import PureNcclCommunicator
from chainermn.links import MultiNodeBatchNormalization
from chainermn import nccl

# All ranks of the current MPI job; the tests scatter batches across it.
mpi_comm = mpi4py.MPI.COMM_WORLD
class ModelNormalBN(chainer.Chain):
    """Three-layer MLP using ordinary (single-process) BatchNormalization."""

    def __init__(self, n_in=3, n_units=3, n_out=2):
        super(ModelNormalBN, self).__init__()
        with self.init_scope():
            # Hidden layers omit the bias: the following BN re-centers anyway.
            # NOTE(review): l2 takes n_in inputs, which matches the l1 output
            # width only for the defaults (n_in == n_units) -- confirm intent.
            self.l1 = chainer.links.Linear(n_in, n_units, nobias=True)
            self.bn1 = chainer.links.BatchNormalization(n_units)
            self.l2 = chainer.links.Linear(n_in, n_units, nobias=True)
            self.bn2 = chainer.links.BatchNormalization(n_units)
            self.l3 = chainer.links.Linear(n_in, n_out)
        self.train = True

    def __call__(self, x):
        relu = chainer.functions.relu
        hidden = relu(self.bn1(self.l1(x)))
        hidden = relu(self.bn2(self.l2(hidden)))
        return self.l3(hidden)
class ModelDistributedBN(chainer.Chain):
    """Same topology as ModelNormalBN, but batch statistics are reduced
    across *comm* via MultiNodeBatchNormalization."""

    def __init__(self, comm, n_in=3, n_units=3, n_out=2, backend='auto'):
        super(ModelDistributedBN, self).__init__()
        with self.init_scope():
            # NOTE(review): l2 takes n_in inputs, mirroring ModelNormalBN's
            # quirk (matches n_units only for the defaults) -- confirm intent.
            self.l1 = chainer.links.Linear(n_in, n_units, nobias=True)
            self.bn1 = MultiNodeBatchNormalization(
                n_units, comm, communication_backend=backend)
            self.l2 = chainer.links.Linear(n_in, n_units, nobias=True)
            self.bn2 = MultiNodeBatchNormalization(
                n_units, comm, communication_backend=backend)
            self.l3 = chainer.links.Linear(n_in, n_out)
        self.train = True

    def __call__(self, x):
        relu = chainer.functions.relu
        hidden = relu(self.bn1(self.l1(x)))
        hidden = relu(self.bn2(self.l2(hidden)))
        return self.l3(hidden)
def check_multi_node_bn(comm, use_gpu=False, backend='auto',
                        dtype=numpy.float32):
    """Tests correctness of MultiNodeBatchNormalization.

    This test verifies MultiNodeBatchNormalization by comparing
    the following four configurations.

    (1) Single worker, normal BatchNormalization
    (2) Multiple workers, normal BatchNormalization
    (3) Single worker, MultiNodeBatchNormalization
    (4) Multiple workers, MultiNodeBatchNormalization

    Single worker: only using the result of worker 0, which uses the whole
    batch.
    Multiple workers: Each worker uses the 1/n part of the whole batch,
    where n is the number of nodes, and gradient is aggregated.

    This test conducts the forward and backward computation once for the
    deterministic model parameters and an input batch, and checks the
    gradients of parameters.

    The purpose of MultiNodeBatchNormalization is to make the results of
    (4) exactly the same as (1). Therefore, the essential part is to
    check that the results of (1) and (4) are the same. The results of (3)
    should also be the same as them. In contrast, the results of (2) are
    not necessarily the same, and we can expect them to be almost always
    different. Therefore, we also check that the results of (2) differ
    from them, to see that this test is working correctly.
    """
    local_batchsize = 8
    global_batchsize = local_batchsize * comm.size
    ndim = 3

    # Fixed seed so every rank generates the identical global batch.
    numpy.random.seed(71)
    x = numpy.random.random(
        (global_batchsize, ndim)).astype(numpy.float32)
    y = numpy.random.randint(
        0, 1, size=global_batchsize, dtype=numpy.int32)

    # Each rank receives its 1/comm.size slice of the global batch.
    x_local = comm.mpi_comm.scatter(
        x.reshape(comm.size, local_batchsize, ndim))
    y_local = comm.mpi_comm.scatter(
        y.reshape(comm.size, local_batchsize))

    # With mixed16, inputs and Linear parameters are fp16 while the BN
    # parameters stay fp32; otherwise everything shares `dtype`.
    io_dtype = dtype
    l_dtype = dtype
    bn_dtype = dtype
    if dtype == chainer.mixed16:
        io_dtype = numpy.float16
        l_dtype = numpy.float16
        bn_dtype = numpy.float32
    x = x.astype(io_dtype)
    x_local = x_local.astype(io_dtype)

    if use_gpu:
        x = chainer.cuda.to_gpu(x)
        y = chainer.cuda.to_gpu(y)
        x_local = chainer.cuda.to_gpu(x_local)
        y_local = chainer.cuda.to_gpu(y_local)

    cls = chainer.links.Classifier
    with chainer.using_config('dtype', dtype):
        # Single worker
        m1 = cls(ModelNormalBN())
        # Multi worker, Ghost BN
        m2 = cls(ModelNormalBN())
        # Single worker, MNBN
        m3 = cls(ModelDistributedBN(comm, backend=backend))
        # Multi worker, MNBN
        m4 = cls(ModelDistributedBN(comm, backend=backend))
        # NOTE: m1, m3 and m4 should behave in the same way.
        # m2 may be different.

        if use_gpu:
            m1.to_gpu()
            m2.to_gpu()
            m3.to_gpu()
            m4.to_gpu()

        # Start all four models from identical parameters.
        m2.copyparams(m1)
        m3.copyparams(m1)
        m4.copyparams(m1)

        # (1) Full batch through the normal-BN model.
        l1 = m1(x, y)
        m1.cleargrads()
        l1.backward()

        # (2) Local batch through a normal-BN model, gradients allreduced.
        l2 = m2(x_local, y_local)
        m2.cleargrads()
        l2.backward()
        comm.allreduce_grad(m2)

        # (3) Full batch through the MNBN model.
        l3 = m3(x, y)
        m3.cleargrads()
        l3.backward()

        # (4) Local batch through the MNBN model, gradients allreduced.
        l4 = m4(x_local, y_local)
        m4.cleargrads()
        l4.backward()
        comm.allreduce_grad(m4)

        # Compare gradients on rank 0 only; after allreduce_grad every
        # rank holds the same aggregated gradients anyway.
        if comm.rank == 0:
            for p1, p2, p3, p4 in zip(
                    sorted(m1.namedparams()),
                    sorted(m2.namedparams()),
                    sorted(m3.namedparams()),
                    sorted(m4.namedparams())):
                name = p1[0]
                assert (p2[0] == name)
                assert (p3[0] == name)
                assert (p4[0] == name)
                # Parameters under '/l*' belong to Linear links; the rest
                # belong to batch-normalization links.
                if '/l' in name:
                    param_dtype = l_dtype
                else:
                    param_dtype = bn_dtype
                assert (p1[1].data.dtype == param_dtype)
                assert (p2[1].data.dtype == param_dtype)
                assert (p3[1].data.dtype == param_dtype)
                assert (p4[1].data.dtype == param_dtype)

                # fp16 gradients need looser tolerances.
                assert_option = {'atol': 1e-4, 'rtol': 1e-3}
                if param_dtype == numpy.float16:
                    assert_option = {'atol': 1e-2, 'rtol': 1e-2}
                chainer.testing.assert_allclose(p1[1].grad, p3[1].grad,
                                                **assert_option)
                chainer.testing.assert_allclose(p1[1].grad, p4[1].grad,
                                                **assert_option)

                # This is to see that this test is valid.
                if comm.size >= 2:
                    assert_not_allclose(p1[1].grad, p2[1].grad)
def check_link_copyable(comm):
    # Regression test for #5854: links that hold a communicator must
    # survive copy(mode='copy'), both inside a chain and standalone.
    for link in (ModelDistributedBN(comm),
                 MultiNodeBatchNormalization(10, comm)):
        assert link.copy(mode='copy') is not None
def assert_not_allclose(x, y, atol=1e-5, rtol=1e-4, verbose=True):
    """Assert that ``x`` and ``y`` are *not* element-wise close."""
    def as_cpu_array(value):
        return chainer.cuda.to_cpu(chainer.utils.force_array(value))

    # assert_allclose must fail for the values to count as "not close".
    with pytest.raises(AssertionError):
        chainer.testing.assert_allclose(
            as_cpu_array(x), as_cpu_array(y),
            atol=atol, rtol=rtol, verbose=verbose)
def create_communicator(communicator_class, mpi_comm, use_gpu):
    """Build a communicator of the requested class.

    GPU runs of non-PureNccl communicators are skipped when the available
    NCCL build is older than 2.0.
    """
    is_pure_nccl = communicator_class == PureNcclCommunicator
    if use_gpu and not is_pure_nccl and nccl.get_build_version() < 2000:
        pytest.skip('This test requires NCCL version >= 2.0')
    communicator = communicator_class(mpi_comm)
    if use_gpu:
        # Bind this process to its per-node GPU.
        chainer.cuda.get_device_from_id(communicator.intra_rank).use()
    return communicator
def test_version_check():
    """MultiNodeBatchNormalization rejects chainer 1.x at construction."""
    communicator = create_communicator(
        NaiveCommunicator, mpi_comm, use_gpu=False)
    on_chainer_v1 = chainer.__version__.startswith('1.')
    if not on_chainer_v1:
        # Expecting no exceptions on supported versions.
        MultiNodeBatchNormalization(3, communicator)
    else:
        with pytest.raises(RuntimeError):
            MultiNodeBatchNormalization(3, communicator)
@pytest.mark.parametrize(('communicator_class', 'backend', 'dtype'), [
    (NaiveCommunicator, 'mpi', numpy.float16),
    (NaiveCommunicator, 'mpi', numpy.float32),
    (NaiveCommunicator, 'mpi', chainer.mixed16)])
def test_multi_node_bn_cpu(communicator_class, backend, dtype):
    """Run the BN correctness and copyability checks on CPU."""
    communicator = create_communicator(
        communicator_class, mpi_comm, use_gpu=False)
    check_multi_node_bn(communicator, backend=backend, dtype=dtype)
    check_link_copyable(communicator)
    # Keep all ranks in lockstep before the next test starts.
    communicator.mpi_comm.barrier()
@pytest.mark.parametrize(('communicator_class', 'backend', 'dtype'), [
    (NaiveCommunicator, 'mpi', numpy.float32),
    (PureNcclCommunicator, 'mpi', numpy.float32),
    (PureNcclCommunicator, 'mpi', numpy.float16),
    (PureNcclCommunicator, 'mpi', chainer.mixed16),
    (PureNcclCommunicator, 'nccl', numpy.float32),
    (PureNcclCommunicator, 'nccl', numpy.float16),
    (PureNcclCommunicator, 'nccl', chainer.mixed16)])
@chainer.testing.attr.gpu
def test_multi_node_bn_gpu(communicator_class, backend, dtype):
    """Run the BN correctness and copyability checks on GPU."""
    communicator = create_communicator(
        communicator_class, mpi_comm, use_gpu=True)
    check_multi_node_bn(communicator, use_gpu=True, backend=backend,
                        dtype=dtype)
    check_link_copyable(communicator)
    # Drain pending GPU work and synchronize the ranks before tearing
    # down the NCCL communicator (present on PureNccl configurations).
    chainer.cuda.Stream.null.synchronize()
    communicator.mpi_comm.barrier()
    if hasattr(communicator, 'nccl_comm'):
        communicator.nccl_comm.destroy()
@pytest.mark.parametrize(('communicator_class', 'backend'), [
    (NaiveCommunicator, 'mpi'),
    (NaiveCommunicator, 'auto')])
def test_support_communication_backend_cpu(communicator_class, backend):
    """Constructing MNBN with a supported backend succeeds on CPU."""
    communicator = create_communicator(
        communicator_class, mpi_comm, use_gpu=False)
    # A single-unit link is enough to exercise backend selection.
    MultiNodeBatchNormalization(1, communicator,
                                communication_backend=backend)
@pytest.mark.parametrize(('communicator_class', 'backend'), [
    (NaiveCommunicator, 'nccl'),
    (NaiveCommunicator, 'dummy')])
def test_unsupport_communication_backend_cpu(communicator_class, backend):
    """An unsupported backend raises ValueError at construction on CPU."""
    communicator = create_communicator(
        communicator_class, mpi_comm, use_gpu=False)
    with pytest.raises(ValueError):
        MultiNodeBatchNormalization(1, communicator,
                                    communication_backend=backend)
@pytest.mark.parametrize(('communicator_class', 'backend'), [
    (NaiveCommunicator, 'mpi'),
    (NaiveCommunicator, 'auto'),
    (PureNcclCommunicator, 'mpi'),
    (PureNcclCommunicator, 'nccl'),
    (PureNcclCommunicator, 'auto')])
@chainer.testing.attr.gpu
def test_support_communication_backend_gpu(communicator_class, backend):
    """Constructing MNBN with a supported backend succeeds on GPU."""
    communicator = create_communicator(
        communicator_class, mpi_comm, use_gpu=True)
    # A single-unit link is enough to exercise backend selection.
    MultiNodeBatchNormalization(1, communicator,
                                communication_backend=backend)
@pytest.mark.parametrize(('communicator_class', 'backend'), [
    (NaiveCommunicator, 'nccl'),
    (NaiveCommunicator, 'dummy'),
    (PureNcclCommunicator, 'dummy')])
@chainer.testing.attr.gpu
def test_unsupport_communication_backend_gpu(communicator_class, backend):
    """An unsupported backend raises ValueError at construction on GPU."""
    communicator = create_communicator(
        communicator_class, mpi_comm, use_gpu=True)
    with pytest.raises(ValueError):
        MultiNodeBatchNormalization(1, communicator,
                                    communication_backend=backend)
| {
"content_hash": "e49a426d91cc329caf7c2602390c3ee7",
"timestamp": "",
"source": "github",
"line_count": 312,
"max_line_length": 79,
"avg_line_length": 35.881410256410255,
"alnum_prop": 0.629477445288075,
"repo_name": "keisuke-umezawa/chainer",
"id": "df7b0c6d4ea0ac7ca3df9372f24853952d820e09",
"size": "11195",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tests/chainermn_tests/links_tests/test_batch_normalization.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "3368"
},
{
"name": "C",
"bytes": "70"
},
{
"name": "C++",
"bytes": "1492567"
},
{
"name": "CMake",
"bytes": "47556"
},
{
"name": "Cuda",
"bytes": "116243"
},
{
"name": "Dockerfile",
"bytes": "1457"
},
{
"name": "PowerShell",
"bytes": "7197"
},
{
"name": "Python",
"bytes": "5655558"
},
{
"name": "Shell",
"bytes": "30075"
}
],
"symlink_target": ""
} |
from testutils import mock
import errno
import sys
from rbuild_test import rbuildhelp
from rbuild.internal import main
from rbuild import handle
from rbuild import errors
from rbuild import rbuildcfg
from rmake import errors as rmakeerrors
class MainTest(rbuildhelp.RbuildHelper):
    """Unit tests for rbuild.internal.main: exception translation at the
    CLI boundary, command lookup and dispatch, usage-string generation,
    and config initialization behavior."""

    def testExceptions(self):
        # Build a function that ignores its arguments and raises.
        def genRaiseExceptionFn(exception):
            def fn(*args, **kw):
                raise exception
            return fn

        # Make RbuildMain.getCommand raise `exception`, then run the CLI
        # entry point (optionally with --debug-all) capturing its output.
        def _testException(exception, debugAll=False):
            self.mock(main.RbuildMain, 'getCommand',
                      genRaiseExceptionFn(exception))
            if debugAll:
                return self.captureOutput(main.main,
                                          ['rbuild', 'help', '--debug-all'])
            return self.captureOutput(main.main,
                                      ['rbuild', 'help'])

        # if rMake isn't running, we get rmake.errors.OpenError; main
        # appends a hint about restarting the rMake service.
        self.logFilter.add()
        rc, _ = _testException(rmakeerrors.OpenError('Error communicating to server at unix:///var/lib/rmake/socket: Connection refused'))
        self.assertEquals(self.logFilter.records, ['error: Error communicating to server at unix:///var/lib/rmake/socket: Connection refused\n\nCould not contact the rMake server. Perhaps the rMake service is not\nrunning. To start the rMake service, as root, try running the command:\nservice rmake restart'])
        self.logFilter.clear()

        # other rmake errors are displayed verbatim, as they are designed for
        self.logFilter.add()
        rc, _ = _testException(rmakeerrors.RmakeError('Dazed and Confused'))
        self.assertEquals(self.logFilter.records, ['error: Dazed and Confused'])
        self.logFilter.clear()

        # pipe errors generally mean EOF when writing to less, e.g.;
        # they are treated as a clean exit (rc 0).
        rc, _ = _testException(IOError(errno.EPIPE, 'Pipe Error'))
        self.assertEquals(rc, 0)
        # Any other IOError (and any unexpected exception) propagates.
        self.assertRaises(IOError, _testException,
                          IOError('Other IO Error'))
        self.assertRaises(RuntimeError, _testException,
                          RuntimeError('Other IO Error'))

        # RbuildError is logged as an error message and exits with rc 1.
        self.logFilter.add()
        rc, _ = _testException(errors.RbuildError('foo'))
        self.assertEquals(self.logFilter.records, ['error: foo'])
        self.assertEquals(rc, 1)
        self.logFilter.remove()

        # test with --debug-all: the exception is re-raised instead of
        # being converted to an exit code.
        rc, _ = _testException(errors.RbuildError('foo'))
        self.assertEquals(rc, 1)
        self.assertRaises(errors.RbuildError, _testException,
                          errors.RbuildError('foo'), debugAll=True)

        # main() returns 0 when RbuildMain.main returns None, otherwise
        # passes through the return code; argv defaults to sys.argv.
        self.mock(main.RbuildMain, 'main', lambda *args, **kw: None)
        assert(main.main(['rbuild', 'help']) == 0)
        self.mock(main.RbuildMain, 'main', lambda *args, **kw: 23)
        assert(main.main(['rbuild', 'help']) == 23)
        oldargv = sys.argv
        try:
            sys.argv = ['rbuild', 'help']
            assert(main.main() == 23)
        finally:
            sys.argv = oldargv

    def testGetCommand(self):
        # 'build' on the command line maps to the BuildCommand plugin.
        mainHandler = main.RbuildMain()
        cmd = mainHandler.getCommand(['rbuild', 'build'], self.rbuildCfg)
        self.assertEquals(cmd.__class__.__name__, 'BuildCommand')

    def testRunCommand(self):
        mainHandler = main.RbuildMain()
        cmd = mainHandler.getCommand(['rbuild', 'help'], self.rbuildCfg)
        productStore = mock.MockObject()
        h = mainHandler.handle
        h.productStore = productStore
        cfg = h.getConfig()
        mock.mock(h.Config, 'isComplete')
        h.Config.isComplete._mock.setReturn(False, cfg)
        mock.mockMethod(h.Config.initializeConfig)
        outputList = []
        # 'help' must run even with an incomplete config: no config
        # initialization should be attempted for it.
        rc, txt = self.captureOutput(mainHandler.runCommand,
                                     cmd, self.rbuildCfg, {}, ['rbuild', 'help'])
        h.Config.initializeConfig._mock.assertNotCalled()

        # A 'stage' argument is consumed by runCommand and applied to the
        # product store before the command itself runs.
        cmd = mainHandler.getCommand(['rbuild', 'build'], self.rbuildCfg)
        self.rbuildCfg.serverUrl = 'some value'
        productStore = mock.MockObject()
        mainHandler.handle.productStore = productStore
        self.checkCall(mainHandler.runCommand,
                       [cmd, self.rbuildCfg, {'stage' : 'foo'}, [] ],
                       {},
                       'rbuild_plugins.build.BuildCommand.runCommand',
                       [cmd, handle.RbuildHandle, {}, []])
        productStore.setActiveStageName._mock.assertCalled('foo')

        # A command that raises PluginError: runCommand logs through
        # ui.popContext and re-raises the original exception.
        class FakeCommand:
            def runCommand(self, handle, argSet, args):
                raise errors.PluginError('eek')
        cmd = FakeCommand()
        h = mainHandler.handle
        h.ui = mock.MockObject()
        self.assertRaises(errors.PluginError, mainHandler.runCommand, cmd,
                          mock.MockObject(), {}, [])
        self.assertEquals(h.ui.popContext._mock.popCall()[0][0],
                          'Command failed with exception %r')
        h.ui.popContext._mock.raiseErrorOnAccess(IOError)
        # Note: not IOError -- the IOError should be ignored and the
        # PluginError should propagate
        self.assertRaises(errors.PluginError, mainHandler.runCommand, cmd,
                          mock.MockObject(), {}, [])

    def testUsageByClass(self):
        mainHandler = main.RbuildMain()
        #Test with a regular command
        cmd = mainHandler.getCommand(['rbuild', 'config'], self.rbuildCfg)
        usage = mainHandler._getUsageByClass(cmd)
        self.assertEquals(usage.strip(), 'rbuild config')
        # Multi-form paramHelp yields one usage line per form.
        cmd = mainHandler.getCommand(['rbuild', 'init'], self.rbuildCfg)
        usage = mainHandler._getUsageByClass(cmd)
        self.assertEquals(usage.strip(), 'rbuild init <project shortname> <version>\n or: rbuild init <label>')
        # Any object with `name` and `paramHelp` attributes works.
        class FakeCommand:
            name = 'bar'
            paramHelp = 'some help'
        cmd = FakeCommand()
        usage = mainHandler._getUsageByClass(cmd)
        self.assertEquals(usage, 'rbuild bar some help')
        # Without the program name set, usage generation is a coding error.
        cmd = mainHandler.getCommand(['rbuild', 'config'], self.rbuildCfg)
        mainHandler.name = None
        self.assertRaises(AssertionError, mainHandler._getUsageByClass, cmd)

    def testInitializeConfig(self):
        # An incomplete config (isComplete -> False) still dispatches to
        # the command; initializeConfig is mocked out here, so this only
        # checks that dispatch proceeds.
        mainHandler = main.RbuildMain()
        cmd = mainHandler.getCommand(['rbuild', 'build'], self.rbuildCfg)
        self.rbuildCfg.serverUrl = 'some value'
        productStore = mock.MockObject()
        h = mainHandler.handle
        h.productStore = productStore
        cfg = h.getConfig()
        mock.mock(h.Config, 'isComplete')
        h.Config.isComplete._mock.setReturn(False, cfg)
        mock.mockMethod(h.Config.initializeConfig)
        self.checkCall(mainHandler.runCommand,
                       [cmd, self.rbuildCfg, {'stage' : 'foo'}, [] ],
                       {},
                       'rbuild_plugins.build.BuildCommand.runCommand',
                       [cmd, handle.RbuildHandle, {}, []])
| {
"content_hash": "a48f09afc07d2034b39c847316f32af5",
"timestamp": "",
"source": "github",
"line_count": 171,
"max_line_length": 312,
"avg_line_length": 40.239766081871345,
"alnum_prop": 0.6068885336433658,
"repo_name": "fedora-conary/rbuild",
"id": "0722b21c25fe327fe1b03725942be1bc5c68aad6",
"size": "7486",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "rbuild_test/unit_test/internaltest/maintest.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "686899"
},
{
"name": "Shell",
"bytes": "3446"
}
],
"symlink_target": ""
} |
from rest_framework import generics, permissions as drf_permissions
from rest_framework.exceptions import ValidationError, NotFound
from framework.auth.oauth_scopes import CoreScopes
from website.project.model import Q, Node
from api.base import permissions as base_permissions
from api.base.views import JSONAPIBaseView
from api.base.serializers import HideIfRetraction
from api.registrations.serializers import (
RegistrationSerializer,
RegistrationDetailSerializer,
RegistrationContributorsSerializer,
)
from api.nodes.views import (
NodeMixin, ODMFilterMixin, NodeContributorsList, NodeRegistrationsList,
NodeChildrenList, NodeCommentsList, NodeProvidersList, NodeLinksList,
NodeContributorDetail, NodeFilesList, NodeLinksDetail, NodeFileDetail,
NodeAlternativeCitationsList, NodeAlternativeCitationDetail, NodeLogList,
NodeInstitutionDetail, WaterButlerMixin)
from api.registrations.serializers import RegistrationNodeLinksSerializer
from api.nodes.permissions import (
ContributorOrPublic,
ReadOnlyIfRegistration,
)
from api.base.utils import get_object_or_error
class RegistrationMixin(NodeMixin):
    """Convenience mixin that resolves the current registration from the
    URL.  The node is looked up from the ``node_id`` kwarg; collections
    and non-registration nodes yield a 404.
    """
    serializer_class = RegistrationSerializer
    node_lookup_url_kwarg = 'node_id'

    def get_node(self, check_object_permissions=True):
        lookup_value = self.kwargs[self.node_lookup_url_kwarg]
        node = get_object_or_error(
            Node,
            lookup_value,
            display_name='node'
        )
        # Folders/collections are a separate resource, and this URL tree
        # only serves registrations -- anything else is a 404.
        is_servable = node.is_registration and not node.is_folder
        if not is_servable:
            raise NotFound
        if check_object_permissions:
            # May raise a permission denied
            self.check_object_permissions(self.request, node)
        return node
class RegistrationList(JSONAPIBaseView, generics.ListAPIView, ODMFilterMixin):
    """Node Registrations.

    Registrations are read-only snapshots of a project. This view is a list of all current registrations for which a user
    has access. A retracted registration will display a limited subset of information, namely, title, description,
    date_created, registration, retracted, date_registered, retraction_justification, and registration supplement. All
    other fields will be displayed as null. Additionally, the only relationships permitted to be accessed for a retraction
    are the contributors.

    Each resource contains the full representation of the registration, meaning additional requests to an individual
    registrations's detail view are not necessary. Unregistered nodes cannot be accessed through this endpoint.

    ##Registration Attributes

    Registrations have the "registrations" `type`.

        name                            type               description
        =======================================================================================================
        title                           string             title of the registered project or component
        description                     string             description of the registered node
        category                        string             node category, must be one of the allowed values
        date_created                    iso8601 timestamp  timestamp that the node was created
        date_modified                   iso8601 timestamp  timestamp when the node was last updated
        tags                            array of strings   list of tags that describe the registered node
        fork                            boolean            is this project a fork?
        registration                    boolean            has this project been registered?
        dashboard                       boolean            is this registered node visible on the user dashboard?
        public                          boolean            has this registration been made publicly-visible?
        retracted                       boolean            has this registration been retracted?
        date_registered                 iso8601 timestamp  timestamp that the registration was created
        retraction_justification        string             reasons for retracting the registration
        pending_retraction              boolean            is this registration pending retraction?
        pending_registration_approval   boolean            is this registration pending approval?
        pending_embargo                 boolean            is this registration pending an embargo?
        registered_meta                 dictionary         registration supplementary information
        registration_supplement         string             registration template

    ##Relationships

    ###Registered from

    The registration is branched from this node.

    ###Registered by

    The registration was initiated by this user.

    ###Other Relationships

    See documentation on registered_from detail view. A registration has many of the same properties as a node.

    ##Links

    See the [JSON-API spec regarding pagination](http://jsonapi.org/format/1.0/#fetching-pagination).

    #This Request/Response

    """
    permission_classes = (
        drf_permissions.IsAuthenticatedOrReadOnly,
        base_permissions.TokenHasScope,
    )

    required_read_scopes = [CoreScopes.NODE_REGISTRATIONS_READ]
    required_write_scopes = [CoreScopes.NODE_REGISTRATIONS_WRITE]

    serializer_class = RegistrationSerializer
    view_category = 'registrations'
    view_name = 'registration-list'

    # overrides ODMFilterMixin
    def get_default_odm_query(self):
        # Non-deleted registrations that are either public or list the
        # requesting user as a contributor.
        base_query = (
            Q('is_deleted', 'ne', True) &
            Q('is_registration', 'eq', True)
        )
        user = self.request.user
        permission_query = Q('is_public', 'eq', True)
        if not user.is_anonymous():
            permission_query = (permission_query | Q('contributors', 'eq', user._id))
        query = base_query & permission_query
        return query

    def is_blacklisted(self, query):
        # True when any filter in `query` targets a serializer field that
        # is hidden on retracted registrations (HideIfRetraction).
        for query_param in query.nodes:
            field_name = getattr(query_param, 'attribute', None)
            if not field_name:
                continue
            field = self.serializer_class._declared_fields.get(field_name)
            if isinstance(field, HideIfRetraction):
                return True
        return False

    # overrides ListAPIView
    def get_queryset(self):
        query = self.get_query_from_request()
        blacklisted = self.is_blacklisted(query)
        nodes = Node.find(query)
        # If attempting to filter on a blacklisted field, exclude retractions.
        if blacklisted:
            # NOTE(review): this materializes every matching id in memory
            # before re-querying -- presumably acceptable for current data
            # sizes; verify before reusing on larger result sets.
            non_retracted_list = [node._id for node in nodes if not node.is_retracted]
            non_retracted_nodes = Node.find(Q('_id', 'in', non_retracted_list))
            return non_retracted_nodes
        return nodes
class RegistrationDetail(JSONAPIBaseView, generics.RetrieveAPIView, RegistrationMixin, WaterButlerMixin):
    """Node Registrations.

    Registrations are read-only snapshots of a project. This view shows details about the given registration.

    Each resource contains the full representation of the registration, meaning additional requests to an individual
    registration's detail view are not necessary. A retracted registration will display a limited subset of information,
    namely, title, description, date_created, registration, retracted, date_registered, retraction_justification, and registration
    supplement. All other fields will be displayed as null. Additionally, the only relationships permitted to be accessed
    for a retracted registration are the contributors.

    ##Registration Attributes

    Registrations have the "registrations" `type`.

        name                            type               description
        =======================================================================================================
        title                           string             title of the registered project or component
        description                     string             description of the registered node
        category                        string             node category, must be one of the allowed values
        date_created                    iso8601 timestamp  timestamp that the node was created
        date_modified                   iso8601 timestamp  timestamp when the node was last updated
        tags                            array of strings   list of tags that describe the registered node
        fork                            boolean            is this project a fork?
        registration                    boolean            has this project been registered?
        dashboard                       boolean            is this registered node visible on the user dashboard?
        public                          boolean            has this registration been made publicly-visible?
        retracted                       boolean            has this registration been retracted?
        date_registered                 iso8601 timestamp  timestamp that the registration was created
        retraction_justification        string             reasons for retracting the registration
        pending_retraction              boolean            is this registration pending retraction?
        pending_registration_approval   boolean            is this registration pending approval?
        pending_embargo                 boolean            is this registration pending an embargo?
        registered_meta                 dictionary         registration supplementary information
        registration_supplement         string             registration template

    ##Relationships

    ###Registered from

    The registration is branched from this node.

    ###Registered by

    The registration was initiated by this user.

    ###Other Relationships

    See documentation on registered_from detail view. A registration has many of the same properties as a node.

    ##Links

        self:  the canonical api endpoint of this registration
        html:  this registration's page on the OSF website

    #This Request/Response

    """
    permission_classes = (
        drf_permissions.IsAuthenticatedOrReadOnly,
        ContributorOrPublic,
        ReadOnlyIfRegistration,
        base_permissions.TokenHasScope,
    )

    required_read_scopes = [CoreScopes.NODE_REGISTRATIONS_READ]
    required_write_scopes = [CoreScopes.NODE_REGISTRATIONS_WRITE]

    serializer_class = RegistrationDetailSerializer
    view_category = 'registrations'
    view_name = 'registration-detail'

    # overrides RetrieveAPIView
    def get_object(self):
        registration = self.get_node()
        # get_node() already 404s on non-registrations (see
        # RegistrationMixin), so this branch is defensive; it turns any
        # slip-through into an explicit 400.
        if not registration.is_registration:
            raise ValidationError('This is not a registration.')
        return registration
class RegistrationContributorsList(NodeContributorsList, RegistrationMixin):
    """Contributors of a registration, served under the registration URL tree."""
    view_category = 'registrations'
    view_name = 'registration-contributors'

    def get_serializer_class(self):
        # Always use the registration-specific contributor serializer,
        # regardless of what the parent view would choose.
        return RegistrationContributorsSerializer
class RegistrationContributorDetail(NodeContributorDetail, RegistrationMixin):
    """Detail view for a single contributor of a registration."""
    view_category = 'registrations'
    view_name = 'registration-contributor-detail'
    serializer_class = RegistrationContributorsSerializer
class RegistrationChildrenList(NodeChildrenList, RegistrationMixin):
    """Children (components) of a registration."""
    view_category = 'registrations'
    view_name = 'registration-children'
class RegistrationCommentsList(NodeCommentsList, RegistrationMixin):
    """Comments on a registration."""
    view_category = 'registrations'
    view_name = 'registration-comments'
class RegistrationLogList(NodeLogList, RegistrationMixin):
    """Log entries of a registration."""
    view_category = 'registrations'
    view_name = 'registration-logs'
class RegistrationProvidersList(NodeProvidersList, RegistrationMixin):
    """Storage providers attached to a registration."""
    view_category = 'registrations'
    view_name = 'registration-providers'
class RegistrationNodeLinksList(NodeLinksList, RegistrationMixin):
    """Node links (pointers) of a registration."""
    view_category = 'registrations'
    view_name = 'registration-pointers'
    serializer_class = RegistrationNodeLinksSerializer
class RegistrationNodeLinksDetail(NodeLinksDetail, RegistrationMixin):
    """Detail view for a single node link (pointer) of a registration."""
    view_category = 'registrations'
    view_name = 'registration-pointer-detail'
    serializer_class = RegistrationNodeLinksSerializer
class RegistrationRegistrationsList(NodeRegistrationsList, RegistrationMixin):
    """Registrations made from this registration."""
    view_category = 'registrations'
    # NOTE(review): this view_name duplicates RegistrationProvidersList's
    # ('registration-providers') and looks like a copy-paste slip --
    # 'registration-registrations' was presumably intended.  Verify
    # against the URL configuration before changing, since links are
    # reversed by view name.
    view_name = 'registration-providers'
class RegistrationFilesList(NodeFilesList, RegistrationMixin):
    """Files stored under a registration."""
    view_category = 'registrations'
    view_name = 'registration-files'
class RegistrationFileDetail(NodeFileDetail, RegistrationMixin):
    """Detail view for a single file of a registration."""
    view_category = 'registrations'
    view_name = 'registration-file-detail'
class RegistrationAlternativeCitationsList(NodeAlternativeCitationsList, RegistrationMixin):
    """Alternative citations of a registration."""
    view_category = 'registrations'
    view_name = 'registration-alternative-citations'
class RegistrationAlternativeCitationDetail(NodeAlternativeCitationDetail, RegistrationMixin):
    """Detail view for a single alternative citation of a registration."""
    view_category = 'registrations'
    view_name = 'registration-alternative-citation-detail'
class RegistrationInstitutionDetail(NodeInstitutionDetail, RegistrationMixin):
    """Institution affiliated with a registration."""
    view_category = 'registrations'
    view_name = 'registration-institution-detail'
| {
"content_hash": "a3af75c67ea757a7a8d157a22c52e937",
"timestamp": "",
"source": "github",
"line_count": 315,
"max_line_length": 130,
"avg_line_length": 43.095238095238095,
"alnum_prop": 0.6713812154696133,
"repo_name": "billyhunt/osf.io",
"id": "5f3499b917fb48f43b86dd8157c8f74816296219",
"size": "13575",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "api/registrations/views.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "133568"
},
{
"name": "HTML",
"bytes": "58491"
},
{
"name": "JavaScript",
"bytes": "1369885"
},
{
"name": "Mako",
"bytes": "619064"
},
{
"name": "Perl",
"bytes": "13885"
},
{
"name": "Python",
"bytes": "4835804"
},
{
"name": "Shell",
"bytes": "2118"
}
],
"symlink_target": ""
} |
from PyQt5 import QtCore, QtGui, QtWidgets
# NOTE(review): this class appears to be pyuic5 output (generated from a Qt
# Designer .ui file) -- presumably edits belong in the .ui source and the
# file should be regenerated; confirm before hand-editing.
class Ui_batch_iv_analysis(object):
    """Generated UI definition for the batch-iv-analysis main window."""

    def setupUi(self, batch_iv_analysis):
        # Build the whole widget tree on the given main window: a central tab
        # widget (Results / Plots / Settings / Constraints), menu bar, status
        # bar, an event-log dock widget, and the menu actions/shortcuts.
        batch_iv_analysis.setObjectName("batch_iv_analysis")
        batch_iv_analysis.resize(847, 809)
        self.centralwidget = QtWidgets.QWidget(batch_iv_analysis)
        self.centralwidget.setObjectName("centralwidget")
        self.gridLayout = QtWidgets.QGridLayout(self.centralwidget)
        self.gridLayout.setObjectName("gridLayout")
        self.tehTabs = QtWidgets.QTabWidget(self.centralwidget)
        self.tehTabs.setObjectName("tehTabs")
        # --- "Results" tab: read-only, sortable results table ---
        self.resultsTabs = QtWidgets.QWidget()
        self.resultsTabs.setObjectName("resultsTabs")
        self.gridLayout_3 = QtWidgets.QGridLayout(self.resultsTabs)
        self.gridLayout_3.setContentsMargins(0, 0, 0, 0)
        self.gridLayout_3.setObjectName("gridLayout_3")
        self.tableWidget = QtWidgets.QTableWidget(self.resultsTabs)
        self.tableWidget.setEditTriggers(QtWidgets.QAbstractItemView.NoEditTriggers)
        self.tableWidget.setAlternatingRowColors(True)
        self.tableWidget.setSelectionMode(QtWidgets.QAbstractItemView.ExtendedSelection)
        self.tableWidget.setSelectionBehavior(QtWidgets.QAbstractItemView.SelectItems)
        self.tableWidget.setColumnCount(0)
        self.tableWidget.setObjectName("tableWidget")
        self.tableWidget.setRowCount(0)
        self.tableWidget.horizontalHeader().setCascadingSectionResizes(False)
        self.tableWidget.horizontalHeader().setDefaultSectionSize(92)
        self.tableWidget.horizontalHeader().setSortIndicatorShown(True)
        self.tableWidget.verticalHeader().setVisible(False)
        self.tableWidget.verticalHeader().setDefaultSectionSize(30)
        self.tableWidget.verticalHeader().setSortIndicatorShown(True)
        self.gridLayout_3.addWidget(self.tableWidget, 0, 0, 1, 1)
        self.tehTabs.addTab(self.resultsTabs, "")
        # --- "Plots" tab: empty container here (presumably populated at
        # runtime by application code -- confirm) ---
        self.plotTab = QtWidgets.QWidget()
        self.plotTab.setObjectName("plotTab")
        self.tehTabs.addTab(self.plotTab, "")
        # --- "Settings" tab: analysis options form + reset button ---
        self.settingsTab = QtWidgets.QWidget()
        self.settingsTab.setObjectName("settingsTab")
        self.horizontalLayout = QtWidgets.QHBoxLayout(self.settingsTab)
        self.horizontalLayout.setContentsMargins(0, 0, 0, 0)
        self.horizontalLayout.setObjectName("horizontalLayout")
        self.verticalLayout_4 = QtWidgets.QVBoxLayout()
        self.verticalLayout_4.setSizeConstraint(QtWidgets.QLayout.SetMaximumSize)
        self.verticalLayout_4.setObjectName("verticalLayout_4")
        self.formLayout_2 = QtWidgets.QFormLayout()
        self.formLayout_2.setFieldGrowthPolicy(QtWidgets.QFormLayout.AllNonFixedFieldsGrow)
        self.formLayout_2.setObjectName("formLayout_2")
        self.attemptCharEqnFitLabel = QtWidgets.QLabel(self.settingsTab)
        self.attemptCharEqnFitLabel.setObjectName("attemptCharEqnFitLabel")
        self.formLayout_2.setWidget(0, QtWidgets.QFormLayout.LabelRole, self.attemptCharEqnFitLabel)
        self.attemptCharEqnFitCheckBox = QtWidgets.QCheckBox(self.settingsTab)
        self.attemptCharEqnFitCheckBox.setChecked(True)
        self.attemptCharEqnFitCheckBox.setObjectName("attemptCharEqnFitCheckBox")
        self.formLayout_2.setWidget(0, QtWidgets.QFormLayout.FieldRole, self.attemptCharEqnFitCheckBox)
        self.doFastAndSloppyMathLabel = QtWidgets.QLabel(self.settingsTab)
        self.doFastAndSloppyMathLabel.setObjectName("doFastAndSloppyMathLabel")
        self.formLayout_2.setWidget(1, QtWidgets.QFormLayout.LabelRole, self.doFastAndSloppyMathLabel)
        self.doFastAndSloppyMathCheckBox = QtWidgets.QCheckBox(self.settingsTab)
        self.doFastAndSloppyMathCheckBox.setChecked(True)
        self.doFastAndSloppyMathCheckBox.setObjectName("doFastAndSloppyMathCheckBox")
        self.formLayout_2.setWidget(1, QtWidgets.QFormLayout.FieldRole, self.doFastAndSloppyMathCheckBox)
        self.lowerVoltageCutoffLabel = QtWidgets.QLabel(self.settingsTab)
        self.lowerVoltageCutoffLabel.setObjectName("lowerVoltageCutoffLabel")
        self.formLayout_2.setWidget(2, QtWidgets.QFormLayout.LabelRole, self.lowerVoltageCutoffLabel)
        self.lowerVoltageCutoffLineEdit = QtWidgets.QLineEdit(self.settingsTab)
        self.lowerVoltageCutoffLineEdit.setObjectName("lowerVoltageCutoffLineEdit")
        self.formLayout_2.setWidget(2, QtWidgets.QFormLayout.FieldRole, self.lowerVoltageCutoffLineEdit)
        self.upperVoltageCutoffLabel = QtWidgets.QLabel(self.settingsTab)
        self.upperVoltageCutoffLabel.setObjectName("upperVoltageCutoffLabel")
        self.formLayout_2.setWidget(3, QtWidgets.QFormLayout.LabelRole, self.upperVoltageCutoffLabel)
        self.upperVoltageCutoffLineEdit = QtWidgets.QLineEdit(self.settingsTab)
        self.upperVoltageCutoffLineEdit.setObjectName("upperVoltageCutoffLineEdit")
        self.formLayout_2.setWidget(3, QtWidgets.QFormLayout.FieldRole, self.upperVoltageCutoffLineEdit)
        self.fitMethodLabel = QtWidgets.QLabel(self.settingsTab)
        self.fitMethodLabel.setObjectName("fitMethodLabel")
        self.formLayout_2.setWidget(4, QtWidgets.QFormLayout.LabelRole, self.fitMethodLabel)
        self.fitMethodComboBox = QtWidgets.QComboBox(self.settingsTab)
        self.fitMethodComboBox.setObjectName("fitMethodComboBox")
        # Item texts are assigned later in retranslateUi().
        self.fitMethodComboBox.addItem("")
        self.fitMethodComboBox.addItem("")
        self.fitMethodComboBox.addItem("")
        self.formLayout_2.setWidget(4, QtWidgets.QFormLayout.FieldRole, self.fitMethodComboBox)
        self.verbosityLabel = QtWidgets.QLabel(self.settingsTab)
        self.verbosityLabel.setObjectName("verbosityLabel")
        self.formLayout_2.setWidget(5, QtWidgets.QFormLayout.LabelRole, self.verbosityLabel)
        self.verbositySpinBox = QtWidgets.QSpinBox(self.settingsTab)
        self.verbositySpinBox.setMaximum(2)
        self.verbositySpinBox.setObjectName("verbositySpinBox")
        self.formLayout_2.setWidget(5, QtWidgets.QFormLayout.FieldRole, self.verbositySpinBox)
        # Rows 6 and 7 are added out of order below (threads row = 7,
        # multithreading checkbox row = 6).
        self.analysisThreadsLabel = QtWidgets.QLabel(self.settingsTab)
        self.analysisThreadsLabel.setObjectName("analysisThreadsLabel")
        self.formLayout_2.setWidget(7, QtWidgets.QFormLayout.LabelRole, self.analysisThreadsLabel)
        self.analysisThreadsSpinBox = QtWidgets.QSpinBox(self.settingsTab)
        self.analysisThreadsSpinBox.setMinimum(1)
        self.analysisThreadsSpinBox.setProperty("value", 8)
        self.analysisThreadsSpinBox.setObjectName("analysisThreadsSpinBox")
        self.formLayout_2.setWidget(7, QtWidgets.QFormLayout.FieldRole, self.analysisThreadsSpinBox)
        self.useMultithreadingModeLabel = QtWidgets.QLabel(self.settingsTab)
        self.useMultithreadingModeLabel.setObjectName("useMultithreadingModeLabel")
        self.formLayout_2.setWidget(6, QtWidgets.QFormLayout.LabelRole, self.useMultithreadingModeLabel)
        self.useMultithreadingModeCheckBox = QtWidgets.QCheckBox(self.settingsTab)
        self.useMultithreadingModeCheckBox.setChecked(True)
        self.useMultithreadingModeCheckBox.setObjectName("useMultithreadingModeCheckBox")
        self.formLayout_2.setWidget(6, QtWidgets.QFormLayout.FieldRole, self.useMultithreadingModeCheckBox)
        self.verticalLayout_4.addLayout(self.formLayout_2)
        self.resetSettingsButton = QtWidgets.QPushButton(self.settingsTab)
        self.resetSettingsButton.setObjectName("resetSettingsButton")
        self.verticalLayout_4.addWidget(self.resetSettingsButton, 0, QtCore.Qt.AlignRight)
        self.horizontalLayout.addLayout(self.verticalLayout_4)
        self.tehTabs.addTab(self.settingsTab, "")
        # --- "Constraints" tab: lower/upper bound line-edits for the fit
        # parameters (I_0, I_Ph, R_s, R_sh, n), laid out with fixed geometry ---
        self.tab = QtWidgets.QWidget()
        self.tab.setObjectName("tab")
        self.Rsh_ub = QtWidgets.QLineEdit(self.tab)
        self.Rsh_ub.setGeometry(QtCore.QRect(250, 190, 113, 32))
        self.Rsh_ub.setObjectName("Rsh_ub")
        self.Rsh_lb = QtWidgets.QLineEdit(self.tab)
        self.Rsh_lb.setGeometry(QtCore.QRect(120, 190, 113, 32))
        self.Rsh_lb.setObjectName("Rsh_lb")
        self.label_3 = QtWidgets.QLabel(self.tab)
        self.label_3.setGeometry(QtCore.QRect(20, 50, 61, 20))
        self.label_3.setObjectName("label_3")
        self.I0_lb = QtWidgets.QLineEdit(self.tab)
        self.I0_lb.setGeometry(QtCore.QRect(120, 40, 113, 32))
        self.I0_lb.setObjectName("I0_lb")
        self.n_ub = QtWidgets.QLineEdit(self.tab)
        self.n_ub.setGeometry(QtCore.QRect(250, 240, 113, 32))
        self.n_ub.setObjectName("n_ub")
        self.label_6 = QtWidgets.QLabel(self.tab)
        self.label_6.setGeometry(QtCore.QRect(20, 200, 81, 20))
        self.label_6.setObjectName("label_6")
        self.label = QtWidgets.QLabel(self.tab)
        self.label.setGeometry(QtCore.QRect(290, 10, 41, 20))
        self.label.setObjectName("label")
        self.line_6 = QtWidgets.QFrame(self.tab)
        self.line_6.setGeometry(QtCore.QRect(20, 220, 351, 20))
        self.line_6.setFrameShape(QtWidgets.QFrame.HLine)
        self.line_6.setFrameShadow(QtWidgets.QFrame.Sunken)
        self.line_6.setObjectName("line_6")
        self.label_2 = QtWidgets.QLabel(self.tab)
        self.label_2.setGeometry(QtCore.QRect(160, 10, 41, 20))
        self.label_2.setObjectName("label_2")
        self.n_lb = QtWidgets.QLineEdit(self.tab)
        self.n_lb.setGeometry(QtCore.QRect(120, 240, 113, 32))
        self.n_lb.setObjectName("n_lb")
        self.label_7 = QtWidgets.QLabel(self.tab)
        self.label_7.setGeometry(QtCore.QRect(20, 250, 81, 20))
        self.label_7.setObjectName("label_7")
        self.I0_ub = QtWidgets.QLineEdit(self.tab)
        self.I0_ub.setGeometry(QtCore.QRect(250, 40, 113, 32))
        self.I0_ub.setObjectName("I0_ub")
        self.Rs_lb = QtWidgets.QLineEdit(self.tab)
        self.Rs_lb.setGeometry(QtCore.QRect(120, 140, 113, 32))
        self.Rs_lb.setObjectName("Rs_lb")
        self.line_4 = QtWidgets.QFrame(self.tab)
        self.line_4.setGeometry(QtCore.QRect(20, 120, 351, 20))
        self.line_4.setFrameShape(QtWidgets.QFrame.HLine)
        self.line_4.setFrameShadow(QtWidgets.QFrame.Sunken)
        self.line_4.setObjectName("line_4")
        self.line_3 = QtWidgets.QFrame(self.tab)
        self.line_3.setGeometry(QtCore.QRect(20, 70, 351, 20))
        self.line_3.setFrameShape(QtWidgets.QFrame.HLine)
        self.line_3.setFrameShadow(QtWidgets.QFrame.Sunken)
        self.line_3.setObjectName("line_3")
        self.Rs_ub = QtWidgets.QLineEdit(self.tab)
        self.Rs_ub.setGeometry(QtCore.QRect(250, 140, 113, 32))
        self.Rs_ub.setObjectName("Rs_ub")
        self.line_2 = QtWidgets.QFrame(self.tab)
        self.line_2.setGeometry(QtCore.QRect(120, 20, 251, 20))
        self.line_2.setFrameShape(QtWidgets.QFrame.HLine)
        self.line_2.setFrameShadow(QtWidgets.QFrame.Sunken)
        self.line_2.setObjectName("line_2")
        self.line = QtWidgets.QFrame(self.tab)
        self.line.setGeometry(QtCore.QRect(233, 20, 20, 251))
        self.line.setFrameShape(QtWidgets.QFrame.VLine)
        self.line.setFrameShadow(QtWidgets.QFrame.Sunken)
        self.line.setObjectName("line")
        self.label_4 = QtWidgets.QLabel(self.tab)
        self.label_4.setGeometry(QtCore.QRect(20, 100, 61, 20))
        self.label_4.setObjectName("label_4")
        self.label_5 = QtWidgets.QLabel(self.tab)
        self.label_5.setGeometry(QtCore.QRect(20, 150, 71, 20))
        self.label_5.setObjectName("label_5")
        self.line_5 = QtWidgets.QFrame(self.tab)
        self.line_5.setGeometry(QtCore.QRect(20, 170, 351, 20))
        self.line_5.setFrameShape(QtWidgets.QFrame.HLine)
        self.line_5.setFrameShadow(QtWidgets.QFrame.Sunken)
        self.line_5.setObjectName("line_5")
        self.Iph_ub = QtWidgets.QLineEdit(self.tab)
        self.Iph_ub.setGeometry(QtCore.QRect(250, 90, 113, 32))
        self.Iph_ub.setObjectName("Iph_ub")
        self.Iph_lb = QtWidgets.QLineEdit(self.tab)
        self.Iph_lb.setGeometry(QtCore.QRect(120, 90, 113, 32))
        self.Iph_lb.setObjectName("Iph_lb")
        self.tehTabs.addTab(self.tab, "")
        self.gridLayout.addWidget(self.tehTabs, 1, 0, 1, 1)
        batch_iv_analysis.setCentralWidget(self.centralwidget)
        # --- Menu bar and status bar ---
        self.menubar = QtWidgets.QMenuBar(batch_iv_analysis)
        self.menubar.setGeometry(QtCore.QRect(0, 0, 847, 25))
        self.menubar.setObjectName("menubar")
        self.menuFile = QtWidgets.QMenu(self.menubar)
        self.menuFile.setObjectName("menuFile")
        self.menuTools = QtWidgets.QMenu(self.menubar)
        self.menuTools.setObjectName("menuTools")
        batch_iv_analysis.setMenuBar(self.menubar)
        self.statusbar = QtWidgets.QStatusBar(batch_iv_analysis)
        self.statusbar.setEnabled(True)
        self.statusbar.setObjectName("statusbar")
        batch_iv_analysis.setStatusBar(self.statusbar)
        # --- Event-log dock: movable/floatable (not closable), allowed on
        # bottom/left/right areas ---
        self.tehDock = QtWidgets.QDockWidget(batch_iv_analysis)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.MinimumExpanding)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.tehDock.sizePolicy().hasHeightForWidth())
        self.tehDock.setSizePolicy(sizePolicy)
        self.tehDock.setMinimumSize(QtCore.QSize(93, 118))
        self.tehDock.setFeatures(QtWidgets.QDockWidget.DockWidgetFloatable|QtWidgets.QDockWidget.DockWidgetMovable)
        self.tehDock.setAllowedAreas(QtCore.Qt.BottomDockWidgetArea|QtCore.Qt.LeftDockWidgetArea|QtCore.Qt.RightDockWidgetArea)
        self.tehDock.setObjectName("tehDock")
        self.dockWidgetContents = QtWidgets.QWidget()
        self.dockWidgetContents.setObjectName("dockWidgetContents")
        self.gridLayout_2 = QtWidgets.QGridLayout(self.dockWidgetContents)
        self.gridLayout_2.setContentsMargins(0, 0, 0, 0)
        self.gridLayout_2.setObjectName("gridLayout_2")
        self.eventLog = QtWidgets.QTextBrowser(self.dockWidgetContents)
        self.eventLog.setObjectName("eventLog")
        self.gridLayout_2.addWidget(self.eventLog, 0, 0, 1, 1)
        self.tehDock.setWidget(self.dockWidgetContents)
        batch_iv_analysis.addDockWidget(QtCore.Qt.DockWidgetArea(8), self.tehDock)
        # --- Actions (texts/shortcuts assigned in retranslateUi) ---
        self.actionQuit = QtWidgets.QAction(batch_iv_analysis)
        self.actionQuit.setObjectName("actionQuit")
        self.actionOpen = QtWidgets.QAction(batch_iv_analysis)
        self.actionOpen.setObjectName("actionOpen")
        self.actionSave = QtWidgets.QAction(batch_iv_analysis)
        self.actionSave.setObjectName("actionSave")
        self.actionClear_Table = QtWidgets.QAction(batch_iv_analysis)
        self.actionClear_Table.setEnabled(True)
        self.actionClear_Table.setObjectName("actionClear_Table")
        self.actionFsadf = QtWidgets.QAction(batch_iv_analysis)
        self.actionFsadf.setObjectName("actionFsadf")
        self.actionSet_Bounds = QtWidgets.QAction(batch_iv_analysis)
        self.actionSet_Bounds.setObjectName("actionSet_Bounds")
        self.actionWatch = QtWidgets.QAction(batch_iv_analysis)
        self.actionWatch.setObjectName("actionWatch")
        self.actionEnable_Watching = QtWidgets.QAction(batch_iv_analysis)
        self.actionEnable_Watching.setCheckable(True)
        self.actionEnable_Watching.setChecked(False)
        self.actionEnable_Watching.setObjectName("actionEnable_Watching")
        self.actionWatch_2 = QtWidgets.QAction(batch_iv_analysis)
        self.actionWatch_2.setObjectName("actionWatch_2")
        self.actionFit_Constraints = QtWidgets.QAction(batch_iv_analysis)
        self.actionFit_Constraints.setObjectName("actionFit_Constraints")
        # Menu population: File (Open/Watch | Export | Quit) and Tools.
        self.menuFile.addAction(self.actionOpen)
        self.menuFile.addAction(self.actionWatch_2)
        self.menuFile.addSeparator()
        self.menuFile.addAction(self.actionSave)
        self.menuFile.addSeparator()
        self.menuFile.addAction(self.actionQuit)
        self.menuTools.addAction(self.actionClear_Table)
        self.menuTools.addAction(self.actionEnable_Watching)
        self.menubar.addAction(self.menuFile.menuAction())
        self.menubar.addAction(self.menuTools.menuAction())
        # Final wiring: assign strings, set default tab/combo state, connect
        # Quit, and auto-connect slots by object name.
        self.retranslateUi(batch_iv_analysis)
        self.tehTabs.setCurrentIndex(0)
        self.fitMethodComboBox.setCurrentIndex(2)
        self.actionQuit.triggered.connect(batch_iv_analysis.close)
        QtCore.QMetaObject.connectSlotsByName(batch_iv_analysis)

    def retranslateUi(self, batch_iv_analysis):
        # Assign every user-visible (translatable) string: window title, tab
        # labels, tooltips, default field values, and action texts/shortcuts.
        _translate = QtCore.QCoreApplication.translate
        batch_iv_analysis.setWindowTitle(_translate("batch_iv_analysis", "batch-iv-analysis"))
        self.tableWidget.setSortingEnabled(True)
        self.tehTabs.setTabText(self.tehTabs.indexOf(self.resultsTabs), _translate("batch_iv_analysis", "Results"))
        self.tehTabs.setTabText(self.tehTabs.indexOf(self.plotTab), _translate("batch_iv_analysis", "Plots"))
        self.attemptCharEqnFitLabel.setText(_translate("batch_iv_analysis", "Attempt Char. Eqn. Fit"))
        self.doFastAndSloppyMathLabel.setText(_translate("batch_iv_analysis", "Do Fast and Sloppy Math"))
        self.lowerVoltageCutoffLabel.setToolTip(_translate("batch_iv_analysis", "<html><head/><body><p>Any data points below this voltage will be ignored</p></body></html>"))
        self.lowerVoltageCutoffLabel.setText(_translate("batch_iv_analysis", "Lower Voltage Cutoff"))
        self.lowerVoltageCutoffLineEdit.setToolTip(_translate("batch_iv_analysis", "<html><head/><body><p>Any data points below this voltage will be ignored</p></body></html>"))
        self.lowerVoltageCutoffLineEdit.setText(_translate("batch_iv_analysis", "-inf"))
        self.upperVoltageCutoffLabel.setToolTip(_translate("batch_iv_analysis", "<html><head/><body><p>Any data points above this voltage will be ignored</p></body></html>"))
        self.upperVoltageCutoffLabel.setText(_translate("batch_iv_analysis", "Upper Voltage Cutoff"))
        self.upperVoltageCutoffLineEdit.setToolTip(_translate("batch_iv_analysis", "<html><head/><body><p>Any data points above this voltage will be ignored</p></body></html>"))
        self.upperVoltageCutoffLineEdit.setText(_translate("batch_iv_analysis", "inf"))
        self.fitMethodLabel.setToolTip(_translate("batch_iv_analysis", "<html><head/><body><p>Fit method to use in scipy.optimize.least_squares routine</p></body></html>"))
        self.fitMethodLabel.setText(_translate("batch_iv_analysis", "Fit Method"))
        self.fitMethodComboBox.setToolTip(_translate("batch_iv_analysis", "<html><head/><body><p>Fit method to use in scipy.optimize.least_squares routine</p></body></html>"))
        self.fitMethodComboBox.setItemText(0, _translate("batch_iv_analysis", "Trust Region Reflective"))
        self.fitMethodComboBox.setItemText(1, _translate("batch_iv_analysis", "dogleg"))
        self.fitMethodComboBox.setItemText(2, _translate("batch_iv_analysis", "Levenberg-Marquardt"))
        self.verbosityLabel.setToolTip(_translate("batch_iv_analysis", "<html><head/><body><p>Higher verbosity will generate more output status messages which can be helpful for debugging</p></body></html>"))
        self.verbosityLabel.setText(_translate("batch_iv_analysis", "Verbosity"))
        self.analysisThreadsLabel.setText(_translate("batch_iv_analysis", "Analysis Threads"))
        self.useMultithreadingModeLabel.setToolTip(_translate("batch_iv_analysis", "<html><head/><body><p>Speeds up analysis by analyzing multiple files in parallel</p></body></html>"))
        # NOTE(review): widget is named "useMultithreadingMode*" but the text
        # says "Multiprocessing" -- confirm which term is intended.
        self.useMultithreadingModeLabel.setText(_translate("batch_iv_analysis", "Use Multiprocessing Mode"))
        self.resetSettingsButton.setText(_translate("batch_iv_analysis", "Reset Defaults"))
        self.tehTabs.setTabText(self.tehTabs.indexOf(self.settingsTab), _translate("batch_iv_analysis", "Settings"))
        self.Rsh_ub.setText(_translate("batch_iv_analysis", "inf"))
        self.Rsh_lb.setText(_translate("batch_iv_analysis", "0"))
        self.label_3.setText(_translate("batch_iv_analysis", "I_0 [A]"))
        self.I0_lb.setText(_translate("batch_iv_analysis", "0"))
        self.n_ub.setText(_translate("batch_iv_analysis", "inf"))
        self.label_6.setText(_translate("batch_iv_analysis", "R_sh [ohm]"))
        self.label.setText(_translate("batch_iv_analysis", "Upper"))
        self.label_2.setText(_translate("batch_iv_analysis", "Lower"))
        self.n_lb.setText(_translate("batch_iv_analysis", "0"))
        self.label_7.setText(_translate("batch_iv_analysis", "n"))
        self.I0_ub.setText(_translate("batch_iv_analysis", "inf"))
        self.Rs_lb.setText(_translate("batch_iv_analysis", "0"))
        self.Rs_ub.setText(_translate("batch_iv_analysis", "inf"))
        self.label_4.setText(_translate("batch_iv_analysis", "I_Ph [A]"))
        self.label_5.setText(_translate("batch_iv_analysis", "R_s [ohm]"))
        self.Iph_ub.setText(_translate("batch_iv_analysis", "inf"))
        self.Iph_lb.setText(_translate("batch_iv_analysis", "0"))
        self.tehTabs.setTabText(self.tehTabs.indexOf(self.tab), _translate("batch_iv_analysis", "Constraints"))
        self.menuFile.setTitle(_translate("batch_iv_analysis", "File"))
        self.menuTools.setTitle(_translate("batch_iv_analysis", "Tools"))
        self.tehDock.setWindowTitle(_translate("batch_iv_analysis", "Event Log"))
        self.actionQuit.setText(_translate("batch_iv_analysis", "Quit"))
        self.actionQuit.setShortcut(_translate("batch_iv_analysis", "Ctrl+Q"))
        self.actionOpen.setText(_translate("batch_iv_analysis", "Open"))
        self.actionOpen.setShortcut(_translate("batch_iv_analysis", "Ctrl+O"))
        self.actionSave.setText(_translate("batch_iv_analysis", "Export"))
        self.actionSave.setShortcut(_translate("batch_iv_analysis", "Ctrl+S"))
        self.actionClear_Table.setText(_translate("batch_iv_analysis", "Clear Table"))
        self.actionClear_Table.setShortcut(_translate("batch_iv_analysis", "Ctrl+Backspace"))
        self.actionFsadf.setText(_translate("batch_iv_analysis", "fsadf"))
        self.actionSet_Bounds.setText(_translate("batch_iv_analysis", "Set Bounds"))
        self.actionWatch.setText(_translate("batch_iv_analysis", "Watch"))
        self.actionWatch.setShortcut(_translate("batch_iv_analysis", "Ctrl+W"))
        self.actionEnable_Watching.setText(_translate("batch_iv_analysis", "Enable Watching"))
        self.actionEnable_Watching.setShortcut(_translate("batch_iv_analysis", "Ctrl+E"))
        self.actionWatch_2.setText(_translate("batch_iv_analysis", "Watch"))
        self.actionWatch_2.setShortcut(_translate("batch_iv_analysis", "Ctrl+W"))
        self.actionFit_Constraints.setText(_translate("batch_iv_analysis", "Fit Constraints"))
| {
"content_hash": "fac775860942f8a622ac295a771b478a",
"timestamp": "",
"source": "github",
"line_count": 336,
"max_line_length": 208,
"avg_line_length": 67.61309523809524,
"alnum_prop": 0.71564398274496,
"repo_name": "greysAcademicCode/batch-iv-analysis",
"id": "b05c65fbbc38d0d6e7c65d3bd119b404ce68c023",
"size": "22939",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "batch_iv_analysis/batch_iv_analysis_UI.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "3146"
},
{
"name": "Mathematica",
"bytes": "989"
},
{
"name": "Matlab",
"bytes": "1770"
},
{
"name": "Python",
"bytes": "160350"
}
],
"symlink_target": ""
} |
from abc import abstractmethod
class RiverDriver(object):
    """Abstract base for workflow driver backends.

    NOTE(review): ``get_available_approvals`` is decorated with
    ``@abstractmethod`` but the class derives from ``object`` (no ABCMeta),
    so instantiation is NOT actually prevented -- the decorator is
    documentation-only here. Switching to ``abc.ABC`` would change
    instantiation behavior; confirm before doing so.
    """

    def __init__(self, workflow, wokflow_object_class, field_name):
        # NOTE(review): `wokflow_object_class` looks like a typo of
        # `workflow_object_class`; renaming would break keyword callers and
        # attribute access, so it is only flagged here.
        self.workflow = workflow
        self.wokflow_object_class = wokflow_object_class
        self.field_name = field_name
        # Cache slot, initialized empty; presumably filled by subclasses or
        # later accessors -- confirm against the concrete drivers.
        self._cached_workflow = None

    @abstractmethod
    def get_available_approvals(self, as_user):
        # Subclasses must implement: return the approvals available to
        # `as_user`.
        raise NotImplementedError()
| {
"content_hash": "9787a1295758fe122f5827f3d11c9f65",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 67,
"avg_line_length": 28.428571428571427,
"alnum_prop": 0.6809045226130653,
"repo_name": "javrasya/django-river",
"id": "b22cee213236ef79258346396b9b0e640017d870",
"size": "398",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "river/driver/river_driver.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Gherkin",
"bytes": "15065"
},
{
"name": "Python",
"bytes": "254833"
},
{
"name": "Shell",
"bytes": "617"
}
],
"symlink_target": ""
} |
from tempest.api.compute import base
from tempest import exceptions
from tempest.test import attr
class ServicesAdminV3TestJSON(base.BaseV3ComputeAdminTest):

    """
    Tests Services API. List and Enable/Disable require admin privileges.
    """

    _interface = 'json'

    @classmethod
    def setUpClass(cls):
        super(ServicesAdminV3TestJSON, cls).setUpClass()
        # Admin client for privileged calls; non-admin client is used to
        # verify authorization failures.
        cls.client = cls.services_admin_client
        cls.non_admin_client = cls.services_client

    @attr(type='gate')
    def test_list_services(self):
        # An admin can list services and at least one is registered.
        resp, services = self.client.list_services()
        self.assertEqual(200, resp.status)
        self.assertNotEqual(0, len(services))

    @attr(type=['negative', 'gate'])
    def test_list_services_with_non_admin_user(self):
        # Listing services without admin privileges must be rejected.
        self.assertRaises(exceptions.Unauthorized,
                          self.non_admin_client.list_services)

    @attr(type='gate')
    def test_get_service_by_service_binary_name(self):
        # Filtering by binary returns only services with that binary.
        binary_name = 'nova-compute'
        params = {'binary': binary_name}
        resp, services = self.client.list_services(params)
        self.assertEqual(200, resp.status)
        self.assertNotEqual(0, len(services))
        for service in services:
            self.assertEqual(binary_name, service['binary'])

    @attr(type='gate')
    def test_get_service_by_host_name(self):
        # Filtering by host returns the same services as filtering the
        # unfiltered list client-side.
        resp, services = self.client.list_services()
        host_name = services[0]['host']
        services_on_host = [service for service in services if
                            service['host'] == host_name]
        params = {'host': host_name}

        resp, services = self.client.list_services(params)

        # we could have a periodic job checkin between the 2 service
        # lookups, so only compare binary lists.
        s1 = [service['binary'] for service in services]
        s2 = [service['binary'] for service in services_on_host]

        # sort the lists before comparing, to take out dependency
        # on order.
        self.assertEqual(sorted(s1), sorted(s2))

    @attr(type=['negative', 'gate'])
    def test_get_service_by_invalid_params(self):
        # return all services if send the request with invalid parameter
        resp, services = self.client.list_services()
        params = {'xxx': 'nova-compute'}
        resp, services_xxx = self.client.list_services(params)
        self.assertEqual(200, resp.status)
        self.assertEqual(len(services), len(services_xxx))

    @attr(type='gate')
    def test_get_service_by_service_and_host_name(self):
        # Combining host and binary filters yields exactly one service.
        resp, services = self.client.list_services()
        host_name = services[0]['host']
        binary_name = services[0]['binary']
        params = {'host': host_name, 'binary': binary_name}
        resp, services = self.client.list_services(params)
        self.assertEqual(200, resp.status)
        self.assertEqual(1, len(services))
        self.assertEqual(host_name, services[0]['host'])
        self.assertEqual(binary_name, services[0]['binary'])

    @attr(type=['negative', 'gate'])
    def test_get_service_by_invalid_service_and_valid_host(self):
        # A nonexistent binary on a real host matches nothing.
        resp, services = self.client.list_services()
        host_name = services[0]['host']
        params = {'host': host_name, 'binary': 'xxx'}
        resp, services = self.client.list_services(params)
        self.assertEqual(200, resp.status)
        self.assertEqual(0, len(services))

    @attr(type=['negative', 'gate'])
    def test_get_service_with_valid_service_and_invalid_host(self):
        # A real binary on a nonexistent host matches nothing.
        resp, services = self.client.list_services()
        binary_name = services[0]['binary']
        params = {'host': 'xxx', 'binary': binary_name}
        resp, services = self.client.list_services(params)
        self.assertEqual(200, resp.status)
        self.assertEqual(0, len(services))

    @attr(type='gate')
    def test_service_enable_disable(self):
        # Disable a service, verify its status, then re-enable and verify.
        resp, services = self.client.list_services()
        host_name = services[0]['host']
        binary_name = services[0]['binary']

        resp, service = self.client.disable_service(host_name, binary_name)
        self.assertEqual(200, resp.status)
        params = {'host': host_name, 'binary': binary_name}
        resp, services = self.client.list_services(params)
        self.assertEqual('disabled', services[0]['status'])

        resp, service = self.client.enable_service(host_name, binary_name)
        self.assertEqual(200, resp.status)
        resp, services = self.client.list_services(params)
        self.assertEqual('enabled', services[0]['status'])
class ServicesAdminV3TestXML(ServicesAdminV3TestJSON):
    # Re-run the entire JSON suite above against the XML interface.
    _interface = 'xml'
| {
"content_hash": "9f5e396b7ceafe17913f446304ba1b73",
"timestamp": "",
"source": "github",
"line_count": 117,
"max_line_length": 75,
"avg_line_length": 39.333333333333336,
"alnum_prop": 0.6375488917861799,
"repo_name": "eltonkevani/tempest_el_env",
"id": "67f9947ce6b5155e613b28445a4900acbef3a6e5",
"size": "5305",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tempest/api/compute/v3/admin/test_services.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "1871339"
},
{
"name": "Shell",
"bytes": "5748"
}
],
"symlink_target": ""
} |
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.mgmt.core.exceptions import ARMErrorFormat
from .. import models as _models
if TYPE_CHECKING:
    # pylint: disable=unused-import,ungrouped-imports
    # These imports/aliases exist only for static type checking; the block
    # is never executed at runtime.
    from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar

    T = TypeVar('T')
    # Response-hook callable: (pipeline_response, deserialized, response_headers) -> Any
    ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
# NOTE(review): this looks like AutoRest-generated Azure SDK code --
# presumably it should be regenerated rather than hand-edited; confirm
# before modifying.
class NodesOperations(object):
    """NodesOperations operations.

    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.

    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure.mgmt.databoxedge.v2019_07_01.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """

    models = _models

    def __init__(self, client, config, serializer, deserializer):
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config

    def list_by_data_box_edge_device(
        self,
        device_name,  # type: str
        resource_group_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> Iterable["_models.NodeList"]
        """Gets all the nodes currently configured under this Data Box Edge device.

        :param device_name: The device name.
        :type device_name: str
        :param resource_group_name: The resource group name.
        :type resource_group_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either NodeList or the result of cls(response)
        :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.databoxedge.v2019_07_01.models.NodeList]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.NodeList"]
        # Map auth/not-found/conflict status codes to specific exceptions;
        # callers may extend via the `error_map` kwarg.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-07-01"
        accept = "application/json"

        def prepare_request(next_link=None):
            # Build the GET request; `next_link` is None for the first page.
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

            if not next_link:
                # First page: expand the templated URL from operation metadata.
                # Construct URL
                url = self.list_by_data_box_edge_device.metadata['url']  # type: ignore
                path_format_arguments = {
                    'deviceName': self._serialize.url("device_name", device_name, 'str'),
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

                request = self._client.get(url, query_parameters, header_parameters)
            else:
                # Subsequent pages: use the service-provided next link as-is.
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request

        def extract_data(pipeline_response):
            # Deserialize one page; returns (continuation_token, iterator).
            # The token is always None here.
            deserialized = self._deserialize('NodeList', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return None, iter(list_of_elem)

        def get_next(next_link=None):
            # Fetch one page and raise on any non-200 response.
            request = prepare_request(next_link)

            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)

            return pipeline_response

        return ItemPaged(
            get_next, extract_data
        )
    list_by_data_box_edge_device.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{deviceName}/nodes'}  # type: ignore
| {
"content_hash": "8549f2aac0a09a8537c3fb65e1ebffc1",
"timestamp": "",
"source": "github",
"line_count": 114,
"max_line_length": 207,
"avg_line_length": 44.90350877192982,
"alnum_prop": 0.6454385622191834,
"repo_name": "Azure/azure-sdk-for-python",
"id": "d27783f71dc1fe5a1b53d2ff6174c181d61ccae4",
"size": "5586",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "sdk/databoxedge/azure-mgmt-databoxedge/azure/mgmt/databoxedge/v2019_07_01/operations/_nodes_operations.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "1224"
},
{
"name": "Bicep",
"bytes": "24196"
},
{
"name": "CSS",
"bytes": "6089"
},
{
"name": "Dockerfile",
"bytes": "4892"
},
{
"name": "HTML",
"bytes": "12058"
},
{
"name": "JavaScript",
"bytes": "8137"
},
{
"name": "Jinja",
"bytes": "10377"
},
{
"name": "Jupyter Notebook",
"bytes": "272022"
},
{
"name": "PowerShell",
"bytes": "518535"
},
{
"name": "Python",
"bytes": "715484989"
},
{
"name": "Shell",
"bytes": "3631"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.db import models
# Create your models here.
from pygments.lexers import get_all_lexers
from pygments.styles import get_all_styles
from pygments.lexers import get_lexer_by_name
from pygments.formatters.html import HtmlFormatter
from pygments import highlight
# All pygments lexers that declare at least one short alias (item[1] is the
# alias tuple).
LEXERS = [item for item in get_all_lexers() if item[1]]
# (first_alias, display_name) pairs for the model's `language` choices.
LANGUAGE_CHOICES = sorted([(item[1][0], item[0]) for item in LEXERS])
# (name, name) pairs for the model's `style` choices.
STYLE_CHOICES = sorted((item, item) for item in get_all_styles())
class Snippet(models.Model):
    """A code snippet whose pygments-highlighted HTML is regenerated on save."""
    created = models.DateTimeField(auto_now_add=True)
    title = models.TextField()
    code = models.TextField()
    linenos = models.BooleanField(default=False)
    language = models.CharField(choices=LANGUAGE_CHOICES, default='python', max_length=100)
    style = models.CharField(choices=STYLE_CHOICES, default='friendly', max_length=100)
    owner = models.ForeignKey('auth.User', related_name='snippets', on_delete=models.CASCADE)
    highlighted = models.TextField()  # generated field; populated in save()
    class Meta:
        ordering = ('created',)
    def save(self, *args, **kwargs):
        """
        Use the `pygments` library to create a highlighted HTML
        representation of the code snippet, then persist the model.
        """
        lexer = get_lexer_by_name(self.language)
        # Conditional expressions instead of the fragile `cond and a or b` idiom
        # (which silently breaks whenever the "true" value is falsy).
        linenos = 'table' if self.linenos else False
        options = {'title': self.title} if self.title else {}
        formatter = HtmlFormatter(style=self.style, linenos=linenos,
                                  full=True, **options)
        self.highlighted = highlight(self.code, lexer, formatter)
        super(Snippet, self).save(*args, **kwargs)
| {
"content_hash": "64bd5eaac7c987f8951093825d8b9424",
"timestamp": "",
"source": "github",
"line_count": 41,
"max_line_length": 93,
"avg_line_length": 39.90243902439025,
"alnum_prop": 0.6839853300733496,
"repo_name": "Yogi1994/django-rest-tutorial",
"id": "2b184e7b9242fe94f2edf36d4c7f5b6554bb6001",
"size": "1660",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "snippets/models.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "36258"
}
],
"symlink_target": ""
} |
"""
a routine to run a nested sampling class
"""
import cPickle as pickle
import copy
def save_replicas_to_binary(fout, ns):
    """Checkpoint the mutable state of a nested-sampling run.

    fout: path of the checkpoint file to (over)write
    ns: sampler object; its replicas, iter_number, failed_mc_walks and
        _mc_niter attributes are pickled into a single dict
    """
    checkpoint = {
        'replicas': ns.replicas,
        'iter_number': ns.iter_number,
        'failed_mc_walks': ns.failed_mc_walks,
        '_mc_niter': ns._mc_niter,
    }
    # Use a context manager: the original passed an anonymous open() to
    # pickle.dump and leaked the file handle.
    with open(fout, "wb") as f:
        pickle.dump(checkpoint, f, pickle.HIGHEST_PROTOCOL)
def load_replicas_from_binary(fin):
    """Load and return the checkpoint dict written by save_replicas_to_binary.

    fin: path of the checkpoint file
    """
    # Close the file deterministically; the original leaked the handle.
    with open(fin, "rb") as f:
        return pickle.load(f)
def load_checkpoint(fin, ns):
    """Restore sampler state from the checkpoint file `fin` into `ns`.

    Sets ns.replicas, ns.nreplicas, ns.iter_number, ns.failed_mc_walks
    and ns._mc_niter.
    """
    checkpoint = load_replicas_from_binary(fin)
    # The checkpoint dict is freshly unpickled, so nothing else aliases it:
    # the previous deepcopy calls (including a deepcopy of an int returned
    # by len()) were pure overhead and have been dropped.
    ns.replicas = checkpoint['replicas']
    ns.nreplicas = len(ns.replicas)
    ns.iter_number = checkpoint['iter_number']
    ns.failed_mc_walks = checkpoint['failed_mc_walks']
    ns._mc_niter = checkpoint['_mc_niter']
def remove_energies(fout, Emax):
    """Truncate the energies file, keeping only the leading entries > Emax.

    The file is expected to hold one energy per line in decreasing order;
    we stop at the first entry <= Emax and rewrite the file in place.

    fout: path of the energies file
    Emax: numeric cutoff
    """
    kept = []
    with open(fout, 'r') as f:
        for line in f:
            # Bugfix: the original compared the raw text line to the numeric
            # Emax (str vs float), which is meaningless; parse it first.
            if float(line) > Emax:
                kept.append(line)
            else:
                # Entries are sorted decreasing, so nothing later qualifies.
                break
    with open(fout, 'w') as f:
        f.writelines(kept)
def write_energies(fout, max_energies, isave=0):
    """Write max_energies[isave:] to the open file `fout`, one per line."""
    lines = [str(e) for e in max_energies[isave:]]
    fout.write("\n".join(lines) + "\n")
def run_nested_sampling(ns, label="ns_out", etol=0.01, maxiter=None,
                        iprint_replicas=1000):
    """Drive a nested-sampling run to completion (Python 2 module).

    ns: sampler exposing .replicas (sorted by energy), .max_energies,
        .cpfile, .cpstart, .cpfreq and .one_iteration()
    label: prefix for output files (<label>.energies, <label>.replicas.p,
        <label>.replicas_final)
    etol: stop when max - min replica energy drops below this tolerance
    maxiter: optional hard cap on iterations
    iprint_replicas: unused here -- presumably a leftover parameter; confirm

    Returns the (mutated) ns object.
    """
    isave = 0   # index of the first max_energies entry not yet flushed
    i = 0
    print "nreplicas", len(ns.replicas)
    # NOTE(review): `== None` / `== True` below should be `is None` /
    # plain truth tests; left unchanged in this documentation pass.
    if ns.cpfile == None:
        fout_replicas_name = label + ".replicas.p"
    else:
        fout_replicas_name = ns.cpfile
    fout_energies_file = label+".energies"
    # Append when resuming from a checkpoint, otherwise start fresh.
    if ns.cpstart == True:
        fout_energies = open(fout_energies_file, "ab")
    else:
        fout_energies = open(fout_energies_file, "wb")
    while True:
        #start from checkpoint binary file?
        if ns.cpstart == True and i == 0 :
            load_checkpoint(fout_replicas_name, ns)
            # Drop energies already below the restored ceiling.
            remove_energies(fout_energies_file, ns.replicas[-1].energy)
        # Convergence measure: spread between worst and best replica.
        ediff = ns.replicas[-1].energy - ns.replicas[0].energy
        # save max energies to a file
        if i != 0 and i % 100 == 0:
            write_energies(fout_energies, ns.max_energies, isave=isave)
            isave = len(ns.max_energies)
        #pickle current replicas and write them to a file current replicas to a file
        cpfreq = ns.cpfreq
        if i % cpfreq == 0:
            save_replicas_to_binary(fout_replicas_name, ns)
        if ediff < etol: break
        if maxiter is not None and i >= maxiter: break
        ns.one_iteration()
        i += 1
    # Flush any energies accumulated since the last periodic write.
    write_energies(fout_energies, ns.max_energies, isave=isave)
    fout_energies.close()
    #print_replicas(fout_replicas, ns.replicas)
    #fout_replicas.close()
    # save final replica energies to a file
    # save them with highest energy first
    with open(label+".replicas_final", "w") as fout:
        write_energies(fout, [r.energy for r in reversed(ns.replicas)])
    print "min replica energy", ns.replicas[0].energy
    print "max replica energy", ns.replicas[-1].energy
    return ns
| {
"content_hash": "4b20fbc0581d1e5639ace7723cebe71b",
"timestamp": "",
"source": "github",
"line_count": 103,
"max_line_length": 84,
"avg_line_length": 31.54368932038835,
"alnum_prop": 0.6183441058787319,
"repo_name": "js850/nested_sampling",
"id": "d7b42b9023ec8e8a8e9ff721c6a8ab43f31c8fd8",
"size": "3249",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "nested_sampling/_nested_sampling_runner.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "C",
"bytes": "277255"
},
{
"name": "CSS",
"bytes": "2624"
},
{
"name": "Makefile",
"bytes": "7715"
},
{
"name": "Python",
"bytes": "116537"
}
],
"symlink_target": ""
} |
import json
import urllib
import datetime
import sys
import base64
from pprint import pprint
from bs4 import BeautifulSoup
from nltk.corpus import stopwords
reload(sys)  # Python 2: re-expose sys.setdefaultencoding (site.py deletes it)
sys.setdefaultencoding("utf-8")  # NOTE(review): global default-encoding override is a py2 hack
# Fetch the Guardian front page and parse it.
html = urllib.urlopen('http://theguardian.co.uk')
soup = BeautifulSoup(html)
# Teaser/summary blocks on the front page.
spans = soup.find_all('div', attrs={'class':'trail-text'})
con = u""
for item in spans:
    if item.string:
        yay = item.string.strip()
        # Drop English stopwords from the teaser text.
        yay = ' '.join([word for word in yay.split() if word not in (stopwords.words('english'))])
        con += yay.encode("utf-8") + u"\n"
print con
| {
"content_hash": "4af6bc570e5b16bc65161f4264e80e88",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 98,
"avg_line_length": 23.08,
"alnum_prop": 0.6880415944540728,
"repo_name": "maddyloo/miniBibServer",
"id": "e53d8f7936033ea21aa584836df493005183d8ae",
"size": "617",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "github_api_stuff/printnews.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "13445"
},
{
"name": "HTML",
"bytes": "37674"
},
{
"name": "Python",
"bytes": "376444"
},
{
"name": "TeX",
"bytes": "38143"
}
],
"symlink_target": ""
} |
##########################################################################
#
# Copyright 2011 Jose Fonseca
# Copyright 2008-2010 VMware, Inc.
# All Rights Reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
##########################################################################/
"""GLX tracing generator."""
from gltrace import GlTracer
from specs.stdapi import Module, API
from specs.glapi import glapi
from specs.glxapi import glxapi
class GlxTracer(GlTracer):
    """Code generator emitting the GLX tracing wrappers (Python 2 prints).

    Specializes GlTracer with GLX knowledge: which entry points
    create/destroy/bind contexts, and the pixel read-back hack needed
    to record glXBindTexImageEXT as a fake glTexImage2D.
    """
    def isFunctionPublic(self, function):
        # The symbols visible in libGL.so can vary, so expose them all
        return True
    # Entry points that resolve other GL entry points at runtime.
    getProcAddressFunctionNames = [
        "glXGetProcAddress",
        "glXGetProcAddressARB",
    ]
    # Entry points whose result is a new GLX context.
    createContextFunctionNames = [
        'glXCreateContext',
        'glXCreateContextAttribsARB',
        'glXCreateContextWithConfigSGIX',
        'glXCreateNewContext',
    ]
    destroyContextFunctionNames = [
        'glXDestroyContext',
    ]
    # Entry points that change which context is current.
    makeCurrentFunctionNames = [
        'glXMakeCurrent',
        'glXMakeContextCurrent',
        'glXMakeCurrentReadSGI',
    ]
    def traceFunctionImplBody(self, function):
        """Emit the C++ body of one traced entry point to stdout."""
        # Unregister the context before the real destroy call runs.
        if function.name in self.destroyContextFunctionNames:
            print '    gltrace::releaseContext((uintptr_t)ctx);'
        GlTracer.traceFunctionImplBody(self, function)
        # Register newly created contexts with the tracer runtime.
        if function.name in self.createContextFunctionNames:
            print '    if (_result != NULL)'
            print '        gltrace::createContext((uintptr_t)_result);'
        # Track the thread's current context on successful make-current.
        if function.name in self.makeCurrentFunctionNames:
            print '    if (_result) {'
            print '        if (ctx != NULL)'
            print '            gltrace::setContext((uintptr_t)ctx);'
            print '        else'
            print '            gltrace::clearContext();'
            print '    }'
        if function.name == 'glXBindTexImageEXT':
            # FIXME: glXBindTexImageEXT gets called frequently, so we should
            # avoid recording the same data over and over again somehow, e.g.:
            # - get the pixels before and after glXBindTexImageEXT, and only
            #   emit emitFakeTexture2D when it changes
            # - keep a global hash of the pixels
            # FIXME: Handle mipmaps
            print r'''
            unsigned glx_target = 0;
            _glXQueryDrawable(display, drawable, GLX_TEXTURE_TARGET_EXT, &glx_target);
            GLenum target;
            switch (glx_target) {
            // FIXME
            //case GLX_TEXTURE_1D_EXT:
            //    target = GL_TEXTURE_1D;
            //    break;
            case GLX_TEXTURE_2D_EXT:
                target = GL_TEXTURE_2D;
                break;
            case GLX_TEXTURE_RECTANGLE_EXT:
                target = GL_TEXTURE_RECTANGLE;
                break;
            default:
                os::log("apitrace: warning: %s: unsupported GLX_TEXTURE_TARGET_EXT 0x%u\n", __FUNCTION__, glx_target);
                target = GL_NONE;
                break;
            }
            GLint level = 0;
            GLint internalformat = GL_NONE;
            _glGetTexLevelParameteriv(target, level, GL_TEXTURE_INTERNAL_FORMAT, &internalformat);
            // XXX: GL_TEXTURE_INTERNAL_FORMAT cannot be trusted on NVIDIA
            // -- it sometimes returns GL_BGRA, even though GL_BGR/BGRA is
            // not a valid internal format.
            switch (internalformat) {
            case GL_BGR:
                internalformat = GL_RGB;
                break;
            case GL_BGRA:
                internalformat = GL_RGBA;
                break;
            }
            GLint width = 0;
            _glGetTexLevelParameteriv(target, level, GL_TEXTURE_WIDTH, &width);
            GLint height = 0;
            _glGetTexLevelParameteriv(target, level, GL_TEXTURE_HEIGHT, &height);
            GLint border = 0;
            // XXX: We always use GL_RGBA format to read the pixels because:
            // - some implementations (Mesa) seem to return bogus results
            //   for GLX_TEXTURE_FORMAT_EXT
            // - hardware usually stores GL_RGB with 32bpp, so it should be
            //   faster to read/write
            // - it is more robust against GL_(UN)PACK_ALIGNMENT state
            //   changes
            // The drawback is that traces will be slightly bigger.
            GLenum format = GL_RGBA;
            GLenum type = GL_UNSIGNED_BYTE;
            if (target && internalformat && height && width) {
                // FIXME: This assumes (UN)PACK state (in particular
                // GL_(UN)PACK_ROW_LENGTH) is set to its defaults. We
                // really should temporarily reset the state here (and emit
                // according fake calls) to cope when its not. At very
                // least we need a heads up warning that this will cause
                // problems.
                GLint alignment = 4;
                GLint row_stride = _align(width * 4, alignment);
                GLvoid * pixels = malloc(height * row_stride);
                _glGetTexImage(target, level, format, type, pixels);
            '''
            self.emitFakeTexture2D()
            print r'''
                free(pixels);
            }
            '''
if __name__ == '__main__':
    # Prologue of the generated C++ tracer.
    print
    print '#include <stdlib.h>'
    print '#include <string.h>'
    print
    print '#include <dlfcn.h>'
    print
    print '#include "trace_writer_local.hpp"'
    print
    print '// To validate our prototypes'
    print '#define GL_GLEXT_PROTOTYPES'
    print '#define GLX_GLXEXT_PROTOTYPES'
    print
    print '#include "glproc.hpp"'
    print '#include "glsize.hpp"'
    print
    # Merge the GLX and GL APIs and emit wrappers for all entry points.
    module = Module()
    module.mergeModule(glxapi)
    module.mergeModule(glapi)
    api = API()
    api.addModule(module)
    tracer = GlxTracer()
    tracer.traceApi(api)
    # Epilogue: a dlopen() interceptor, because LD_PRELOAD does not cover
    # symbols resolved via dlopen/dlsym (see comment in the C code below).
    print r'''
/*
 * Invoke the true dlopen() function.
 */
static void *_dlopen(const char *filename, int flag)
{
    typedef void * (*PFN_DLOPEN)(const char *, int);
    static PFN_DLOPEN dlopen_ptr = NULL;
    if (!dlopen_ptr) {
        dlopen_ptr = (PFN_DLOPEN)dlsym(RTLD_NEXT, "dlopen");
        if (!dlopen_ptr) {
            os::log("apitrace: error: dlsym(RTLD_NEXT, \"dlopen\") failed\n");
            return NULL;
        }
    }
    return dlopen_ptr(filename, flag);
}
/*
 * Several applications, such as Quake3, use dlopen("libGL.so.1"), but
 * LD_PRELOAD does not intercept symbols obtained via dlopen/dlsym, therefore
 * we need to intercept the dlopen() call here, and redirect to our wrapper
 * shared object.
 */
extern "C" PUBLIC
void * dlopen(const char *filename, int flag)
{
    void *handle;
    handle = _dlopen(filename, flag);
    const char * libgl_filename = getenv("TRACE_LIBGL");
    if (filename && handle && !libgl_filename) {
        if (0) {
            os::log("apitrace: warning: dlopen(\"%s\", 0x%x)\n", filename, flag);
        }
        // FIXME: handle absolute paths and other versions
        if (strcmp(filename, "libGL.so") == 0 ||
            strcmp(filename, "libGL.so.1") == 0) {
            // Use the true libGL.so handle instead of RTLD_NEXT from now on
            _libGlHandle = handle;
            // Get the file path for our shared object, and use it instead
            static int dummy = 0xdeedbeef;
            Dl_info info;
            if (dladdr(&dummy, &info)) {
                os::log("apitrace: redirecting dlopen(\"%s\", 0x%x)\n", filename, flag);
                handle = _dlopen(info.dli_fname, flag);
            } else {
                os::log("apitrace: warning: dladdr() failed\n");
            }
        }
    }
    return handle;
}
'''
| {
"content_hash": "6ddc4455501de7301cda0bb32a8c61e9",
"timestamp": "",
"source": "github",
"line_count": 250,
"max_line_length": 122,
"avg_line_length": 35.716,
"alnum_prop": 0.5627729868966289,
"repo_name": "PeterLValve/apitrace",
"id": "568eb1a129ad3db1d8d38e806c87abbe1efddd68",
"size": "8929",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "wrappers/glxtrace.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "3706651"
},
{
"name": "C++",
"bytes": "2150305"
},
{
"name": "Emacs Lisp",
"bytes": "204"
},
{
"name": "Objective-C",
"bytes": "39268"
},
{
"name": "Python",
"bytes": "1655434"
},
{
"name": "Shell",
"bytes": "1400"
}
],
"symlink_target": ""
} |
"""
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from functools import partial
import collections
import json
import zkutil
class ZNodeMap(object):
    """Maintain a name -> znode-path mapping inside a single znode.

    The mapping is stored as one JSON object; a legacy newline/' -> '
    text format is still read transparently.
    """
    OLD_SEPARATOR = ' -> '
    def __init__(self, zk, path):
        """
        zk: KazooClient instance
        path: znode to store associations
        """
        self.zk = zk
        self.path = path
        zk.ensure_path(path)
    def set(self, name, dest):
        """Associate `name` with destination `dest` (optimistic versioning)."""
        mapping, version = self._get()
        mapping[name] = dest
        self._set(mapping, version)
    def get(self, name):
        """Return the destination for `name`; raises KeyError if absent."""
        return self.get_all()[name]
    def get_all(self):
        """Return the full map of names to destinations."""
        mapping, _version = self._get()
        return mapping
    def delete(self, name):
        """Remove `name` from the mapping; raises KeyError if absent."""
        mapping, version = self._get()
        del mapping[name]
        self._set(mapping, version)
    def _get(self):
        """Read and parse self.path; returns (mapping, znode_version)."""
        raw, stat = self.zk.get(self.path)
        if not raw:
            return {}, stat.version
        if self.OLD_SEPARATOR not in raw:
            return json.loads(raw), stat.version
        return self._get_old()
    def _set(self, data, version):
        """Serialize `data` as JSON and write it back at `version`."""
        self.zk.set(self.path, json.dumps(data), version)
    def _get_old(self):
        """Parse the legacy 'name -> dest' newline-separated format."""
        raw, stat = self.zk.get(self.path)
        text = raw.decode('utf8')
        if not text:
            mapping = {}
        else:
            mapping = dict(line.split(self.OLD_SEPARATOR)
                           for line in text.split('\n'))
        return mapping, stat.version
class Env(unicode):
    """An environment path relative to a service; '' denotes the root.

    Python 2 only: subclasses `unicode`. Names must not start with '/'.
    """
    def __new__(cls, name):
        if not name:
            # None or '' both mean the root environment.
            empty = True
            name = ''
        else:
            assert name[0] != '/'
            empty = False
        s = unicode.__new__(cls, name)
        s._empty = empty
        return s
    @property
    def is_root(self):
        # True only for the root environment (Env(None) / Env('')).
        return self._empty
    @property
    def components(self):
        # Path segments; the root maps to [''] so callers can treat
        # root and non-root uniformly.
        if self.is_root:
            return ['']
        else:
            return self.split('/')
# Shared singleton-ish handle for the root environment.
Env.Root = Env(None)
class Jones(object):
    """
    ZooKeeper-backed hierarchical configuration store for one service.

    Glossary:
    view
        refers to a node which has has the following algorithm applied
        for node in root -> env
            update view with node.config
    environment
        a node in the service graph
        as passed to get/set config, it should identify
        the node within the service
        i.e. "production" or "dev/mwhooker"
    """
    def __init__(self, service, zk):
        self.zk = zk
        self.service = service
        # znode layout under /services/<service>:
        #   conf/     raw per-environment config
        #   views/    flattened (root..env merged) config served to hosts
        #   nodemaps/ hostname -> view-path associations
        self.root = "/services/%s" % service
        self.conf_path = "%s/conf" % self.root
        self.view_path = "%s/views" % self.root
        self.associations = ZNodeMap(zk, "%s/nodemaps" % self.root)
        # Pre-bound helpers mapping an Env to its conf/view znode path.
        self._get_env_path = partial(self._get_path_by_env, self.conf_path)
        self._get_view_path = partial(self._get_path_by_env, self.view_path)
    def create_config(self, env, conf):
        """
        Set conf to env under service.
        pass None to env for root.
        """
        if not isinstance(conf, collections.Mapping):
            raise ValueError("conf must be a collections.Mapping")
        self.zk.ensure_path(self.view_path)
        self._create(
            self._get_env_path(env),
            conf
        )
        self._update_view(env)
    def set_config(self, env, conf, version):
        """
        Set conf to env under service.
        pass None to env for root.
        `version` is the expected znode version (optimistic concurrency).
        """
        if not isinstance(conf, collections.Mapping):
            raise ValueError("conf must be a collections.Mapping")
        self._set(
            self._get_env_path(env),
            conf,
            version
        )
        path = self._get_env_path(env)
        """Update env's children with new config."""
        # Re-flatten the view of env and of every descendant environment.
        for child in zkutil.walk(self.zk, path):
            self._update_view(Env(child[len(self.conf_path)+1:]))
    def delete_config(self, env, version):
        # Remove both the raw config (version-checked) and its view.
        self.zk.delete(
            self._get_env_path(env),
            version
        )
        self.zk.delete(
            self._get_view_path(env)
        )
    def get_config(self, hostname):
        """
        Returns a configuration for hostname.
        Looks up the host's associated view and returns its data.
        """
        version, config = self._get(
            self.associations.get(hostname)
        )
        return config
    def get_config_by_env(self, env):
        """
        Get the config dictionary by `env`.
        Returns a 2-tuple like (version, data).
        """
        return self._get(
            self._get_env_path(env)
        )
    def get_view_by_env(self, env):
        """
        Returns the view (flattened root..env config) of `env`.
        """
        version, data = self._get(self._get_view_path(env))
        return data
    def assoc_host(self, hostname, env):
        """
        Associate a host with an environment.
        hostname is opaque to Jones.
        Any string which uniquely identifies a host is acceptable.
        """
        dest = self._get_view_path(env)
        self.associations.set(hostname, dest)
    def get_associations(self, env):
        """
        Get all the associations for this env.
        Root cannot have associations, so return None for root.
        returns a list of hostnames associated with `env`.
        """
        if env.is_root:
            return None
        associations = self.associations.get_all()
        return [assoc for assoc in associations
                if associations[assoc] == self._get_view_path(env)]
    def delete_association(self, hostname):
        self.associations.delete(hostname)
    def exists(self):
        """Does this service exist in zookeeper"""
        return self.zk.exists(
            self._get_env_path(Env.Root)
        )
    def delete_all(self):
        # Recursively remove the whole service subtree.
        self.zk.delete(self.root, recursive=True)
    def get_child_envs(self, env):
        # All descendant environment names (relative paths) under `env`.
        prefix = self._get_env_path(env)
        envs = zkutil.walk(self.zk, prefix)
        return map(lambda e: e[len(prefix)+1:], envs)
    def _flatten_from_root(self, env):
        """
        Flatten values from root down in to new view.
        Later (deeper) environments override keys set by their ancestors.
        """
        nodes = env.components
        # Path through the znode graph from root ('') to env
        path = [nodes[:n] for n in xrange(len(nodes) + 1)]
        # Expand path and map it to the root
        path = map(
            self._get_env_path,
            [Env('/'.join(p)) for p in path]
        )
        data = {}
        for n in path:
            _, config = self._get(n)
            data.update(config)
        return data
    def _update_view(self, env):
        # Recompute and store the flattened view for `env`.
        dest = self._get_view_path(env)
        if not self.zk.exists(dest):
            self.zk.ensure_path(dest)
        self._set(dest, self._flatten_from_root(env))
    def _get_path_by_env(self, prefix, env):
        # Root maps to the prefix itself; others append their path.
        if env.is_root:
            return prefix
        return '/'.join((prefix, env))
    def _get_nodemap_path(self, hostname):
        # NOTE(review): self.nodemap_path is never defined on this class;
        # this method looks dead or broken -- confirm before using.
        return "%s/%s" % (self.nodemap_path, hostname)
    def _get(self, path):
        # Returns (znode_version, deserialized_data).
        data, metadata = self.zk.get(path)
        return metadata.version, json.loads(data)
    def _set(self, path, data, *args, **kwargs):
        return self.zk.set(path, json.dumps(data), *args, **kwargs)
    def _create(self, path, data, *args, **kwargs):
        return self.zk.create(path, json.dumps(data), *args, **kwargs)
| {
"content_hash": "1541a3b15587a719002b93d8c33b0fb2",
"timestamp": "",
"source": "github",
"line_count": 304,
"max_line_length": 76,
"avg_line_length": 26.236842105263158,
"alnum_prop": 0.5577983951855566,
"repo_name": "mwhooker/jones",
"id": "1dc604f2766e318347b301a785f986da9cfb787f",
"size": "7976",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "jones/jones.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C#",
"bytes": "11226"
},
{
"name": "CSS",
"bytes": "6881"
},
{
"name": "JavaScript",
"bytes": "22357"
},
{
"name": "Python",
"bytes": "30980"
},
{
"name": "Ruby",
"bytes": "7885"
}
],
"symlink_target": ""
} |
from django.core.exceptions import ValidationError
from django.db import models
from django.template.defaultfilters import slugify
from django.utils.translation import ugettext_lazy as _
from wagtail.wagtailadmin.edit_handlers import FieldPanel
from wagtail.wagtailsnippets.models import register_snippet
@register_snippet
class Category(models.Model):
    """Hierarchical taxonomy term, registered as a Wagtail snippet.

    The slug is auto-generated from the name on first save.
    """
    name = models.CharField(_('Name'), max_length=80, unique=True)
    slug = models.SlugField(unique=True, max_length=80)
    parent = models.ForeignKey('self', blank=True, null=True, related_name="children",
        help_text=_('Categories allows to sort your content according a hierarchy'))
    description = models.CharField(max_length=500, blank=True)
    class Meta:
        ordering = ['name']
        verbose_name = _('Category')
        verbose_name_plural = _('Categories')
    panels = [
        FieldPanel('name'),
        FieldPanel('parent'),
        FieldPanel('description'),
    ]
    def __str__(self):
        return self.name
    def clean(self):
        """Reject self-parenting and parent cycles of any depth.

        The previous implementation only detected cycles of length one
        or two; this walks the full ancestor chain (with a visited-set
        guard against pre-existing cycles that do not involve self).
        """
        if not self.parent:
            return
        if self.parent == self:
            raise ValidationError('Parent category cannot be self.')
        seen = {id(self.parent)}
        ancestor = self.parent.parent
        while ancestor is not None:
            if ancestor == self:
                raise ValidationError('Cannot have circular Parents.')
            if id(ancestor) in seen:
                # Cycle among other nodes; stop walking to avoid looping.
                break
            seen.add(id(ancestor))
            ancestor = ancestor.parent
    def save(self, *args, **kwargs):
        # Derive the slug from the name on first save only.
        if not self.slug:
            self.slug = slugify(self.name)
        return super(Category, self).save(*args, **kwargs)
| {
"content_hash": "9b695c737f019c99d757abc75e3e7b90",
"timestamp": "",
"source": "github",
"line_count": 42,
"max_line_length": 107,
"avg_line_length": 35.92857142857143,
"alnum_prop": 0.6481113320079522,
"repo_name": "apihackers/wapps",
"id": "15f28daaf1f01188f93ac95b36b9c9540493745f",
"size": "1509",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "wapps/models/category.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "935"
},
{
"name": "HTML",
"bytes": "6935"
},
{
"name": "JavaScript",
"bytes": "5969"
},
{
"name": "Python",
"bytes": "183616"
},
{
"name": "Shell",
"bytes": "818"
},
{
"name": "Vue",
"bytes": "1969"
}
],
"symlink_target": ""
} |
from . import core as html5
@html5.tag
class Label(html5.Label):
    """Plain html5.Label carrying the ignite label CSS classes."""
    _parserTagName = "ignite-label"
    def __init__(self, *args, **kwargs):
        super(Label, self).__init__(style="label ignt-label", *args, **kwargs)
@html5.tag
class Input(html5.Input):
    """Plain html5.Input carrying the ignite input CSS classes."""
    _parserTagName = "ignite-input"
    def __init__(self, *args, **kwargs):
        super(Input, self).__init__(style="input ignt-input", *args, **kwargs)
@html5.tag
class Switch(html5.Div):
    """Toggle switch: a hidden checkbox plus a styled label.

    The inner checkbox is exposed as self.input; its checked state is
    accessible through _setChecked/_getChecked.
    """
    _parserTagName = "ignite-switch"
    def __init__(self, *args, **kwargs):
        super(Switch, self).__init__(style="switch ignt-switch", *args, **kwargs)
        checkbox = html5.Input(style="switch-input")
        checkbox["type"] = "checkbox"
        self.appendChild(checkbox)
        self.input = checkbox
        caption = html5.Label(forElem=checkbox)
        caption.addClass("switch-label")
        self.appendChild(caption)
    def _setChecked(self, value):
        self.input["checked"] = bool(value)
    def _getChecked(self):
        return self.input["checked"]
@html5.tag
class Check(html5.Input):
    """Checkbox widget: an inner checkbox input plus its styled label.

    NOTE(review): this inherits html5.Input yet composes a *separate*
    checkbox child -- siblings Switch/Radio extend html5.Div instead;
    confirm the base class is intentional.
    """
    _parserTagName = "ignite-check"
    def __init__(self, *args, **kwargs):
        super(Check, self).__init__(style="check ignt-check", *args, **kwargs)
        checkInput = html5.Input()
        checkInput.addClass("check-input")
        checkInput["type"] = "checkbox"
        self.appendChild(checkInput)
        checkLabel = html5.Label(forElem=checkInput)
        checkLabel.addClass("check-label")
        self.appendChild(checkLabel)
@html5.tag
class Radio(html5.Div):
    """Radio-button widget: an inner radio input plus its styled label."""
    _parserTagName = "ignite-radio"
    def __init__(self, *args, **kwargs):
        super(Radio, self).__init__(style="radio ignt-radio", *args, **kwargs)
        button = html5.Input()
        button["type"] = "radio"
        button.addClass("radio-input")
        self.appendChild(button)
        caption = html5.Label(forElem=button)
        caption.addClass("radio-label")
        self.appendChild(caption)
@html5.tag
class Select(html5.Select):
    """Styled select box seeded with an empty, non-choosable placeholder."""
    _parserTagName = "ignite-select"
    def __init__(self, *args, **kwargs):
        super(Select, self).__init__(style="select ignt-select", *args, **kwargs)
        # Placeholder entry: pre-selected but disabled so the user must
        # actively pick a real option.
        placeholder = html5.Option()
        placeholder["selected"] = True
        placeholder["disabled"] = True
        placeholder.element.innerHTML = ""
        self.appendChild(placeholder)
@html5.tag
class Textarea(html5.Textarea):
    """Plain html5.Textarea carrying the ignite textarea CSS classes."""
    _parserTagName = "ignite-textarea"
    def __init__(self, *args, **kwargs):
        super(Textarea, self).__init__(style="textarea ignt-textarea", *args, **kwargs)
@html5.tag
class Progress(html5.Progress):
    """Plain html5.Progress carrying the ignite progress CSS classes."""
    _parserTagName = "ignite-progress"
    def __init__(self, *args, **kwargs):
        super(Progress, self).__init__(style="progress ignt-progress", *args, **kwargs)
@html5.tag
class Item(html5.Div):
    """List/tile item with an image slot, a headline and an optional subline."""
    _parserTagName = "ignite-item"
    def __init__(self, title=None, descr=None, className=None, *args, **kwargs):
        """
        title: optional headline text
        descr: optional description, rendered in a subline div
        className: optional extra CSS class(es) for the item itself
        """
        super(Item, self).__init__(style="item ignt-item", *args, **kwargs)
        if className:
            self.addClass(className)
        self.fromHTML("""
            <div class="item-image ignt-item-image" [name]="itemImage">
            </div>
            <div class="item-content ignt-item-content" [name]="itemContent">
                <div class="item-headline ignt-item-headline" [name]="itemHeadline">
                </div>
            </div>
        """)
        if title:
            self.itemHeadline.appendChild(html5.TextNode(title))
        if descr:
            self.itemSubline = html5.Div()
            # Bugfix: the subline CSS classes were previously added to the
            # Item itself (self.addClass); they belong on the subline div.
            self.itemSubline.addClass("item-subline ignt-item-subline")
            self.itemSubline.appendChild(html5.TextNode(descr))
            self.appendChild(self.itemSubline)
@html5.tag
class Table(html5.Table):
    """Ignite-styled table with helpers to grow the body on demand."""
    _parserTagName = "ignite-table"
    def __init__(self, *args, **kwargs):
        super(Table, self).__init__(*args, **kwargs)
        self.head.addClass("ignt-table-head")
        self.body.addClass("ignt-table-body")
    def prepareRow(self, row):
        """Ensure the body contains at least row+1 rows (rowspan-aware)."""
        assert row >= 0, "Cannot create rows with negative index"
        # Discount existing rows (by their rowspan); if `row` is already
        # covered, nothing to do.
        for child in self.body._children:
            row -= child["rowspan"]
            if row < 0:
                return
        # Append the missing rows.
        while row >= 0:
            tableRow = html5.Tr()
            tableRow.addClass("ignt-table-body-row")
            self.body.appendChild(tableRow)
            row -= 1
    def prepareCol(self, row, col):
        """Ensure cell (row, col) exists, creating rows/cells as needed."""
        assert col >= 0, "Cannot create cols with negative index"
        self.prepareRow(row)
        for rowChild in self.body._children:
            row -= rowChild["rowspan"]
            if row < 0:
                # Target row found; now discount existing cells (colspan-aware).
                for colChild in rowChild._children:
                    col -= colChild["colspan"]
                    if col < 0:
                        return
                # Append the missing cells.
                while col >= 0:
                    tableCell = html5.Td()
                    tableCell.addClass("ignt-table-body-cell")
                    rowChild.appendChild(tableCell)
                    col -= 1
                return
    def fastGrid( self, rows, cols, createHidden=False ):
        """Build a rows x cols body via a single fromHTML call (fast path)."""
        colsstr = "".join(['<td class="ignt-table-body-cell"></td>' for i in range(0, cols)])
        tblstr = '<tbody [name]="body" class="ignt-table-body" >'
        for r in range(0, rows):
            tblstr += '<tr class="ignt-table-body-row %s">%s</tr>' %("is-hidden" if createHidden else "",colsstr)
        tblstr +="</tbody>"
        self.fromHTML(tblstr)
| {
"content_hash": "e3819451cb4d33e589bd3960a81aee38",
"timestamp": "",
"source": "github",
"line_count": 185,
"max_line_length": 104,
"avg_line_length": 25.643243243243244,
"alnum_prop": 0.6736930860033726,
"repo_name": "erezsh/lark",
"id": "61c10a068eeffcd6b5751ab1b079e1d1a5fd7f59",
"size": "4768",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "docs/ide/app/ignite.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "GAP",
"bytes": "684"
},
{
"name": "Nearley",
"bytes": "44"
},
{
"name": "Python",
"bytes": "177298"
}
],
"symlink_target": ""
} |
''' Provides the ``ServerSession`` class.
'''
from __future__ import absolute_import
import logging
log = logging.getLogger(__name__)
import time
from tornado import gen, locks
from ..util.tornado import yield_for_all_futures
from .callbacks import _DocumentCallbackGroup
def current_time():
    '''Return the time in milliseconds since the epoch as a floating
    point number.

    Prefers the monotonic clock (Python >= 3.3), which is immune to
    wall-clock adjustments; otherwise falls back to time.time().
    '''
    try:
        # python >=3.3 only
        return time.monotonic() * 1000
    except AttributeError:
        # Narrowed from a bare `except:` -- only the missing-attribute
        # case (old Python without time.monotonic) should fall through.
        # if your python is old, don't set your clock backward!
        return time.time() * 1000
def _needs_document_lock(func):
    '''Decorator that adds the necessary locking and post-processing
    to manipulate the session's document. Expects to decorate a
    method on ServerSession and transforms it into a coroutine
    if it wasn't already.

    While the lock is held, self._pending_writes collects the patch
    futures generated by document changes; they are awaited after the
    decorated method finishes but before the lock is released.
    '''
    @gen.coroutine
    def _needs_document_lock_wrapper(self, *args, **kwargs):
        # while we wait for and hold the lock, prevent the session
        # from being discarded. This avoids potential weirdness
        # with the session vanishing in the middle of some async
        # task.
        self.block_expiration()
        try:
            with (yield self._lock.acquire()):
                if self._pending_writes is not None:
                    raise RuntimeError("internal class invariant violated: _pending_writes " + \
                                       "should be None if lock is not held")
                self._pending_writes = []
                try:
                    result = yield yield_for_all_futures(func(self, *args, **kwargs))
                finally:
                    # we want to be very sure we reset this or we'll
                    # keep hitting the RuntimeError above as soon as
                    # any callback goes wrong
                    pending_writes = self._pending_writes
                    self._pending_writes = None
                # Flush the patches queued while the lock was held.
                for p in pending_writes:
                    yield p
            raise gen.Return(result)
        finally:
            self.unblock_expiration()
    return _needs_document_lock_wrapper
class ServerSession(object):
''' Hosts an application "instance" (an instantiated Document) for one or more connections.
'''
    def __init__(self, session_id, document, io_loop=None):
        '''
        session_id : unique, required identifier for this session
        document : the Document instance this session hosts (required)
        io_loop : loop used to schedule the document's session callbacks
        '''
        if session_id is None:
            raise ValueError("Sessions must have an id")
        if document is None:
            raise ValueError("Sessions must have a document")
        self._id = session_id
        self._document = document
        self._loop = io_loop
        self._subscribed_connections = set()
        # Timestamp (ms) used to decide when an idle session may expire.
        self._last_unsubscribe_time = current_time()
        self._lock = locks.Lock()
        self._current_patch_connection = None
        self._document.on_change_dispatch_to(self)
        self._callbacks = _DocumentCallbackGroup(io_loop)
        # Non-None only while the document lock is held (see
        # _needs_document_lock); collects pending patch writes.
        self._pending_writes = None
        self._destroyed = False
        self._expiration_requested = False
        self._expiration_blocked_count = 0
        # Re-register the document's existing session callbacks so they
        # run under the document lock.
        wrapped_callbacks = self._wrap_session_callbacks(self._document.session_callbacks)
        self._callbacks.add_session_callbacks(wrapped_callbacks)
    @property
    def document(self):
        ''' The Document instance this session hosts. '''
        return self._document
    @property
    def id(self):
        ''' The unique session id. '''
        return self._id
    @property
    def destroyed(self):
        ''' True once destroy() has been called. '''
        return self._destroyed
    @property
    def expiration_requested(self):
        ''' True if request_expiration() has been called (testing hook). '''
        return self._expiration_requested
    @property
    def expiration_blocked(self):
        ''' True while at least one block_expiration() is outstanding. '''
        return self._expiration_blocked_count > 0
    @property
    def expiration_blocked_count(self):
        ''' Number of outstanding block_expiration() calls. '''
        return self._expiration_blocked_count
    def destroy(self):
        ''' Tear down the session: mark it destroyed, stop receiving
        document change notifications, and cancel all scheduled
        session callbacks. '''
        self._destroyed = True
        self._document.remove_on_change(self)
        self._callbacks.remove_all_callbacks()
    def request_expiration(self):
        """ Used in test suite for now. Forces immediate expiration if no connections."""
        # Only sets the flag; the actual teardown is performed elsewhere
        # (by whatever polls `expiration_requested` -- not visible here).
        self._expiration_requested = True
    def block_expiration(self):
        ''' Prevent the session from expiring until a matching
        unblock_expiration() call; calls may nest. '''
        self._expiration_blocked_count += 1
def unblock_expiration(self):
if self._expiration_blocked_count <= 0:
raise RuntimeError("mismatched block_expiration / unblock_expiration")
self._expiration_blocked_count -= 1
    def subscribe(self, connection):
        """This should only be called by ServerConnection.subscribe_session or our book-keeping will be broken"""
        self._subscribed_connections.add(connection)
    def unsubscribe(self, connection):
        """This should only be called by ServerConnection.unsubscribe_session or our book-keeping will be broken"""
        self._subscribed_connections.discard(connection)
        # Record when the session last lost a connection, for idle expiry.
        self._last_unsubscribe_time = current_time()
    @property
    def connection_count(self):
        ''' Number of connections currently subscribed to this session. '''
        return len(self._subscribed_connections)
    @property
    def milliseconds_since_last_unsubscribe(self):
        ''' Milliseconds elapsed since the most recent unsubscribe. '''
        return current_time() - self._last_unsubscribe_time
    @_needs_document_lock
    def with_document_locked(self, func, *args, **kwargs):
        ''' Asynchronously locks the document and runs the function with it locked.'''
        return func(*args, **kwargs)
    def _wrap_document_callback(self, callback):
        ''' Return `callback` wrapped so it runs under the document lock,
        unless it carries a truthy `nolock` attribute (opt-out). '''
        if getattr(callback, "nolock", False):
            return callback
        def wrapped_callback(*args, **kwargs):
            return self.with_document_locked(callback, *args, **kwargs)
        return wrapped_callback
def _wrap_session_callback(self, callback):
wrapped = self._wrap_document_callback(callback.callback)
return callback._copy_with_changed_callback(wrapped)
def _wrap_session_callbacks(self, callbacks):
wrapped = []
for cb in callbacks:
wrapped.append(self._wrap_session_callback(cb))
return wrapped
    def _document_patched(self, event):
        """Forward a document change event to every subscribed connection.

        A patch that this session itself applied (event.setter is self) is not
        echoed back to the connection that originated it.
        """
        may_suppress = event.setter is self
        if self._pending_writes is None:
            raise RuntimeError("_pending_writes should be non-None when we have a document lock, and we should have the lock when the document changes")
        # TODO (havocp): our "change sync" protocol is flawed
        # because if both sides change the same attribute at the
        # same time, they will each end up with the state of the
        # other and their final states will differ.
        for connection in self._subscribed_connections:
            if may_suppress and connection is self._current_patch_connection:
                pass #log.debug("Not sending notification back to client %r for a change it requested", connection)
            else:
                # Queue the write; the lock holder flushes _pending_writes later.
                self._pending_writes.append(connection.send_patch_document(event))
    @_needs_document_lock
    def _handle_pull(self, message, connection):
        """Reply to a PULL-DOC request with the full current document."""
        log.debug("Sending pull-doc-reply from session %r", self.id)
        return connection.protocol.create('PULL-DOC-REPLY', message.header['msgid'], self.document)

    def _session_callback_added(self, event):
        # Wrap the new callback so it runs under the document lock.
        wrapped = self._wrap_session_callback(event.callback)
        self._callbacks.add_session_callback(wrapped)

    def _session_callback_removed(self, event):
        self._callbacks.remove_session_callback(event.callback)

    @classmethod
    def pull(cls, message, connection):
        ''' Handle a PULL-DOC, return a Future with work to be scheduled. '''
        return connection.session._handle_pull(message, connection)
    @_needs_document_lock
    def _handle_push(self, message, connection):
        """Replace this session's document state with the pushed document."""
        log.debug("pushing doc to session %r", self.id)
        message.push_to_document(self.document)
        return connection.ok(message)

    @classmethod
    def push(cls, message, connection):
        ''' Handle a PUSH-DOC, return a Future with work to be scheduled. '''
        return connection.session._handle_push(message, connection)

    @_needs_document_lock
    def _handle_patch(self, message, connection):
        """Apply a PATCH-DOC from a client to the document.

        _current_patch_connection marks the originating connection so
        _document_patched can avoid echoing the change back to it; it is
        always cleared again, even if applying the patch raises.
        """
        self._current_patch_connection = connection
        try:
            message.apply_to_document(self.document, self)
        finally:
            self._current_patch_connection = None
        return connection.ok(message)

    @classmethod
    def patch(cls, message, connection):
        ''' Handle a PATCH-DOC, return a Future with work to be scheduled. '''
        return connection.session._handle_patch(message, connection)
| {
"content_hash": "4a4cbf479bef89bc9f9529f301b12285",
"timestamp": "",
"source": "github",
"line_count": 226,
"max_line_length": 152,
"avg_line_length": 37.01327433628319,
"alnum_prop": 0.6411237298266587,
"repo_name": "aiguofer/bokeh",
"id": "4ecf24bef33efa78993713c6c7292570a4ce30df",
"size": "8365",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "bokeh/server/session.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "1442"
},
{
"name": "CSS",
"bytes": "92824"
},
{
"name": "CoffeeScript",
"bytes": "1083539"
},
{
"name": "HTML",
"bytes": "46812"
},
{
"name": "JavaScript",
"bytes": "31782"
},
{
"name": "Jupyter Notebook",
"bytes": "3981"
},
{
"name": "Makefile",
"bytes": "1164"
},
{
"name": "Python",
"bytes": "2334120"
},
{
"name": "Shell",
"bytes": "3660"
},
{
"name": "TypeScript",
"bytes": "106636"
}
],
"symlink_target": ""
} |
import sys
import json
import requests
import socket
import md5
from os.path import getmtime
from time import time
from tempfile import gettempdir
# This host's name, used to query its own Consul agent for health checks.
nodeName = socket.gethostname()
#print nodeName
# Local Consul agent HTTP API: health checks registered on this node.
url = 'http://127.0.0.1:8500/v1/health/node/{0}'.format(nodeName)
#print url
# Seconds before the cached Consul response is considered stale.
CACHE_TIMEOUT=60
# Cache file shared by successive zabbix item invocations.
TEMP_FILE_NAME="{}/consul-zabbix-checker-cache.dat".format(gettempdir())
def is_cache_valid(path=None, timeout=None):
    """Return True if the cache file exists and is fresher than *timeout* seconds.

    Args:
        path: cache file to check; defaults to the module-level TEMP_FILE_NAME.
        timeout: maximum age in seconds; defaults to the module-level
            CACHE_TIMEOUT. Both parameters default to the original hard-coded
            globals, so existing zero-argument callers are unaffected.
    """
    if path is None:
        path = TEMP_FILE_NAME
    if timeout is None:
        timeout = CACHE_TIMEOUT
    # check if cache file is exist
    try:
        mtime = getmtime(path)
    except OSError:
        # Missing (or stat-failing) cache file: nothing usable cached.
        return False
    # Fresh only if the file was modified within the allowed interval.
    return (int(time()) - mtime) <= timeout
def getDiscovery():
    """Print a Zabbix low-level-discovery JSON document listing every Consul
    health check registered on this node, excluding Consul's own serfHealth.
    (Python 2 source: note the print statement below.)"""
    discovery_list = {}
    discovery_list['data'] = []
    # Live query of the local Consul agent (no cache for discovery).
    nodeServices = requests.get(url).text
    services = json.loads(nodeServices)
    for service in services:
        # serfHealth is the agent's built-in liveness check, not a user service.
        if service['CheckID'] != 'serfHealth':
            #print service['Status']
            #print service['ServiceName']
            # {#SERVICEID} is the LLD macro name the Zabbix template consumes.
            zbx_item = {"{#SERVICEID}": service['ServiceID']}
            discovery_list['data'].append(zbx_item)
    print json.dumps(discovery_list, indent=4, sort_keys=True)
def getStatus(ServiceID):
    """Print 1 if the given Consul service check is 'passing', else 0.

    Uses the shared cache file when fresh; otherwise re-queries Consul and
    rewrites the cache (best-effort).
    """
    if not is_cache_valid():
        nodeServices = requests.get(url).text
        try:
            open(TEMP_FILE_NAME, mode="w").write(nodeServices)
        # NOTE(review): on Python 2, open()/write() failures raise IOError,
        # which is NOT a subclass of OSError — this handler likely never
        # fires; confirm whether (IOError, OSError) was intended.
        except OSError as e:
            pass
        services = json.loads(nodeServices)
    else:
        try:
            services = json.loads(open(TEMP_FILE_NAME, mode='r').read())
        except IOError as e:
            # Cache unreadable: report "down" rather than crash the item.
            status=0
            print status
            return
    status = 0
    for service in services:
        if service['ServiceID'] == ServiceID:
            if service['Status'] == 'passing':
                status = 1
            else:
                status = 0
    # Zabbix consumes this single numeric value from stdout.
    print status
# CLI entry point: `script.py discovery` lists checks for Zabbix LLD,
# `script.py status <serviceID>` prints 1/0 for one check.
action = sys.argv[1].lower()
if action == 'discovery':
    getDiscovery()
elif action == 'status':
    serviceID = sys.argv[2]
    getStatus(serviceID)
| {
"content_hash": "23adc19c682ca890e806791fb4410979",
"timestamp": "",
"source": "github",
"line_count": 78,
"max_line_length": 72,
"avg_line_length": 25.53846153846154,
"alnum_prop": 0.6094377510040161,
"repo_name": "Zoomdata/zoomdata-tools",
"id": "edff39d50deb8ae74378544f0a072a742a010b66",
"size": "2011",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "monitoring-templates/zabbix/consul-service-states/consul2zabbix.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "920"
},
{
"name": "Python",
"bytes": "87797"
},
{
"name": "Shell",
"bytes": "55778"
}
],
"symlink_target": ""
} |
"""Data generators for translation data-sets."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensor2tensor.data_generators import problem
from tensor2tensor.data_generators import text_encoder
from tensor2tensor.data_generators import text_problems
from tensor2tensor.data_generators import translate
from tensor2tensor.utils import registry
# End-of-sentence marker.
EOS = text_encoder.EOS_ID

# Each entry is [download URL, (English filename, Spanish filename)] inside
# the downloaded archive.
_ENES_TRAIN_DATASETS = [
    [
        "http://www.statmt.org/wmt13/training-parallel-commoncrawl.tgz",
        ("commoncrawl.es-en.en", "commoncrawl.es-en.es")
    ],
    [
        "http://www.statmt.org/wmt13/training-parallel-europarl-v7.tgz",
        ("training/europarl-v7.es-en.en", "training/europarl-v7.es-en.es")
    ],
    [
        "http://www.statmt.org/wmt13/training-parallel-un.tgz",
        ("un/undoc.2000.es-en.en", "un/undoc.2000.es-en.es")
    ],
    [
        "https://s3.amazonaws.com/web-language-models/paracrawl/release1/paracrawl-release1.en-es.zipporah0-dedup-clean.tgz",
        ("paracrawl-release1.en-es.zipporah0-dedup-clean.en",
         "paracrawl-release1.en-es.zipporah0-dedup-clean.es")
    ]
]

# Evaluation data: WMT newstest2013, same [URL, (en, es)] layout as above.
_ENES_TEST_DATASETS = [
    [
        "http://data.statmt.org/wmt17/translation-task/dev.tgz",
        ("dev/newstest2013.en", "dev/newstest2013.es")
    ],
]
@registry.register_problem
class TranslateEnesWmt32k(translate.TranslateProblem):
    """En-es translation trained on WMT corpus."""

    @property
    def additional_training_datasets(self):
        """Allow subclasses to add training datasets."""
        return []

    def source_data_files(self, dataset_split):
        """Return [URL, (en_file, es_file)] dataset specs for the given split."""
        train = dataset_split == problem.DatasetSplit.TRAIN
        train_datasets = _ENES_TRAIN_DATASETS + self.additional_training_datasets
        return train_datasets if train else _ENES_TEST_DATASETS

    def vocab_data_files(self):
        # The subword vocabulary is always built from the training corpora.
        return _ENES_TRAIN_DATASETS
@registry.register_problem
class TranslateEnesWmtClean32k(TranslateEnesWmt32k):
    """En-es translation trained on WMT with further cleaning."""

    @property
    def use_vocab_from_other_problem(self):
        # Share the base problem's vocabulary so checkpoints stay compatible.
        return TranslateEnesWmt32k()

    @property
    def datatypes_to_clean(self):
        # Apply text-cleaning passes to the raw txt data.
        return ["txt"]
@registry.register_problem
class TranslateEnesWmt32kPacked(TranslateEnesWmt32k):
    """Packed variant: multiple examples concatenated to fixed-length sequences."""

    @property
    def packed_length(self):
        # Target length of each packed example, in subword tokens.
        return 256

    @property
    def use_vocab_from_other_problem(self):
        # Reuse the unpacked problem's vocabulary.
        return TranslateEnesWmt32k()
@registry.register_problem
class TranslateEnesWmt8k(TranslateEnesWmt32k):
    """Problem spec for WMT En-Es translation."""

    @property
    def approx_vocab_size(self):
        # Smaller subword vocabulary than the default 32k variant.
        return 2**13  # 8192
@registry.register_problem
class TranslateEnesWmt8kPacked(TranslateEnesWmt8k):
    """Packed variant of the 8k-vocab problem (see TranslateEnesWmt32kPacked)."""

    @property
    def packed_length(self):
        # Target length of each packed example, in subword tokens.
        return 256

    @property
    def use_vocab_from_other_problem(self):
        # Reuse the unpacked 8k problem's vocabulary.
        return TranslateEnesWmt8k()
@registry.register_problem
class TranslateEnesWmtCharacters(TranslateEnesWmt8k):
    """Problem spec for WMT En-Es translation."""

    @property
    def vocab_type(self):
        # Character-level modeling instead of subwords.
        return text_problems.VocabType.CHARACTER
| {
"content_hash": "27b37362b627b805dc5def741f0d010e",
"timestamp": "",
"source": "github",
"line_count": 112,
"max_line_length": 125,
"avg_line_length": 27.401785714285715,
"alnum_prop": 0.7194525904203324,
"repo_name": "tensorflow/tensor2tensor",
"id": "f4a7f2199e69372822379d693a88509932d66f18",
"size": "3675",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tensor2tensor/data_generators/translate_enes.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "32015"
},
{
"name": "HTML",
"bytes": "34684"
},
{
"name": "JavaScript",
"bytes": "78408"
},
{
"name": "Jupyter Notebook",
"bytes": "2859453"
},
{
"name": "Python",
"bytes": "5109255"
},
{
"name": "Shell",
"bytes": "11941"
}
],
"symlink_target": ""
} |
import logging
from django.contrib.auth import authenticate
from rest_framework.exceptions import AuthenticationFailed
from tsbp import consts
from tsbp_auth import models
from tsbp_core import models as core, exceptions
from tsbp_core.processors import AbstractProcessor
signup_logger = logging.getLogger('tsbp_auth.processors.signup_processor')
token_generation_logger = logging.getLogger('tsbp_auth.processors.token_generation_processor')
class SignupProcessor(AbstractProcessor):
    """Registers a new user account and immediately issues an auth token.

    Raises a PreconditionException ('user_already_exists') when the email
    is already registered.
    """
    operation_id = consts.operations.SIGN_UP
    # Unused here; kept for interface parity with TokenGenerationProcessor.
    token = None

    def execution(self, email, password, **kwargs):
        signup_logger.info('Iniciando proceso de registro de usuario: email=[%s]' % email)
        if core.User.objects.is_used_email(email):
            signup_logger.info('El email [%s] ya está registrado' % email)
            raise PreconditionException('user_already_exists') if False else exceptions.PreconditionException('user_already_exists')
        user = core.User.objects.create_user(email, email, password, creation_process=self.process)
        # Chain a token-generation sub-process so the new user is logged in.
        token_generation_processor = TokenGenerationProcessor(self.process.issued_by_id, parent=self.process)
        token_generation_processor.execute(user=user)
        signup_logger.info("Usuario registrado exitosamente")
        return consts.operation_results.SUCCESS
class TokenGenerationProcessor(AbstractProcessor):
    """Creates an authentication token for a user.

    The user is either resolved from email/password credentials or passed in
    directly (e.g. right after signup). The created token is stored on
    ``self.token``.

    Raises:
        AuthenticationFailed: when credentials are invalid or the user is
            inactive.
    """
    operation_id = consts.operations.TOKEN_GENERATION
    token = None

    def execution(self, email=None, password=None, user=None, **kwargs):
        token_generation_logger.info('Iniciando proceso de generación de token de autenticación...')
        if user is None:
            token_generation_logger.info('Usando credenciales de: email=[%s]' % email)
            user = authenticate(username=email, password=password)
            # BUG FIX: the original condition was `if user or not user.is_active`,
            # which rejected every *successfully* authenticated user and crashed
            # with AttributeError (None.is_active) when authentication failed.
            # django's authenticate() returns None on bad credentials, so the
            # failure condition is "no user, or an inactive one".
            if user is None or not user.is_active:
                raise AuthenticationFailed()
        else:
            token_generation_logger.info('Creando en base a un usuario directamente: email=[%s]' % user.email)
        self.token = models.Token.objects.create(user=user, creation_process=self.process)
        token_generation_logger.info('Token generado exitosamente')
        return consts.operation_results.SUCCESS
| {
"content_hash": "11289a0aa03c05867bb7717124dd9799",
"timestamp": "",
"source": "github",
"line_count": 47,
"max_line_length": 110,
"avg_line_length": 46.12765957446808,
"alnum_prop": 0.7260147601476015,
"repo_name": "lvercelli/pytsbp",
"id": "358fedeb4970b0330e9116858fcd3e73ef53a87a",
"size": "2171",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tsbp_auth/processors/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "59742"
}
],
"symlink_target": ""
} |
class StopCommand(RuntimeError):
    """
    An exception that can be raised to halt the execution of a command,
    marking a user error somewhere.
    """
    pass
class StopCommandWithHelp(StopCommand):
    """
    A convenience StopCommand exception which contains the plugin help as
    the message.
    """
    def __init__(self, plugin):
        # Use the plugin's own help text as the exception message.
        super().__init__(plugin.on_help())
"content_hash": "f715af53bb7fc0fe1acd326f487726eb",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 78,
"avg_line_length": 25.866666666666667,
"alnum_prop": 0.654639175257732,
"repo_name": "Cyanogenoid/smartbot",
"id": "4bf3f45b35facd724a3a2057fb6ccc2df1d1ddc7",
"size": "388",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "smartbot/exceptions.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "109234"
}
],
"symlink_target": ""
} |
"""Sends the password over a socket to askpass.
"""
import errno
import getpass
import os
import socket
import sys
import tempfile
import textwrap
from psshlib import psshutil
class PasswordServer(object):
    """Listens on a UNIX domain socket for password requests.

    The password is read from the terminal once, then served to every
    askpass client that connects, via non-blocking iomap callbacks.
    """

    def __init__(self):
        self.sock = None       # listening socket (created in start())
        self.tempdir = None    # private temp dir that holds the socket file
        self.address = None    # filesystem path of the UNIX socket
        self.socketmap = {}    # fd -> connected client socket
        self.buffermap = {}    # fd -> bytes still pending to be written

    def start(self, iomap, backlog):
        """Prompts for the password, creates a socket, and starts listening.

        The specified backlog should be the max number of clients connecting
        at once.
        """
        message = ('Warning: do not enter your password if anyone else has'
                   ' superuser privileges or access to your account.')
        print(textwrap.fill(message))
        self.password = getpass.getpass()
        # Note that according to the docs for mkdtemp, "The directory is
        # readable, writable, and searchable only by the creating user."
        self.tempdir = tempfile.mkdtemp(prefix='pssh.')
        self.address = os.path.join(self.tempdir, 'pssh_askpass_socket')
        self.sock = socket.socket(socket.AF_UNIX)
        psshutil.set_cloexec(self.sock)
        self.sock.bind(self.address)
        self.sock.listen(backlog)
        iomap.register_read(self.sock.fileno(), self.handle_listen)

    def handle_listen(self, fd, iomap):
        """Accept one client connection and queue the password for writing."""
        try:
            conn = self.sock.accept()[0]
        except socket.error:
            _, e, _ = sys.exc_info()
            number = e.args[0]
            if number == errno.EINTR:
                return
            else:
                # TODO: print an error message here?
                self.sock.close()
                self.sock = None
                # BUG FIX: without this return, execution fell through to
                # conn.fileno() with `conn` never bound (UnboundLocalError).
                return
        fd = conn.fileno()
        iomap.register_write(fd, self.handle_write)
        self.socketmap[fd] = conn
        self.buffermap[fd] = self.password.encode()

    def handle_write(self, fd, iomap):
        """Write as much of the pending password as the client will accept."""
        data = self.buffermap[fd]
        conn = self.socketmap[fd]
        try:
            bytes_written = conn.send(data)
        except socket.error:
            _, e, _ = sys.exc_info()
            number = e.args[0]
            if number == errno.EINTR:
                return
            else:
                self.close_socket(fd, iomap)
                # BUG FIX: without this return, `bytes_written` was used
                # unbound below after the connection had been torn down.
                return
        data = data[bytes_written:]
        if data:
            # Partial write: keep the remainder for the next writable event.
            self.buffermap[fd] = data
        else:
            # Entire password sent; we are done with this client.
            self.close_socket(fd, iomap)

    def close_socket(self, fd, iomap):
        """Unregister and close a client connection, dropping its state."""
        iomap.unregister(fd)
        self.socketmap[fd].close()
        del self.socketmap[fd]
        del self.buffermap[fd]

    def __del__(self):
        # Best-effort cleanup of the listening socket and its temp directory.
        if self.sock:
            self.sock.close()
            self.sock = None
        if self.address:
            os.remove(self.address)
        if self.tempdir:
            os.rmdir(self.tempdir)
| {
"content_hash": "ee8a41c6f4fd6e45e6e9a5dd5e9b78b6",
"timestamp": "",
"source": "github",
"line_count": 96,
"max_line_length": 76,
"avg_line_length": 30.020833333333332,
"alnum_prop": 0.5700902151283831,
"repo_name": "krig/parallel-ssh",
"id": "a2f38d3cf764fc29ede27921b8dc6f505e352c6b",
"size": "2948",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "psshlib/askpass_server.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "46"
},
{
"name": "Python",
"bytes": "86599"
}
],
"symlink_target": ""
} |
"""
This module implements error handlers for Nwchem runs. Currently tested only
for B3LYP DFT jobs.
"""
from pymatgen.io.nwchem import NwInput, NwOutput
from custodian.ansible.interpreter import Modder
from custodian.custodian import ErrorHandler
from custodian.utils import backup
class NwchemErrorHandler(ErrorHandler):
    """
    Error handler for Nwchem Jobs. Currently tested only for B3LYP DFT jobs
    generated by pymatgen.
    """
    def __init__(self, output_filename="mol.nwout"):
        """
        Initializes with an output file name.
        Args:
            output_filename (str): This is the file where the stdout for nwchem
                is being redirected. The error messages that are checked are
                present in the stdout. Defaults to "mol.nwout", which is the
                default redirect used by :class:`custodian.nwchem.jobs
                .NwchemJob`.
        """
        self.output_filename = output_filename

    def check(self):
        """
        Check for errors.

        Side effects: stores self.errors, self.input_file and self.ntasks for
        a subsequent correct() call. Returns True if any error was found.
        """
        out = NwOutput(self.output_filename)
        self.errors = []
        # The input deck path is taken from the parsed output's job info.
        self.input_file = out.job_info["input"]
        # Only the last task's status is inspected for errors.
        if out.data[-1]["has_error"]:
            self.errors.extend(out.data[-1]["errors"])
        # De-duplicate repeated error messages.
        self.errors = list(set(self.errors))
        self.ntasks = len(out.data)
        return len(self.errors) > 0

    def _mod_input(self, search_string_func, mod_string_func):
        # Rewrite self.input_file in place: lines matching
        # search_string_func(line) are replaced with mod_string_func(line).
        with open(self.input_file) as f:
            lines = []
            for l in f:
                if search_string_func(l):
                    lines.append(mod_string_func(l))
                else:
                    lines.append(l)
        with open(self.input_file, "w") as fout:
            fout.write("".join(lines))

    def correct(self):
        """Correct errors found by check().

        Returns a dict of {"errors": ..., "actions": ...}; actions is None
        for errors this handler does not know how to fix (which makes
        custodian terminate the job).
        """
        backup("*.nw*")
        actions = []
        nwi = NwInput.from_file(self.input_file)
        for e in self.errors:
            if e == "autoz error":
                # Disable automatic internal-coordinate generation.
                action = {"_set": {"geometry_options": ["units", "angstroms", "noautoz"]}}
                actions.append(action)
            elif e == "Bad convergence":
                t = nwi.tasks[self.ntasks - 1]
                if "cgmin" in t.theory_directives:
                    # cgmin already tried and still failing: drop the task.
                    nwi.tasks.pop(self.ntasks - 1)
                else:
                    # Retry the failed task with conjugate-gradient minimization.
                    t.theory_directives["cgmin"] = ""
                for t in nwi.tasks:
                    if t.operation.startswith("freq"):
                        # You cannot calculate hessian with cgmin.
                        t.theory_directives["nocgmin"] = ""
                action = {"_set": {"tasks": [t.as_dict() for t in nwi.tasks]}}
                actions.append(action)
            else:
                # For unimplemented errors, this should just cause the job to
                # die.
                return {"errors": self.errors, "actions": None}
        # Apply the accumulated modifications and rewrite the input deck.
        m = Modder()
        for action in actions:
            nwi = m.modify_object(action, nwi)
        nwi.write_file(self.input_file)
        return {"errors": self.errors, "actions": actions}

    def __str__(self):
        return "NwchemErrorHandler"
| {
"content_hash": "8abde67d106a1157c190cf00628b15ad",
"timestamp": "",
"source": "github",
"line_count": 90,
"max_line_length": 90,
"avg_line_length": 34.955555555555556,
"alnum_prop": 0.5432294977749523,
"repo_name": "materialsproject/custodian",
"id": "8fc789a47974444f5b7a6cdad95d4593ba7a3f22",
"size": "3146",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "custodian/nwchem/handlers.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "5100"
},
{
"name": "CSS",
"bytes": "1133"
},
{
"name": "HTML",
"bytes": "2710"
},
{
"name": "Makefile",
"bytes": "5577"
},
{
"name": "Python",
"bytes": "532313"
},
{
"name": "Roff",
"bytes": "1552939"
},
{
"name": "Shell",
"bytes": "7472"
}
],
"symlink_target": ""
} |
from __future__ import print_function
import json
import os.path
import time
from datetime import datetime, timedelta
import soco
from MusicLogging import MusicLogging
def str2bool(v):
    """Interpret a configuration value as a boolean.

    Accepts the truthy spellings "yes", "true", "t" and "1" in ANY letter
    case, as well as the values True and 1 themselves; everything else
    (including None, False, "no", "") is False.

    Generalizes the original literal whitelist ("yes", "true", "t", "1",
    "Yes", "True", True): all previously-accepted values are still accepted,
    and fully upper-case spellings such as "TRUE"/"YES" now work too.
    """
    return str(v).strip().lower() in ("yes", "true", "t", "1")
class SonosController(object):
    """Drives a Sonos speaker via the `soco` library: resolves NFC-tag
    entries to playable items, manages volume (with day/night caps), play
    modes, queueing and transport control. All soco calls are wrapped in
    broad try/excepts so a flaky speaker never crashes the controller."""

    def __init__(self, dayVol=25, nightVol=15, daytimeRange=None, unjoin=True, clear=True,
                 settingsFolder="settings", restartTime=10):
        if daytimeRange is None:
            daytimeRange = [7, 17]
        self.dayVol = float(dayVol)
        self.nightVol = float(nightVol)
        # NOTE(review): when the default list [7, 17] is used, the .split('-')
        # below would raise AttributeError — callers apparently always pass a
        # "H-H" string; confirm the default is ever exercised.
        self.daytimeRange = daytimeRange.split('-')
        self.unjoinBool = str2bool(unjoin)
        self.clear = str2bool(clear)
        self.settings = settingsFolder
        self.restartTime = float(restartTime)
        self.speakers = None            # all discovered speakers
        self.sonosDevice = None         # the speaker we control
        self.lastSavedTag = None        # {"tag": uid, "scan": datetime} or None

    def startSonos(self, speaker):
        """Discover speakers and bind to *speaker* by name (fallback: any)."""
        # Sonos setup
        MusicLogging.Instance().info("Connecting to Sonos...")
        self.speakers = soco.discover()
        try:
            self.sonosDevice = soco.discovery.by_name(speaker)
        except:
            # Fall back to an arbitrary discovered speaker.
            self.sonosDevice = self.speakers.pop()
        return True

    def stopAll(self):
        # NOTE(review): only logs; does not actually stop playback.
        MusicLogging.Instance().info("Stopping Sonos")

    def getCache(self, entry):
        """Resolve *entry* to items and serialize them as a DIDL string."""
        playitems = self.loadPlayList(entry)
        return soco.data_structures.to_didl_string(*playitems)

    def getPlayList(self, entry):
        """Return playable items: the entry's cached DIDL if present, else a
        fresh library lookup."""
        playitems = entry['playitems']
        if playitems and not playitems == '':
            # NOTE(review): `soco.data_structures_entry` looks like it should
            # be `soco.data_structures` — confirm against the soco version.
            return soco.data_structures_entry.from_didl_string(playitems)
        else:
            return self.loadPlayList(entry)

    def loadPlayList(self, entry):
        """Dispatch on entry['type'] to build the list of playable items."""
        returnItems = []
        if entry['type'] == 'url':
            returnItems = self.getUrlCache(entry)
        elif entry['type'] == 'playlist':
            returnItems = self.getPlaylistCache(entry)
        elif entry['type'] == 'album':
            returnItems = self.getAlbumCache(entry)
        elif entry['type'] == 'artist':
            returnItems = self.getArtistCache(entry)
        elif entry['type'] == 'genre':
            returnItems = self.getGenreCache(entry)
        return returnItems

    def getPlaylistCache(self, entry):
        return self.getMusicLibraryInformationCache('sonos_playlists', entry, 'playlist')

    def getAlbumCache(self, entry):
        return self.getMusicLibraryInformationCache('albums', entry, 'album')

    def getArtistCache(self, entry):
        return self.getMusicLibraryInformationCache('album_artists', entry, 'artist')

    def getGenreCache(self, entry):
        return self.getMusicLibraryInformationCache('genres', entry, 'genre')

    @staticmethod
    def getUrlCache(entry):
        # Wrap a raw stream URL as a single DIDL resource.
        item = soco.data_structures.DidlResource(uri=entry['item'], protocol_info='*:*:*:*')
        return list([item])

    def getMusicLibraryInformationCache(self, searchType, entry, valueType):
        """Page through the music library of *searchType* and collect the
        tracks of every container whose title equals entry['item']."""
        returnItems = list([])
        # noinspection PyUnusedLocal
        startItem = 0
        startAtIndex = 0
        while True:
            try:
                # self.music_library.get_music_library_information('albums', start, max_items, full_album_art_uri)
                playlist_info = self.sonosDevice.music_library.get_music_library_information(searchType,
                                                                                            start=startAtIndex,
                                                                                            max_items=100)
                returnedItems = playlist_info.number_returned
            except:
                MusicLogging.Instance().info("some error")
                break
            if returnedItems == 0:
                # No more pages.
                break
            if entry['type'] == valueType:
                for playlist in playlist_info:
                    playlistTitle = playlist.title
                    if playlistTitle == entry['item']:
                        MusicLogging.Instance().info('found ' + entry['item'])
                        try:
                            track_list = self.sonosDevice.music_library.browse(playlist)
                            returnItems.extend(track_list)
                        except:
                            MusicLogging.Instance().info("some error")
            startAtIndex += returnedItems
        # playlist_info = #sonos.music_library.get_music_library_information('sonos_playlists',search_term='Shovels And Rope')
        # MusicLogging.Instance().info('Fonud {} Sonos playlists'.format(playlist_info.number_returned))
        return returnItems

    def playItems(self, items):
        """Append *items* to the queue and start playback after the first
        successful add."""
        startPlaying = False
        for item in items:
            try:
                self.sonosDevice.add_to_queue(item)
            except:
                # NOTE(review): `sys` is never imported in this module, and
                # concatenating a str with sys.exc_info()[0] (a type) would
                # raise TypeError — this log line can never work as written.
                MusicLogging.Instance().info(" error adding...: " + sys.exc_info()[0])
            if startPlaying == False:
                try:
                    startPlaying = self.sonosDevice.play_from_queue(0, start=True)
                    # startPlaying = self.sonos.play()
                    MusicLogging.Instance().info(" Playing...")
                except:
                    MusicLogging.Instance().info(" error starting to play...")
        if startPlaying == False:
            try:
                # startPlaying = self.sonos.play_from_queue(0, start=True)
                # noinspection PyUnusedLocal
                startPlaying = self.sonosDevice.play()
                MusicLogging.Instance().info(" Playing...")
            except:
                MusicLogging.Instance().info(" error starting to play...")
        return True

    # this function gets called when a NFC tag is detected
    def play(self, entry):
        """Handle a scanned tag: re-scanning the same tag within
        restartTime seconds just resumes; otherwise rebuild the queue."""
        playItems = self.getPlayList(entry)
        # restart if last Tag is the same
        lastTag = self.lastTag()
        if not lastTag is None:
            theTimeDelta = datetime.now() - lastTag["scan"]
            MusicLogging.Instance().info("time delta: " + str(theTimeDelta))
            if theTimeDelta < timedelta(seconds=self.restartTime):
                if entry['tag_id'] == lastTag["tag"]:
                    if self.restart():
                        return True
        self.unjoin()
        self.clearQueue()
        if entry['volume']:
            self.setMaxVolume(entry['volume'])
        # Shuffle for open-ended collections, normal order otherwise.
        if entry['type'] == "artist":
            self.playModeShuffleNoRepeat()
        elif entry['type'] == "genre":
            self.playModeShuffleNoRepeat()
        else:
            self.playModeNormal()
        self.playItems(playItems)
        if entry['time_offset']:
            self.setSkipTo(entry['time_offset'])
        self.setLastTag(entry['tag_id'])
        return True

    def lastTag(self):
        """Return the last scanned tag record, or None."""
        try:
            # noinspection PyUnusedLocal
            var = self.lastSavedTag
            MusicLogging.Instance().info("last Tag found")
        except:
            MusicLogging.Instance().info("last Tag not found")
            self.lastSavedTag = None
        return self.lastSavedTag

    def setLastTag(self, tag_uid):
        MusicLogging.Instance().info("last Tag saved")
        self.lastSavedTag = {"tag": tag_uid, "scan": datetime.now()}

    def saveLastTagTime(self):
        """Refresh the last tag's scan timestamp, but only while playing."""
        # only save when a track is playing
        try:
            transportInfo = self.sonosDevice.get_current_transport_info()
            if transportInfo['current_transport_state'] == 'PLAYING':
                lastTag = self.lastTag()
                if not lastTag is None:
                    self.lastSavedTag = {"tag": lastTag["tag"], "scan": datetime.now()}
                    MusicLogging.Instance().info("last tag time saved")
                    return True
                else:
                    MusicLogging.Instance().info("did not save last tag time")
                    return False
            else:
                MusicLogging.Instance().info("music is currently not playing")
                return False
        except:
            return False

    def markUnknown(self, aTagUid):
        """Append an unrecognized tag UID to the unknown-tags JSON file."""
        tag_uid = str(aTagUid)
        MusicLogging.Instance().info(" No record for tag UID: " + tag_uid)
        aUnknownTag = {
            tag_uid: {'comment': 'last scan: ' + str(datetime.now()), 'title': '', 'vol': 1, 'time_offset': None,
                      'type': None, 'item': None}, }
        unknownFileName = self.settings + "/" + "2_unknown_Tags" + ".txt"
        if os.path.isfile(unknownFileName):
            unknownTagsJSON = open(unknownFileName)
            unknownTags = json.load(unknownTagsJSON)
            unknownTagsJSON.close()
        else:
            unknownTags = {}
        unknownTags.update(aUnknownTag)
        dump = json.dumps(unknownTags, sort_keys=True, indent=4, separators=(',', ': '))
        fobj_out = open(unknownFileName, "w")
        fobj_out.write(dump)
        fobj_out.close()

    # sonos methods
    def setSkipTo(self, time_offset=None):
        """Seek within the current track; returns False if not possible."""
        if time_offset:
            try:
                self.sonosDevice.seek(time_offset)
                MusicLogging.Instance().info(" Skipped to " + time_offset)
                return True
            except:
                return False
        return False

    def setMaxVolume(self, volModifier=1):
        # Re-apply the current volume so the day/night cap takes effect.
        self.setVolume(self.sonosVolume(), volModifier)
        return True

    def setVolume(self, newVolume, volModifier):
        """Set the speaker volume, clamped to the day or night maximum
        (scaled by *volModifier*)."""
        MusicLogging.Instance().info(" setting Volume to:" + str(newVolume))
        currentHour = time.localtime()[3]
        isNight = 0
        if currentHour < float(self.daytimeRange[0]) or currentHour > float(self.daytimeRange[1]):  # is it nighttime?
            isNight = 1
        if isNight:
            maxVol = int(round(self.nightVol * volModifier, 0))
            if newVolume >= maxVol:
                self.setSonosVolume(maxVol)
                MusicLogging.Instance().info(" " + str(newVolume) + " is to loud for Nighttime volume, setting it to " + str(maxVol))
            elif newVolume < maxVol:
                self.setSonosVolume(newVolume)
        else:
            maxVol = int(round(self.dayVol * volModifier, 0))
            if newVolume >= maxVol:
                self.setSonosVolume(maxVol)
                MusicLogging.Instance().info(" " + str(newVolume) + " is to loud for Daytime volume, setting it to " + str(maxVol))
            elif newVolume < maxVol:
                self.setSonosVolume(newVolume)
        return True

    def setSonosVolume(self, volume):
        # Raw, un-clamped volume write to the device.
        try:
            self.sonosDevice.volume = volume
        except:
            MusicLogging.Instance().info("some error")
        return True

    def sonosVolume(self):
        """Read the current device volume (0 on error)."""
        volume = 0
        try:
            volume = self.sonosDevice.volume
        except:
            MusicLogging.Instance().info("some error")
        return volume

    def volumeUp(self, numberOfSteps):
        # Raise volume by *numberOfSteps*, still subject to day/night caps.
        newVolume = self.sonosVolume() + numberOfSteps
        self.setVolume(newVolume, 1)
        return True

    def togglePlayModeShuffle(self):
        """Toggle shuffle while preserving the repeat setting:
        SHUFFLE<->REPEAT_ALL and SHUFFLE_NOREPEAT<->NORMAL."""
        try:
            if self.sonosDevice.play_mode == 'SHUFFLE':
                self.sonosDevice.play_mode = 'REPEAT_ALL'
                MusicLogging.Instance().info("now: REPEAT_ALL")
            elif self.sonosDevice.play_mode == 'REPEAT_ALL':
                self.sonosDevice.play_mode = 'SHUFFLE'
                MusicLogging.Instance().info("now: SHUFFLE")
            elif self.sonosDevice.play_mode == 'NORMAL':
                self.sonosDevice.play_mode = 'SHUFFLE_NOREPEAT'
                MusicLogging.Instance().info("now: SHUFFLE_NOREPEAT")
            elif self.sonosDevice.play_mode == 'SHUFFLE_NOREPEAT':
                self.sonosDevice.play_mode = 'NORMAL'
                MusicLogging.Instance().info("now: NORMAL")
            else:
                self.sonosDevice.play_mode = 'NORMAL'
                MusicLogging.Instance().info("now: NORMAL")
            return True
        except:
            return False

    def playModeNormal(self):
        try:
            MusicLogging.Instance().info("do NORMAL")
            self.sonosDevice.play_mode = 'NORMAL'
            return True
        except:
            return False

    def playModeRepeatAll(self):
        try:
            MusicLogging.Instance().info("do REPEAT_ALL")
            self.sonosDevice.play_mode = 'REPEAT_ALL'
            return True
        except:
            return False

    def playModeShuffle(self):
        try:
            MusicLogging.Instance().info("do SHUFFLE")
            self.sonosDevice.play_mode = 'SHUFFLE'
            return True
        except:
            return False

    def playModeShuffleNoRepeat(self):
        try:
            MusicLogging.Instance().info("do SHUFFLE_NOREPEAT")
            self.sonosDevice.play_mode = 'SHUFFLE_NOREPEAT'
            return True
        except:
            return False

    def clearQueue(self):
        """Clear the queue, but only if the controller was configured to."""
        if self.clear:
            try:
                self.sonosDevice.clear_queue()
                return True
            except:
                return False
        return False

    def unjoin(self):
        """Leave any speaker group, but only if configured to (unjoinBool)."""
        if self.unjoinBool:
            try:
                self.sonosDevice.unjoin()
                MusicLogging.Instance().info("unjoined")
                return True
            except:
                return False
        return False

    def unjoinForced(self):
        """Leave any speaker group regardless of configuration."""
        try:
            self.sonosDevice.unjoin()
            MusicLogging.Instance().info("unjoined")
            return True
        except:
            return False

    def stop(self):
        try:
            self.sonosDevice.stop()
            MusicLogging.Instance().info(" Sonos stopped")
            return True
        except:
            return False

    def pause(self):
        try:
            self.sonosDevice.pause()
            MusicLogging.Instance().info(" Sonos paused")
            return True
        except:
            return False

    def next(self):
        try:
            self.sonosDevice.next()
            MusicLogging.Instance().info(" playing next track")
            return True
        except:
            return False

    def previous(self):
        try:
            self.sonosDevice.previous()
            MusicLogging.Instance().info(" playing previous track")
            return True
        except:
            return False

    def restart(self):
        """Resume playback (used for same-tag rescans)."""
        try:
            self.sonosDevice.play()
            MusicLogging.Instance().info(" unpause()...")
            return True
        except:
            return False

    def playPause(self):
        """Toggle between play and pause based on the transport state."""
        try:
            transportInfo = self.sonosDevice.get_current_transport_info()
            if transportInfo['current_transport_state'] == 'PLAYING':
                self.sonosDevice.pause()
                MusicLogging.Instance().info(" pause()...")
            elif transportInfo['current_transport_state'] == 'PAUSED_PLAYBACK':
                self.sonosDevice.play()
                MusicLogging.Instance().info(" play()...")
            elif transportInfo['current_transport_state'] == 'STOPPED':
                self.sonosDevice.play()
                MusicLogging.Instance().info(" play()... from start")
            else:
                self.sonosDevice.play()
                MusicLogging.Instance().info(" play()... from unknown state")
            return True
        except:
            return False

    def get_current_track_info(self):
        """Return soco's current-track info dict, or {} on error."""
        try:
            trackInfo = self.sonosDevice.get_current_track_info()
            return trackInfo
        except:
            return {}
| {
"content_hash": "2b338a7946974289f84a57e2af25c778",
"timestamp": "",
"source": "github",
"line_count": 444,
"max_line_length": 134,
"avg_line_length": 35.13513513513514,
"alnum_prop": 0.5527564102564102,
"repo_name": "clemenstyp/music-on-blocks",
"id": "4708d0cb4db19fa51a56e7acf142c03990e0bb9f",
"size": "15622",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "backend/sonosController.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "918"
},
{
"name": "HTML",
"bytes": "22098"
},
{
"name": "Python",
"bytes": "63017"
},
{
"name": "Shell",
"bytes": "188"
}
],
"symlink_target": ""
} |
"""
<DefineSource>
@Date : Fri Nov 14 13:20:38 2014 \n
@Author : Erwan Ledoux \n\n
</DefineSource>
A Moniter
"""
#<DefineAugmentation>
# NOTE(review): SYS.setSubModule injects names such as DecorationClass and
# BaseClass into this module's globals — they are not defined locally.
import ShareYourSystem as SYS
BaseModuleStr="ShareYourSystem.Specials.Simulaters.Populater"
DecorationModuleStr="ShareYourSystem.Standards.Classors.Representer"
SYS.setSubModule(globals())
#</DefineAugmentation>
#<ImportSpecificModules>
#</ImportSpecificModules>
#<DefineDoStrsList>
# Verb forms used by the framework's "Doer" naming convention.
DoStrsList=["Moniter","Monit","Monitering","Monitered"]
#<DefineDoStrsList>
#<DefineClass>
@DecorationClass()
class MoniterClass(BaseClass):
	"""Links a Moniter to its simulater and record clocker (see do_monit)."""

	#Definition
	# Attributes shown by the Representer; note 'MonitoringVariableStr'
	# (defined in default_init) is deliberately or accidentally absent here
	# — TODO confirm with the framework's conventions.
	RepresentingKeyStrsList=[
								'MonitoringTypeStr',
								'MonitoringIndexIntsList',
								'MonitoredDeriveSimulaterPointedVariable',
								'MonitoredDeriveClockerPointedVariable'
							]

	def default_init(self,
				_MonitoringTypeStr='State',
				_MonitoringDeriveSimulaterGetStr="/",
				_MonitoringDeriveClockerGetStr="/NodedParentVariable/<Clockome>RecordClocker",
				_MonitoringVariableStr="",
				_MonitoringIndexIntsList=None,
				_MonitoredDeriveSimulaterPointedVariable=None,
				_MonitoredDeriveClockerPointedVariable=None,
				**_KwargVariablesDict
				):
		# NOTE(review): the SYS classor machinery presumably reads this
		# signature to build default attributes — do not rename parameters.

		#Call the parent init method
		BaseClass.__init__(self,**_KwargVariablesDict)

	#<DefineDoMethod>
	def do_monit(self):
		# Resolve the Get strings to the pointed simulater/clocker objects.

		#link
		self.link(
					[
						[self.MonitoringDeriveSimulaterGetStr,'MonitoredDeriveSimulater'],
						[self.MonitoringDeriveClockerGetStr,'MonitoredDeriveClocker']
					]
				)

#</DefineClass>
| {
"content_hash": "0f333c2e276515bd1a3be1fd761e6d55",
"timestamp": "",
"source": "github",
"line_count": 67,
"max_line_length": 84,
"avg_line_length": 22.582089552238806,
"alnum_prop": 0.729676140118969,
"repo_name": "Ledoux/ShareYourSystem",
"id": "1ea462b9cfc2af201b965b3330803a65cbce3eff",
"size": "1537",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "Pythonlogy/draft/Tracer/draft/__init__ copy.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "ApacheConf",
"bytes": "86"
},
{
"name": "C++",
"bytes": "4244220"
},
{
"name": "CSS",
"bytes": "142769"
},
{
"name": "CoffeeScript",
"bytes": "37331"
},
{
"name": "HTML",
"bytes": "36211676"
},
{
"name": "JavaScript",
"bytes": "2147968"
},
{
"name": "Jupyter Notebook",
"bytes": "7930602"
},
{
"name": "Makefile",
"bytes": "6362"
},
{
"name": "PHP",
"bytes": "11096341"
},
{
"name": "Python",
"bytes": "5700092"
},
{
"name": "Ruby",
"bytes": "60"
},
{
"name": "Scala",
"bytes": "2412"
},
{
"name": "Shell",
"bytes": "2525"
},
{
"name": "Swift",
"bytes": "154"
},
{
"name": "TeX",
"bytes": "2556"
},
{
"name": "XSLT",
"bytes": "20993"
}
],
"symlink_target": ""
} |
import setuptools
# In python < 2.7.4, a lazy loading of package `pbr` will break
# setuptools if some other modules registered functions in `atexit`.
# solution from: http://bugs.python.org/issue15881#msg170215
try:
    # Eagerly import multiprocessing so pbr's lazy loading cannot break
    # setuptools' atexit handling (see the linked bug above).
    import multiprocessing  # noqa
except ImportError:
    pass

setuptools.setup(
    setup_requires=['pbr>=3.0.0', 'setuptools>=17.1'],
    # pbr=True: all remaining package metadata is read from setup.cfg.
    pbr=True)
| {
"content_hash": "e148654ab7539407a3b535e7a2e66b2e",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 68,
"avg_line_length": 26.5,
"alnum_prop": 0.7169811320754716,
"repo_name": "dwmarshall/pycontribs-jira",
"id": "405bf85f98d53998b964ed906b02fc1eac99d63c",
"size": "393",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Makefile",
"bytes": "5176"
},
{
"name": "Python",
"bytes": "327843"
},
{
"name": "Shell",
"bytes": "1179"
}
],
"symlink_target": ""
} |
import boto3
import time
class EC2:
    """Factory for a boto3 EC2 client bound to one set of credentials."""

    def __init__(self, access_key_id, secret_access_key, region_name='sa-east-1', **kwargs):
        # Extra keyword arguments are accepted but ignored, as before.
        self._client = boto3.client(
            'ec2',
            aws_access_key_id=access_key_id,
            aws_secret_access_key=secret_access_key,
            region_name=region_name)

    def client(self):
        """Return the underlying boto3 client."""
        return self._client
class Client:
    """Convenience wrapper around an EC2 client for common instance tasks.

    The most recent ``describe_instances`` response is cached on
    ``self.response``; some methods refresh it while others reuse the
    cached copy, so state read through ``_instance_by_id`` may be stale.
    """

    def __init__(self, ec2_client):
        """:param ec2_client: a boto3 EC2 client (or compatible object)."""
        self.ec2_client = ec2_client
        self.response = None  # cached describe_instances() payload

    def instance_ids(self, tag_name=''):
        """Return the ids of all instances.

        When ``tag_name`` is given, only instances carrying a tag whose
        value equals ``tag_name`` are returned.  (Bug fix: the previous
        code only checked that the instance had *some* tag, so the
        ``tag_name`` argument was silently ignored.)
        """
        self.response = self.ec2_client.describe_instances()
        reservations = self.response['Reservations']
        if not tag_name:
            return [it['Instances'][0]['InstanceId'] for it in reservations]
        return [
            it['Instances'][0]['InstanceId']
            for it in reservations
            if any(tag.get('Value') == tag_name
                   for tag in it['Instances'][0].get('Tags', []))]

    def instance_status(self, instance_id):
        """Return the state name (e.g. 'running', 'stopped') of an instance."""
        self.response = self.ec2_client.describe_instances()  # refresh cache
        instance = self._instance_by_id(instance_id)
        return instance['State']['Name']

    def _instance_by_id(self, instance_id):
        """Return the instance dict for ``instance_id``, or None if unknown."""
        if not self.response:
            self.response = self.ec2_client.describe_instances()
        for reservation in self.response['Reservations']:
            instance = reservation['Instances'][0]
            if instance['InstanceId'] == instance_id:
                return instance
        return None

    def start_instance(self, instance_id):
        """Start a stopped instance; waits for a 'stopping' one to settle first."""
        instance = self._instance_by_id(instance_id)
        if instance['State']['Name'] in ['pending', 'running']:
            return  # already (about to be) up
        if instance['State']['Name'] == 'stopping':
            self.wait_to('stopped', instance_id)
        self.ec2_client.start_instances(InstanceIds=[instance_id])

    def wait_to(self, status, instance_id):
        """Block, polling every 2 s, until the instance reaches ``status``."""
        while self.instance_status(instance_id) != status:
            time.sleep(2)

    def reboot_instance(self, instance_id):
        """Request a reboot of the instance."""
        self.ec2_client.reboot_instances(
            InstanceIds=[instance_id])

    def stop_instance(self, instance_id):
        """Stop the instance unless it is already stopped or stopping."""
        if self.instance_status(instance_id) in ('stopped', 'stopping'):
            return
        self.ec2_client.stop_instances(InstanceIds=[instance_id])
        self.response = self.ec2_client.describe_instances()

    def terminate_instance(self, instance_id):
        """Terminate the instance (no-op if already terminated)."""
        if self.instance_status(instance_id) == 'terminated':
            return
        self.ec2_client.terminate_instances(InstanceIds=[instance_id])

    def _running(self, tag_name=None, **kwargs):
        """Launch an instance, optionally tagging it with Name=``tag_name``."""
        response = self.ec2_client.run_instances(**kwargs)
        instance_id = response['Instances'][0]['InstanceId']
        if tag_name:
            self.ec2_client.create_tags(
                Resources=[instance_id],
                Tags=[{'Key': 'Name', 'Value': tag_name}])
        return instance_id

    def run_instance(self, **kwargs):
        """Public wrapper over :meth:`_running`; returns the new instance id."""
        return self._running(**kwargs)

    def instance_public_ip(self, instance_id):
        """Return the public IP of a running instance, else ''."""
        instance = self._instance_by_id(instance_id)
        if self.instance_status(instance_id) == 'running':
            return instance['PublicIpAddress']
        return ''
| {
"content_hash": "1b890576f48c0dd27674390d7ab0f5e7",
"timestamp": "",
"source": "github",
"line_count": 96,
"max_line_length": 92,
"avg_line_length": 35.78125,
"alnum_prop": 0.597962154294032,
"repo_name": "w2srobinho/aws-curioh",
"id": "17d01a50772a85dab5bc90cc754247448670ed6c",
"size": "3435",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "aws_curioh/ec2.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "9576"
}
],
"symlink_target": ""
} |
"""
Implementation of an auto-vivified dict.
"""
class AutovivifiedDict(dict):
    """A dict that springs missing keys into existence.

    Looking up an absent key creates (and stores) a fresh instance of
    the same class under that key, so arbitrarily deep assignment works
    without pre-creating the intermediate levels.

    Based on: http://stackoverflow.com/a/6781411/577199

    >>> d = AutovivifiedDict()
    >>> d[1][2][3] = 4
    >>> d
    {1: {2: {3: 4}}}
    """

    def __missing__(self, key):
        # dict.__getitem__ calls this hook on a miss; storing the child
        # before returning makes the vivification persistent.
        child = self[key] = type(self)()
        return child
if __name__ == "__main__":
    # Run the docstring examples above as tests when executed directly.
    import doctest
    doctest.testmod()
| {
"content_hash": "3cb28e669d61eac80971aa1bbe4f26e5",
"timestamp": "",
"source": "github",
"line_count": 29,
"max_line_length": 78,
"avg_line_length": 23.862068965517242,
"alnum_prop": 0.596820809248555,
"repo_name": "Ceasar/trees",
"id": "4295f60e5309de48bc01520181aea4d1639d95db",
"size": "692",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "trees/autovivify.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "8196"
}
],
"symlink_target": ""
} |
"""
Django settings for ocrDoacao project.
Generated by 'django-admin startproject' using Django 1.8.2.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))


# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): this key is committed to source control — rotate it and
# load it from an environment variable before any production deploy.
SECRET_KEY = 'w5yj*2auiwwqif_51!&)ep-do1_9v(t3uxm%uv^@4z#dopk+@m'

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True

# Must list the served hostnames once DEBUG is turned off.
ALLOWED_HOSTS = []


# Application definition

INSTALLED_APPS = (
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'ocrDoacao',
)

MIDDLEWARE_CLASSES = (
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
    'django.middleware.security.SecurityMiddleware',
)

ROOT_URLCONF = 'ocrDoacao.urls'

TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': ['ocrDoacao/template'],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]

WSGI_APPLICATION = 'ocrDoacao.wsgi.application'


# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.mysql',
        'NAME': 'ocr',
        'USER': 'ocr',
        # NOTE(review): database password committed to source control —
        # move it to an environment variable or a secrets file.
        'PASSWORD': 'temosquedoarissoeusei!!!',
        'HOST': 'localhost',   # Or an IP Address that your DB is hosted on
        'PORT': '3306',
    }
}

# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/

LANGUAGE_CODE = 'en-us'

TIME_ZONE = 'UTC'

USE_I18N = True

USE_L10N = True

USE_TZ = True


# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/

STATIC_URL = '/static/'
| {
"content_hash": "053c986dd60c9160dbb26d70f2337e39",
"timestamp": "",
"source": "github",
"line_count": 106,
"max_line_length": 75,
"avg_line_length": 26.61320754716981,
"alnum_prop": 0.6816731655441333,
"repo_name": "ocr-doacao/ocr",
"id": "423574660b73da84b07afdaee3a75c05e4593770",
"size": "2821",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ocrDoacao/settings.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "6378"
},
{
"name": "HTML",
"bytes": "9285"
},
{
"name": "JavaScript",
"bytes": "21748"
},
{
"name": "Python",
"bytes": "19893"
}
],
"symlink_target": ""
} |
from .base import SimIRExpr
class SimIRExpr_ITE(SimIRExpr):
    """VEX ITE expression: a guard of 0 selects iffalse, anything else iftrue."""

    def _execute(self):
        # Translate operands in the original order (cond, iffalse, iftrue);
        # translation may touch state, so the order is preserved.
        guard = self._translate_expr(self._expr.cond)
        if_false = self._translate_expr(self._expr.iffalse)
        if_true = self._translate_expr(self._expr.iftrue)
        self.expr = self.state.se.If(guard.expr == 0, if_false.expr, if_true.expr)
| {
"content_hash": "745fb73e95824d43332432327cb3410e",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 76,
"avg_line_length": 36.55555555555556,
"alnum_prop": 0.6595744680851063,
"repo_name": "axt/angr",
"id": "3237f0b541bc0eaa55d416f7d163bdffecbc67b4",
"size": "329",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "angr/engines/vex/expressions/ite.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "C",
"bytes": "6375"
},
{
"name": "C++",
"bytes": "38446"
},
{
"name": "Makefile",
"bytes": "617"
},
{
"name": "Python",
"bytes": "2753899"
}
],
"symlink_target": ""
} |
"""
Estimate pi using a Monte Carlo method with distarray.
"""
from __future__ import division, print_function
from util import timer
from distarray.globalapi import Context, Distribution, hypot
from distarray.globalapi.random import Random
# Module-level distarray context shared by every call in this script,
# plus a distributed RNG bound to it.
context = Context()
random = Random(context)
@timer
def calc_pi(n):
    """Monte Carlo estimate of pi over n distributed random points."""
    dist = Distribution(context=context, shape=(n,))
    xs = random.rand(dist)
    ys = random.rand(dist)
    # Fraction of points falling inside the unit quarter-circle, times 4.
    inside = hypot(xs, ys) < 1
    return 4 * inside.sum().toarray() / n
def main(N):
    """Estimate pi with N points and print the elapsed time and result."""
    pi_estimate, elapsed = calc_pi(N)
    print('time : %3.4g\nresult: %.7f' % (elapsed, pi_estimate))
if __name__ == '__main__':
    import argparse

    # Keep the module docstring's formatting intact in --help output.
    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter)
    parser.add_argument("npoints", metavar="N", type=int,
                        help=("number of points to use in estimation"))
    main(parser.parse_args().npoints)
| {
"content_hash": "2d8aec05e11f179b9dadbfd0c31040e3",
"timestamp": "",
"source": "github",
"line_count": 41,
"max_line_length": 71,
"avg_line_length": 26.5609756097561,
"alnum_prop": 0.6437098255280074,
"repo_name": "RaoUmer/distarray",
"id": "5a9e657a0b127310ecf77c5ca65a7b19c924f9c6",
"size": "1406",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "examples/pi_montecarlo/pi_distarray.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "4001"
},
{
"name": "Python",
"bytes": "483961"
},
{
"name": "Shell",
"bytes": "5121"
}
],
"symlink_target": ""
} |
import unittest
import pybles
import os
class ParseTableTest(unittest.TestCase):
    """Acceptance tests for the table / chain / directive grammar."""

    def test_parse_table(self):
        parser = pybles.PybleParser()
        # An empty document is not a valid configuration.
        self.assertRaises(pybles.ParseError, parser.parse_string, "")
        parser.parse_string("filter {}")
        parser.parse_string("filter {}nat {}mangle {}raw {}security {}")

    def test_parse_table_chain(self):
        pybles.PybleParser().parse_string("filter {input {}}")

    def test_parse_chain_in_chain(self):
        # A fresh parser per document, matching the original tests.
        for config in ("filter { input { baz {}}}",
                       "filter {input { foo { bar {} } }}"):
            pybles.PybleParser().parse_string(config)

    def test_parse_directive(self):
        configs = (
            "filter { input { target; }}",
            "filter { input { target option_name option_value;}}",
            "filter { input { foo { target option_name option_value;}}}",
            "filter { input { foo { target option_name option_value; bar {}}}}",
            "filter { input { foo { target option_name option_value; "
            "bar { target option_name option_value; }}}}",
        )
        for config in configs:
            pybles.PybleParser().parse_string(config)

    def test_parse_from_file(self):
        parser = pybles.PybleParser()
        parser.parse_file("%s/test.conf" % os.path.dirname(__file__))
| {
"content_hash": "1913e43150ca4a5536d057418e7ea993",
"timestamp": "",
"source": "github",
"line_count": 56,
"max_line_length": 118,
"avg_line_length": 32.19642857142857,
"alnum_prop": 0.5867997781475319,
"repo_name": "abates/pybles",
"id": "c90437a196a4f769cf76fcc2d59f331cff74a144",
"size": "2380",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/parse_table_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "34922"
}
],
"symlink_target": ""
} |
import numpy as np
import cv2
def all_structures(data):
    """Cluster the principal-component projections with k-means.

    cv2.kmeans is run ``data.analysis.config.runs`` times and keeps the
    attempt with the lowest compactness; the centroids and per-cluster
    member indices are saved on ``data.analysis``.

    :param data: Data set.
    """
    images, projections, pcs = data.pca.load()
    truncated = projections[:, :data.analysis.config.pc_projection_count]

    criteria = (cv2.TERM_CRITERIA_EPS, data.analysis.config.iterations, 0.0001)
    compactness, labels, centroids = cv2.kmeans(
        truncated.astype(np.float32),
        data.analysis.config.clusters,
        criteria,
        data.analysis.config.runs,
        cv2.KMEANS_RANDOM_CENTERS)

    # Row indices belonging to each cluster, in label order.
    members = np.array([np.where(labels == label)[0]
                        for label in range(centroids.shape[0])])

    data.analysis.save_structures(centroids, members)
def score_structures(analysis_data):
    """Rank the cluster structures by how well they match the
    representative result (3 points per representative image, 1 per
    closest-group image), breaking score ties by cluster size.

    :param analysis_data: Analysis data set.
    """
    closest_group, representative = analysis_data.load_closest()
    _, structures = analysis_data.load_structures()

    lengths = np.array([len(s) for s in structures])
    scores = np.array([
        sum(3 if index in representative
            else (1 if index in closest_group else 0)
            for index in structure)
        for structure in structures])

    # Scale scores so that, on equal score, the larger cluster ranks higher.
    combined = np.max(lengths) * scores + lengths
    ranking = np.argsort(combined)[::-1]  # descending

    analysis_data.save_scored_structures(structures[ranking])
"content_hash": "bb249c8e00642e9798b9c2efef0bc35e",
"timestamp": "",
"source": "github",
"line_count": 60,
"max_line_length": 115,
"avg_line_length": 33,
"alnum_prop": 0.6732323232323232,
"repo_name": "oscarlorentzon/repstruct",
"id": "1644d3991188b36f8d13b8f7248ef0f398a6c5f2",
"size": "1980",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "repstruct/analysis/kmeans.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "126642"
},
{
"name": "Shell",
"bytes": "259"
}
],
"symlink_target": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.