repo_name (string, 5-100) | path (string, 4-231) | language (string, 1 class) | license (string, 15 classes) | size (int64, 6-947k) | score (float64, 0-0.34) | prefix (string, 0-8.16k) | middle (string, 3-512) | suffix (string, 0-8.17k) |
|---|---|---|---|---|---|---|---|---|
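The rows below are fill-in-the-middle (FIM) samples: each source file is cut into three contiguous spans, stored in the prefix, middle, and suffix columns described above. A minimal sketch of how one row recomposes into its original file; the row values here are invented for illustration, not taken from the dataset:

row = {
    "prefix": "def add(a, b):\n    return a ",
    "middle": "+ b",
    "suffix": "\n\nprint(add(1, 2))\n",
}

# The three fields are contiguous spans of one file, so plain
# concatenation restores the original source.
source = row["prefix"] + row["middle"] + row["suffix"]
print(source)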
armab/st2contrib | packs/servicenow/actions/approve_change.py | Python | apache-2.0 | 317 | 0 | from lib.actions import BaseAction
class ApprovalAction(BaseAction):
def run(self, number):
s = self.client
        s.table = 'change_request'
res = s.get({'number': number})
sys_id = res[0]['sys_id']
        response = s.update({'approval': 'approved'}, sys_id)
return response
|
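The action above goes through the pack's BaseAction client; as a rough point of reference, the same lookup-then-approve flow against the standard ServiceNow Table API can be sketched with requests. The instance URL and credentials are placeholders, and error handling is omitted:

import requests

BASE = "https://example.service-now.com/api/now/table/change_request"
AUTH = ("admin", "password")  # placeholder credentials

def approve_change(number):
    # Look up the change record's sys_id by its human-readable number.
    res = requests.get(BASE, auth=AUTH, params={"number": number})
    sys_id = res.json()["result"][0]["sys_id"]
    # Patch the approval field on that record.
    upd = requests.patch("%s/%s" % (BASE, sys_id), auth=AUTH,
                         json={"approval": "approved"})
    return upd.json()["result"]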
ufal/udpipe | releases/pypi/test/test_udpipe.py | Python | mpl-2.0 | 1,690 | 0.004782 | #!/usr/bin/python
# vim:fileencoding=utf8
from __future__ import unicode_literals
import unittest
class TestUDPipe(unittest.TestCase):
def test_model(self):
import ufal.udpipe
model = ufal.udpipe.Model.load('test/data/test.model')
self.assertTrue(model)
tokenizer = model.newTokenizer(model.DEFAULT)
conlluOutput = ufal.udpipe.OutputFormat.newOutputFormat("conllu")
sentence = ufal.udpipe.Sentence()
error = ufal.udpipe.ProcessingError();
        tokenizer.setText("Znamená to, že realitě nepodléhá. ");
self.assertTrue(tokenizer.nextSentence(sentence, error))
self.assertFalse(error.occurred())
self.assertTrue(model.tag(sentence, model.DEFAULT))
        self.assertTrue(model.parse(sentence, model.DEFAULT))
self.assertEqual(conlluOutput.writeSentence(sentence), """\
# newdoc
# newpar
# sent_id = 1
# text = Znamená to, že realitě nepodléhá.
1 Znamená znamenat VERB VB-S---3P-AA--- Aspect=Imp|Mood=Ind|Negative=Pos|Number=Sing|Person=3|Tense=Pres|VerbForm=Fin|Voice=Act 0 root _ _
2 to ten PRON PDNS1---------- Case=Nom|Gender=Neut|Number=Sing|PronType=Dem 1 nsubj _ SpaceAfter=No
3 , , PUNCT Z:------------- _ 6 punct _ _
4 že že SCONJ J,------------- _ 6 mark _ _
5 realitě realita NOUN NNFS3-----A---- Case=Dat|Gender=Fem|Negative=Pos|Number=Sing 6 dobj _ _
6 nepodléhá podléhat VERB VB-S---3P-NA--- Aspect=Imp|Mood=Ind|Negative=Neg|Number=Sing|Person=3|Tense=Pres|VerbForm=Fin|Voice=Act 1 ccomp _ SpaceAfter=No
7 . . PUNCT Z:------------- _ 1 punct _ _
""")
self.assertFalse(tokenizer.nextSentence(sentence))
if __name__ == '__main__':
unittest.main()
|
all-of-us/raw-data-repository | rdr_service/lib_fhir/fhirclient_3_0_0/models/graphdefinition_tests.py | Python | bsd-3-clause | 2,760 | 0.005435 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Generated from FHIR 3.0.0.11832 on 2017-03-22.
# 2017, SMART Health IT.
import io
import json
import os
import unittest
from . import graphdefinition
from .fhirdate import FHIRDate
class GraphDefinitionTests(unittest.TestCase):
def instantiate_from(self, filename):
datadir = os.environ.get('FHIR_UNITTEST_DATADIR') or ''
with io.open(os.path.join(datadir, filename), 'r', encoding='utf-8') as handle:
js = json.load(handle)
self.assertEqual("GraphDefinition", js["resourceType"])
return graphdefinition.GraphDefinition(js)
def testGraphDefinition1(self):
inst = self.instantiate_from("graphdefinition-example.json")
self.assertIsNotNone(inst, "Must have instantiated a GraphDefinition instance")
self.implGraphDefinition1(inst)
js = inst.as_json()
self.assertEqual("GraphDefinition", js["resourceType"])
inst2 = graphdefinition.GraphDefinition(js)
self.implGraphDefinition1(inst2)
def implGraphDefinition1(self, inst):
self.assertEqual(inst.contact[0].telecom[0].system, "url")
self.assertEqual(inst.contact[0].telecom[0].value, "http://hl7.org/fhir")
self.assertEqual(inst.date.date, FHIRDate("2015-08-04").date)
self.assertEqual(inst.date.as_json(), "2015-08-04")
self.assertEqual(inst.description, "Specify to include list references when generating a document using the $document operation")
self.assertEqual(inst.id, "example")
self.assertEqual(inst.link[0].description, "Link to List")
self.assertEqual(inst.link[0].path, "Composition.section.entry")
self.assertEqual(inst.link[0].target[0].compartment[0].code, "Patient")
self.assertEqual(inst.link[0].target[0].compartment[0].rule, "identical")
        self.assertEqual(inst.link[0].target[0].link[0].description, "Include any list entries")
self.assertEqual(inst.link[0].target[0].link[0].path, "List.entry.item")
self.assertEqual(inst.link[0].target[0].link[0].target[0].compartment[0].code, "Patient")
        self.assertEqual(inst.link[0].target[0].link[0].target[0].compartment[0].rule, "identical")
self.assertEqual(inst.link[0].target[0].link[0].target[0].type, "Resource")
self.assertEqual(inst.link[0].target[0].type, "List")
self.assertEqual(inst.name, "Document Generation Template")
self.assertEqual(inst.publisher, "FHIR Project")
self.assertEqual(inst.start, "Composition")
self.assertEqual(inst.status, "draft")
self.assertEqual(inst.text.status, "generated")
self.assertEqual(inst.url, "http://h7.org/fhir/GraphDefinition/example")
|
blorgon9000/pyopus | demo/parallel/cooperative/05-asyncloop.py | Python | gpl-3.0 | 2,431 | 0.051008 | # This demo does the same as the dyndispatch demo, except that a
# custom dispatcher loop is used. This is how asynchronous parallel
# optimization algorithms like DE and PSADE are implemented.
# mpirun -n 4 python 05-asyncloop.py
from pyopus.parallel.cooperative import cOS
from pyopus.parallel.mpi import MPI
from funclib import jobProcessor
# Result at which we stop
stopAtResult=150
# Minimal and maximal number of parallel tasks
# The maximal number of parallel tasks can be infinite (set maxTasks to None)
minTasks=1
maxTasks=1000
if __name__=='__main__':
# Set up MPI
cOS.setVM(MPI())
    # This list will hold the jobs (values that are doubled)
jobs=[]
# This list will be filled with results
results=[]
# Stop the loop
stop=False
# Running task status storage
running={}
# Job index of next job
atJob=0
# Main loop
# Run until stop flag set and all tasks are joined
while not (stop and len(running)==0):
# Spawn tasks if slots are available and maximal number of tasks is not reached
# Spawn one task if there are no tasks
while (
# Spawn
not stop and (
# no tasks running, need at least one task, spawn
len(running)==0 or
# too few slaves in a parallel environment (number of slots > 0),
# force spawn regardless of the number of free slots
(cOS.slots()>0 and len(running)<minTasks) or
# free slots available and less than maximal slaves, spawn
(cOS.freeSlots()>0 and (maxTasks is None or len(running)<maxTasks))
)
):
# Job (value to double)
job=atJob
# Spawn a global search task
tid=cOS.Spawn(jobProcessor, args=[job], remote=True, block=True)
print "Spawned task", tid, "for job", job
# Store the job
running[tid]={
'index': atJob,
'job': job,
}
# Go to next job
atJob+=1
# Join jobs
tid,result = cOS.Join(block=True).popitem()
print "Received", result, "from", tid
        # Get status and remove it from the dictionary of running jobs
status=running[tid]
del running[tid]
index=status['index']
# Make space for the result
if index>=len(results):
results.extend([None]*(index+1-len(results)))
# Store result
results[index]=result
# Check if we need to stop
if result>=stopAtResult and not stop:
stop=True
print "Spawning no more tasks"
print("Results: "+str(results))
# Finish, need to do this if MPI is used
cOS.finalize()
|
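The loop above is tied to pyopus's cOS/MPI machinery; for comparison, a rough standard-library analogue of the same bounded spawn-one/join-one dispatcher pattern, with a doubling stand-in for funclib.jobProcessor, might look like this:

from concurrent.futures import FIRST_COMPLETED, ThreadPoolExecutor, wait

def job_processor(job):  # stand-in for funclib.jobProcessor
    return job * 2

stop_at_result = 150
max_workers = 4
results = {}

with ThreadPoolExecutor(max_workers=max_workers) as pool:
    running = {}  # future -> job index
    at_job = 0
    stop = False
    while not (stop and not running):
        # Keep the pool saturated until the stop flag is raised.
        while not stop and len(running) < max_workers:
            running[pool.submit(job_processor, at_job)] = at_job
            at_job += 1
        # Join exactly one finished task, like cOS.Join(block=True).
        done, _ = wait(running, return_when=FIRST_COMPLETED)
        future = done.pop()
        index = running.pop(future)
        results[index] = future.result()
        if results[index] >= stop_at_result:
            stop = True

print([results[i] for i in sorted(results)])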
TheTimmy/spack | var/spack/repos/builtin.mock/packages/dtbuild2/package.py | Python | lgpl-2.1 | 1,542 | 0 | ##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Dtbuild2(Package):
"""Simple package which acts as a build dependency"""
homepage = "http://www.example.com"
url = "http://www.example.com/dtbuild2-1.0.tar.gz"
version('1.0', '0123456789abcdef0123456789abcdef')
def install(self, spec, prefix):
pass
|
Ultimaker/Uranium | UM/Math/Plane.py | Python | lgpl-3.0 | 975 | 0.005128 | # Copyright (c) 2015 Ultimaker B.V.
# Uranium is released under the terms of the LGPLv3 or higher.
from UM.Math.Vector import Vector
from UM.Math.Float import Float
class Plane:
"""Plane representation using normal and distance."""
def __init__(self, normal = Vector(), distance = 0.0):
super().__init__()
self._normal = normal
self._distance = distance
@property
def normal(self):
        return self._normal
@property
def distance(self):
return self._distance
def intersectsRay(self, ray):
w = ray.origin - (self._normal * self._distance)
nDotR = self._normal.dot(ray.direction)
nDotW = -self._normal.dot(w)
if Float.fuzzyCompare(nDotR, 0.0):
return False
t = nDotW / nDotR
if t < 0:
return False
return t
def __repr__(self):
return "Plane(normal = {0}, distance = {1})".format(self._normal, self._distance)
|
petrvanblokland/Xierpa3 | xierpa3/attributes/frames.py | Python | mit | 1,825 | 0.010411 | # -*- coding: UTF-8 -*-
# -----------------------------------------------------------------------------
# xierpa server
# Copyright (c) 2014+ buro@petr.com, www.petr.com, www.xierpa.com
#
# X I E R P A 3
# Distribution by the MIT License.
#
# -----------------------------------------------------------------------------
#
# frames.py
#
from xierpa3.attributes.attribute import Attribute
from xierpa3.attributes.values import asValue
class Frame(Attribute):
def __init__(self, t, r=None, b=None, l=None):
# Margin(2)
# Margin(2,3,4,5)
self.t = t
self.r = r
self.b = b
self.l = l
def _get_value(self):
if self.r is not None and (self.b is None or self.l is None):
return '%s %s' % (asValue(self.t), asValue(self.r))
if self.r is None or self.b is None or self.l is None:
return asValue(self.t)
return '%s %s %s %s' % (asValue(self.t), asValue(self.r),asValue(self.b), asValue(self.l))
value = property(_get_value)
def build(self, name, builder, prefix=None):
builder.output(self.value)
def _get_raw(self):
return self.id, self.t, self.r, self.b, self.l
raw = property(_get_raw)
class Margin(Frame):
pass
class Padding(Frame):
pass
class Border(Frame):
def __init__(self, v1, v2=None, v3=None):
# Border('solid', 2, Color('E1E2E2'))
        # @@@ Can be extended by parsing the values.
self.v1 = v1
self.v2 = v2
self.v3 = v3
def _get_value(self):
        return '%s %s %s' % (self.v1, self.v2 or '', self.v3 or '')
value = property(_get_value)
def _get_raw(self):
return self.id, self.v1, self.v2, self.v3
raw = property(_get_raw)
|
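The one/two/four-value collapse in Frame._get_value above mirrors CSS margin shorthand. A standalone sketch of the same branching, using plain strings in place of xierpa3's asValue helper (the 'px' values are assumptions):

def frame_value(t, r=None, b=None, l=None):
    # Two values given: vertical then horizontal, as in CSS "margin: t r".
    if r is not None and (b is None or l is None):
        return '%s %s' % (t, r)
    # One value given: applies to all four sides.
    if r is None or b is None or l is None:
        return '%s' % t
    # All four values given: top, right, bottom, left.
    return '%s %s %s %s' % (t, r, b, l)

print(frame_value('2px'))                        # 2px
print(frame_value('2px', '4px'))                 # 2px 4px
print(frame_value('2px', '4px', '6px', '8px'))   # 2px 4px 6px 8px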
alex/taskmaster | setup.py | Python | apache-2.0 | 977 | 0 | #!/usr/bin/python
from setuptools import setup, find_packages
setup(
name="taskmaster",
license='Apache License 2.0',
    version="0.5.2",
description="",
author="David Cramer",
author_email="dcramer@gmail.com",
url="https://github.com/dcramer/taskmaster",
packages=find_packages("src"),
package_dir={'': 'src'},
entry_points={
'console_scripts': [
'tm-master = taskmaster.cli.master:main',
            'tm-slave = taskmaster.cli.slave:main',
'tm-spawn = taskmaster.cli.spawn:main',
],
},
install_requires=[
'progressbar',
'gevent',
'gevent_zeromq',
# 'pyzmq-static',
],
tests_require=[
'unittest2',
'Nose>=1.0',
],
classifiers=[
"Environment :: Console",
"Intended Audience :: Developers",
"Programming Language :: Python",
"Topic :: Software Development",
"Topic :: Utilities",
])
|
antoinecarme/pyaf | tests/model_control/detailed/transf_Difference/model_control_one_enabled_Difference_Lag1Trend_Seasonal_MonthOfYear_NoAR.py | Python | bsd-3-clause | 165 | 0.048485 | import tests.model_control.test_ozone_custom_models_enabled as testmod
testmod.build_model( ['Difference'] , ['Lag1Trend'] , ['Seasonal_MonthOfYear'] , ['NoAR'] ); |
KaiRo-at/socorro | alembic/versions/21e4e35689f6_bug_993786_update_crash_adu_by_build_.py | Python | mpl-2.0 | 728 | 0.012363 | """bug 993786 - update_crash_adu_by_build_signature-bad-buildids
Revision ID: 21e4e35689f6
Revises: 224f0fda6ecb
Create Date: 2014-04-08 18:46:19.755028
"""
# revision identifiers, used by Alembic.
revision = '21e4e35689f6'
down_revision = '224f0fda6ecb'
from alembic import op
from socorrolib.lib import citexttype, jsontype, buildtype
from socorrolib.lib.migrations import fix_permissions, load_stored_proc
import sqlalchemy as sa
from sqlalchemy import types
from sqlalchemy.dialects import postgresql
from sqlalchemy.sql import table, column
def upgrade():
load_stored_proc(op, ['update_crash_adu_by_build_signature.sql'])
def downgrade():
    load_stored_proc(op, ['update_crash_adu_by_build_signature.sql'])
|
rcarrillocruz/ansible | lib/ansible/modules/network/f5/bigip_iapp_service.py | Python | gpl-3.0 | 14,587 | 0.000686 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright 2017 F5 Networks Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {
'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.0'
}
DOCUMENTATION = '''
---
module: bigip_iapp_service
short_description: Manages TCL iApp services on a BIG-IP.
description:
- Manages TCL iApp services on a BIG-IP.
version_added: "2.4"
options:
name:
description:
- The name of the iApp service that you want to deploy.
required: True
template:
description:
- The iApp template from which to instantiate a new service. This
template must exist on your BIG-IP before you can successfully
create a service. This parameter is required if the C(state)
parameter is C(present).
parameters:
description:
- A hash of all the required template variables for the iApp template.
If your parameters are stored in a file (the more common scenario)
it is recommended you use either the `file` or `template` lookups
to supply the expected parameters.
force:
description:
- Forces the updating of an iApp service even if the parameters to the
service have not changed. This option is of particular importance if
the iApp template that underlies the service has been updated in-place.
This option is equivalent to re-configuring the iApp if that template
has changed.
default: False
state:
description:
- When C(present), ensures that the iApp service is created and running.
When C(absent), ensures that the iApp service has been removed.
default: present
choices:
- present
- absent
notes:
- Requires the f5-sdk Python package on the host. This is as easy as pip
install f5-sdk.
- Requires the deepdiff Python package on the host. This is as easy as pip
install f5-sdk.
requirements:
- f5-sdk
- deepdiff
extends_documentation_fragment: f5
author:
- Tim Rupp (@caphrim007)
'''
EXAMPLES = '''
- name: Create HTTP iApp service from iApp template
bigip_iapp_service:
name: "foo-service"
template: "f5.http"
parameters: "{{ lookup('file', 'f5.http.parameters.json') }}"
password: "secret"
server: "lb.mydomain.com"
state: "present"
user: "admin"
delegate_to: localhost
- name: Upgrade foo-service to v1.2.0rc4 of the f5.http template
bigip_iapp_service:
name: "foo-service"
template: "f5.http.v1.2.0rc4"
password: "secret"
server: "lb.mydomain.com"
state: "present"
user: "admin"
delegate_to: localhost
- name: Configure a service using parameters in YAML
bigip_iapp_service:
name: "tests"
template: "web_frontends"
password: "admin"
server: "{{ inventory_hostname }}"
server_port: "{{ bigip_port }}"
validate_certs: "{{ validate_certs }}"
state: "present"
user: "admin"
parameters:
variables:
- name: "var__vs_address"
value: "1.1.1.1"
- name: "pm__apache_servers_for_http"
value: "2.2.2.1:80"
- name: "pm__apache_servers_for_https"
value: "2.2.2.2:80"
delegate_to: localhost
- name: Re-configure a service whose underlying iApp was updated in place
bigip_iapp_service:
name: "tests"
template: "web_frontends"
password: "admin"
force: yes
server: "{{ inventory_hostname }}"
server_port: "{{ bigip_port }}"
validate_certs: "{{ validate_certs }}"
state: "present"
user: "admin"
parameters:
variables:
- name: "var__vs_address"
value: "1.1.1.1"
- name: "pm__apache_servers_for_http"
value: "2.2.2.1:80"
- name: "pm__apache_servers_for_https"
value: "2.2.2.2:80"
delegate_to: localhost
'''
RETURN = '''
# only common fields returned
'''
from ansible.module_utils.f5_utils import (
AnsibleF5Client,
AnsibleF5Parameters,
HAS_F5SDK,
F5ModuleError,
iteritems,
iControlUnexpectedHTTPError
)
from deepdiff import DeepDiff
class Parameters(AnsibleF5Parameters):
returnables = []
api_attributes = [
'tables', 'variables', 'template', 'lists'
]
updatables = ['tables', 'variables', 'lists']
def to_return(self):
result = {}
for returnable in self.returnables:
result[returnable] = getattr(self, returnable)
result = self._filter_params(result)
return result
def api_params(self):
result = {}
for api_attribute in self.api_attributes:
if self.api_map is not None and api_attribute in self.api_map:
result[api_attribute] = getattr(self, self.api_map[api_attribute])
else:
result[api_attribute] = getattr(self, api_attribute)
result = self._filter_params(result)
return result
@property
def tables(self):
result = []
if not self._values['tables']:
return None
tables = self._values['tables']
for table in tables:
tmp = dict()
name = table.get('name', None)
if name is None:
raise F5ModuleError(
"One of the provided tables does not have a name"
)
tmp['name'] = str(name)
columns = table.get('columnNames', None)
if columns:
tmp['columnNames'] = [str(x) for x in columns]
# You cannot have rows without columns
rows = table.get('rows', None)
if rows:
tmp['rows'] = []
for row in rows:
tmp['rows'].append(dict(row=[str(x) for x in row['row']]))
result.append(tmp)
result = sorted(result, key=lambda k: k['name'])
return result
@tables.setter
def tables(self, value):
self._values['tables'] = value
@property
def variables(self):
result = []
if not self._values['variables']:
return None
variables = self._values['variables']
for variable in variables:
tmp = dict((str(k), str(v)) for k, v in iteritems(variable))
if 'encrypted' not in tmp:
# BIG-IP will inject an 'encrypted' key if you don't provide one.
                # If you don't, the value defaults to 'no'.
tmp['encrypted'] = 'no'
if 'value' not in tmp:
tmp['value'] = ''
# This seems to happen only on 12.0.0
elif tmp['value'] == 'none':
tmp['value'] = ''
result.append(tmp)
result = sorted(result, key=lambda k: k['name'])
return result
@variables.setter
def variables(self, value):
self._values['variables'] = value
@property
def lists(self):
result = []
if not self._values['lists']:
return None
lists = self._values['lists']
for list in lists:
tmp = dict((str(k), str(v)) for k, v in iteritems(list) if k != 'value')
if 'encrypted' not in list:
# BIG-IP will inject an 'encrypted' key if you don't provide one.
                # If you don't, the value defaults to 'no'.
tmp[ |
migue/voltdb | lib/python/vdm/server/Configuration.py | Python | agpl-3.0 | 59,480 | 0.002808 | # This file is part of VoltDB.
# Copyright (C) 2008-2017 VoltDB Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with VoltDB. If not, see <http://www.gnu.org/licenses/>.
import os
from collections import defaultdict
import json
import traceback
from xml.etree.ElementTree import Element, SubElement, tostring, XML
import sys
from flask import jsonify
import HTTPListener
import DeploymentConfig
from Validation import ServerInputs, DatabaseInputs, JsonInputs, UserInputs, ConfigValidation
from logging.handlers import RotatingFileHandler
import logging
import ast
import itertools
from Validation import Validation
def convert_xml_to_json(config_path):
"""
Method to get the json content from xml file
:param config_path (string): path of xml file
"""
with open(config_path) as config_file:
xml = config_file.read()
config_content = XML(xml)
xml_final = etree_to_dict(config_content)
D2 = {}
for (k, v) in zip(xml_final.keys(), xml_final.values()):
D2[k] = v
# To get the list of servers in case of old members[] (for backward compatible)
if 'members' in D2[k] and 'member' in D2[k]['members'] and D2[k]['members']['member']:
if type(D2[k]['members']['member']) is dict:
member_json = get_field_from_xml(D2[k]['members']['member'], 'dict')
HTTPListener.Global.SERVERS[member_json[0]['id']] = member_json[0]
else:
member_json = get_field_from_xml(D2[k]['members']['member'], 'list')
for member in member_json:
HTTPListener.Global.SERVERS[member['id']] = member
if type(D2[k]['databases']['database']) is dict:
db_json = get_field_from_xml(D2[k]['databases']['database'],
'dict', 'database')
HTTPListener.Global.DATABASES[db_json[0]['id']] = db_json[0]
else:
db_json = get_field_from_xml(D2[k]['databases']['database'],
'list', 'database')
for database in db_json:
HTTPListener.Global.DATABASES[database['id']] = database
if type(D2[k]['deployments']['deployment']) is dict:
deployment_json = get_deployment_from_xml(D2[k]['deployments']
['deployment'], 'dict')
HTTPListener.Global.DEPLOYMENT[deployment_json[0]['databaseid']] = deployment_json[0]
else:
deployment_json = get_deployment_from_xml(D2[k]['deployments']
['deployment'], 'list')
for deployment in deployment_json:
HTTPListener.Global.DEPLOYMENT[deployment['databaseid']] = deployment
if D2[k]['deployments'] and 'deployment' in D2[k]['deployments']:
if type(D2[k]['deployments']['deployment']) is dict:
set_deployment_users(D2[k]['deployments']['deployment'])
else:
for deployment in D2[k]['deployments']['deployment']:
set_deployment_users(deployment)
def set_deployment_users(deployment):
if 'users' in deployment and deployment['users'] is not None\
and 'user' in deployment['users']:
if type(deployment) is dict:
user_json = get_users_from_xml(deployment,
'dict')
for user in user_json:
HTTPListener.Global.DEPLOYMENT_USERS[int(user['userid'])] = user
else:
            user_json = get_users_from_xml(deployment,
'list')
for deployment_user in user_json:
HTTPListener.Global.DEPLOYMENT_USERS[int(deployment_user['userid'])] = deployment_user
def validate_and_convert_xml_to_json(config_path):
"""
Method to get the json content from xml file
:param config_path (string): path of xml file
"""
log_file = os.path.join(HTTPListener.Global.DATA_PATH, 'voltdeploy.log')
handler = RotatingFileHandler(log_file)
handler.setFormatter(logging.Formatter(
"%(asctime)s|%(levelname)s|%(message)s"))
log = logging.getLogger('werkzeug')
log.setLevel(logging.NOTSET)
log.addHandler(handler)
try:
with open(config_path) as config_file:
xml = config_file.read()
config_content = XML(xml)
xml_final = etree_to_dict(config_content)
D2 = {}
for (k, v) in zip(xml_final.keys(), xml_final.values()):
D2[k] = v
populate_database(D2[k]['databases']['database'], log)
if 'members' in D2[k] and 'member' in D2[k]['members'] and D2[k]['members']['member']:
if type(D2[k]['members']['member']) is dict:
populate_server(D2[k]['members']['member'], D2[k]['databases']['database'], log)
populate_deployment(D2[k]['deployments']['deployment'], log)
except Exception as err:
log.error("Error while reloading configuration: %s", "Invalid file content.")
def populate_database(databases, log):
success = True
if type(databases) is dict:
db_json = get_database_from_xml(databases,
'dict', log, 'database')
req = HTTPListener.DictClass()
req.json = {}
req.json = db_json[0]
inputs = DatabaseInputs(req)
if not inputs.validate():
success = False
sys.stdout.write(str(inputs.errors))
log.error("Error while reloading configuration: %s", str(inputs.errors))
if success is True:
HTTPListener.Global.DATABASES = {db_json[0]['id']: db_json[0]}
else:
db_json = get_database_from_xml(databases,
'list', log, 'database')
success = True
result = check_duplicate_database(db_json)
if result != "":
success = False
log.error("Error while reloading configuration: %s", result)
else:
for database in db_json:
req = HTTPListener.DictClass()
req.json = {}
req.json = database
inputs = DatabaseInputs(req)
if not inputs.validate():
success = False
sys.stdout.write(str(inputs.errors))
log.error("Error while reloading configuration: %s", str(inputs.errors))
if success is True:
HTTPListener.Global.DATABASES = {}
for database in db_json:
HTTPListener.Global.DATABASES[database['id']] = database
def populate_server(servers, databases, log):
success = True
if type(servers) is dict:
member_json = get_field_from_xml(servers, 'dict')
req = HTTPListener.DictClass()
req.json = {}
req.json = member_json[0]
inputs = ServerInputs(req)
if not inputs.validate():
success = False
sys.stdout.write(str(inputs.errors))
log.error("Error while reloading configuration: %s", str(inputs.errors))
else:
result = validate_server_ports_dict(member_json[0], databases, True)
if result is not None:
success = False
log.error("Error while reloading configuration: %s", result)
if success is True:
HTTPListener.Global.SERVERS = {member_json[0]['id']: member_json[0]}
else:
member_json = get_field_from_xml(servers, 'list')
for member in member_json:
req = HTTPListener.DictClass()
req.json = {}
req.json = member
inputs = ServerInputs(req)
if not inputs.valida |
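convert_xml_to_json above leans on an etree_to_dict helper whose definition falls outside this excerpt. A common minimal shape for such a helper, shown only as an illustration and not as VoltDB's actual implementation:

from collections import defaultdict
from xml.etree.ElementTree import XML

def etree_to_dict(t):
    d = {t.tag: {} if t.attrib else None}
    children = list(t)
    if children:
        dd = defaultdict(list)
        for dc in map(etree_to_dict, children):
            for k, v in dc.items():
                dd[k].append(v)
        # Collapse single-item lists so lone children become plain values.
        d = {t.tag: {k: v[0] if len(v) == 1 else v for k, v in dd.items()}}
    if t.attrib:
        d[t.tag].update(('@' + k, v) for k, v in t.attrib.items())
    if t.text and t.text.strip():
        if children or t.attrib:
            d[t.tag]['#text'] = t.text.strip()
        else:
            d[t.tag] = t.text.strip()
    return d

print(etree_to_dict(XML('<a x="1"><b>2</b><b>3</b></a>')))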
devilry/devilry-django | devilry/devilry_markup/views.py | Python | bsd-3-clause | 610 | 0.004918 | from django.http import HttpResponse
from django.http import HttpResponseBadRequest
from django.views.generic import View
from devilry.devilry_markup.parse_markdown import markdown_full
class DevilryFlavouredMarkdownFull(View):
def _parse(self, data):
if 'md' in data:
md = data['md']
return HttpResponse(markdown_full(md))
else:
return HttpResponseBadRequest('"md" not in POST data.')
def post(self, request):
        return self._parse(request.POST)
## For debugging:
#def get(self, request):
#return self._parse(request.GET)
|
co-devs/microsoftBookDownloader | microsoftBookDownloader.py | Python | gpl-3.0 | 3,629 | 0.003031 | #!/usr/bin/env python
# Author: Michael Devens
# Derek Ditch <github:@dcode>
# Github: https://github.com/co-devs
# Simple, poorly written script to download all of the files being shared
# by microsoft instead of downloading by hand. Downloads consecutively,
# will therefore take a while. Could be optimized, but meh.
# Also, download path is hardcoded. If you want to use it you will need
# to change the baseDir variable at a minimum
import os
import requests
import bs4
import urllib2
def downloadFile2(url):
result = urllib2.urlopen(url)
filename = os.path.basename(urllib2.urlparse.urlparse(result.url).path)
# print filename
# print result.geturl()
return result.geturl()
def downloadFile(url, path):
local_filename = os.path.join(path, url.split('/')[-1].title())
# print url
# print path
# print local_filename
# NOTE the stream=True parameter
r = requests.get(url, stream=True)
with open(local_filename, 'wb') as f:
for chunk in r.iter_content(chunk_size=1024):
if chunk: # filter out keep-alive new chunks
f.write(chunk)
return local_filename
errors = []
res = requests.get('https://blogs.msdn.microsoft.com/mssmallbiz/2017/07/11/largest-free-microsoft-ebook-giveaway-im-giving-away-millions-of-free-microsoft-ebooks-again-including-windows-10-office-365-office-2016-power-bi-azure-windows-8-1-office-2013-sharepo/?ranMID=24542&ranEAID=lw9MynSeamY&ranSiteID=lw9MynSeamY-ljYIUif9JQSw6mGEPRE6hg&tduid=(35cd2ef014e03b4e91ddad36b13d5d02)(256380)(2459594)(lw9MynSeamY-ljYIUif9JQSw6mGEPRE6hg)(')
res.raise_for_status()
soup = bs4.BeautifulSoup(res.text, "lxml")
# line = line.translate(None, '<>:\"/\|?*')
badChars = '<>:\"/\|?*'
baseDir = "./"
# file = open('file.html')
# soup = bs4.BeautifulSoup(file, "lxml")
elems = soup.select('tbody')
books = elems[0].select('tr')
booksLen = len(books)
for i in xrange(1, booksLen):
# for i in xrange(1, 5):
bookData = books[i].select('td')
category = bookData[0].getText().encode('ascii', 'ignore').translate(None, badChars)
catDir = os.path.join(baseDir, category)
# Check to see if the folder for the category exists
# if not, then make it
try:
os.stat(catDir)
        # print catDir, ' Exists'
except:
# print 'Mkdir: ', catDir
os.mkdir(catDir)
# TODO: Debug print, remove or change to a progress meter
# print 'Category: ', category
title = bookData[1].getText().encode('ascii', 'ignore').translate(None, badChars)
    titleDir = os.path.join(catDir, title)
try:
os.stat(titleDir)
# print titleDir, 'Exists'
except:
# print 'Mkdir: ', titleDir
os.mkdir(titleDir)
# TODO: Debug print, remove or change to a progress meter
# print 'Title: ', title
links = bookData[2].select('a')
linkNum = 1
for j in links:
# TODO: Debug prints (x2), remove or change to a progress meter
# print j['href']
print 'Downloading book ' + str(i) + '/' + str(booksLen) + ', file ' + str(linkNum) + '/' + str(len(links)) + ' to', titleDir
# TODO: Implement file download here. Download j['href']
# Will first need to check if file is present or if we are being
# redirected. We are likely being redirected and will need to download
# from a different url
link = j['href']
try:
downloadFile(downloadFile2(link), titleDir)
except:
print "ERROR"
errors.append((category, title))
linkNum += 1
print '\n\nERROR REPORT:'
for i in errors:
print i
|
paradiseOffice/sandbox_API_v1.0 | paradise_office_site/sandbox_v1.0/cygnet_maker/cy_tests/test_time.py | Python | gpl-2.0 | 1,622 | 0.018496 | #!/usr/bin/python3.3
import unittest
import sys
sys.path.append("/home/hazel/Documents/new_linux_paradise/paradise_office_site/sandbox_v1.0/cygnet_maker/cy_data_validation")
from datetime import time
from time_conv import Time
class TimeTestCase(unittest.TestCase):
''' Tests with numbered degrees of bad or good data, on a scale of 0=baddest to 10=goodest '''
def setUp(self):
self.time = Time.check_time("")
# A string
def test_vbad0(self):
self.time = Time.check_time("iutoeht")
correct_time = time(00, 00, 00)
        self.assertEqual( correct_time, self.time)
# An out of range whole number
def test_bad1(self):
self.time = Time.check_time("52304")
correct_time = time(00, 00, 00)
self.assertEqual( correct_time, self.time)
# Two out of range whole numbers
def test_bad2(self):
self.time = Time.check_time("70 80")
correct_time = time(23, 59, 00)
self.assertEqual( correct_time, self.time)
# hours, minutes and seconds formatted crap
def test_middle3(self):
self.time = Time.check_time("03 - 32/74")
correct_time = time(3, 32, 59)
self.assertEqual( correct_time, self.time)
# : in between hours and minutes
def test_good4(self):
self.time = Time.check_time("03:32:50")
correct_time = time(3, 32, 50)
self.assertEqual( correct_time, self.time)
# removing am or pm
def test_vgood5(self):
self.time = Time.check_time("3:35pm")
correct_time = time(15, 35, 00)
self.assertEqual( correct_time, self.time)
def tearDown(self):
self.time = ""
correct_time = ""
if __name__ == '__main__':
unittest.main()
|
Toshakins/wagtail | wagtail/wagtailadmin/tests/tests.py | Python | bsd-3-clause | 12,417 | 0.002175 | # -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
import json
from django.contrib.auth import get_user_model
from django.contrib.auth.models import Group, Permission
from django.core import mail
from django.core.urlresolvers import reverse, reverse_lazy
from django.test import TestCase, override_settings
from django.utils.translation import ugettext_lazy as _
from taggit.models import Tag
from wagtail.tests.utils import WagtailTestUtils
from wagtail.wagtailadmin.menu import MenuItem
from wagtail.wagtailadmin.site_summary import PagesSummaryItem
from wagtail.wagtailadmin.utils import send_mail, user_has_any_page_permission
from wagtail.wagtailcore.models import Page, Site
class TestHome(TestCase, WagtailTestUtils):
def setUp(self):
# Login
self.login()
def test_simple(self):
response = self.client.get(reverse('wagtailadmin_home'))
self.assertEqual(response.status_code, 200)
self.assertContains(response, "Welcome to the Test Site Wagtail CMS")
def test_admin_menu(self):
response = self.client.get(reverse('wagtailadmin_home'))
self.assertEqual(response.status_code, 200)
# check that media attached to menu items is correctly pulled in
self.assertContains(
response,
'<script type="text/javascript" src="/static/wagtailadmin/js/explorer-menu.js"></script>'
)
# check that custom menu items (including classname / attrs parameters) are pulled in
self.assertContains(
response,
'<a href="http://www.tomroyal.com/teaandkittens/" class="icon icon-kitten" data-fluffy="yes">Kittens!</a>'
)
# check that is_shown is respected on menu items
response = self.client.get(reverse('wagtailadmin_home') + '?hide-kittens=true')
self.assertNotContains(
response,
'<a href="http://www.tomroyal.com/teaandkittens/" class="icon icon-kitten" data-fluffy="yes">Kittens!</a>'
)
def test_never_cache_header(self):
# This tests that wagtailadmins global cache settings have been applied correctly
response = self.client.get(reverse('wagtailadmin_home'))
self.assertIn('private', response['Cache-Control'])
self.assertIn('no-cache', response['Cache-Control'])
self.assertIn('no-store', response['Cache-Control'])
self.assertIn('max-age=0', response['Cache-Control'])
def test_nonascii_email(self):
# Test that non-ASCII email addresses don't break the admin; previously these would
# cause a failure when generating Gravatar URLs
get_user_model().objects.create_superuser(username='snowman', email='☃@thenorthpole.com', password='password')
# Login
self.assertTrue(self.client.login(username='snowman', password='password'))
response = self.client.get(reverse('wagtailadmin_home'))
self.assertEqual(response.status_code, 200)
class TestPagesSummary(TestCase, WagtailTestUtils):
def setUp(self):
self.login()
def get_request(self):
"""
Get a Django WSGI request that has been passed through middleware etc.
"""
return self.client.get('/admin/').wsgi_request
def test_page_summary_single_site(self):
request = self.get_request()
root_page = request.site.root_page
link = '<a href="{}">'.format(reverse('wagtailadmin_explore', args=[root_page.pk]))
page_summary = PagesSummaryItem(request)
self.assertIn(link, page_summary.render())
def test_page_summary_multiple_sites(self):
Site.objects.create(
hostname='example.com',
root_page=Page.objects.get(pk=1))
request = self.get_request()
link = '<a href="{}">'.format(reverse('wagtailadmin_explore_root'))
page_summary = PagesSummaryItem(request)
self.assertIn(link, page_summary.render())
def test_page_summary_zero_sites(self):
Site.objects.all().delete()
request = self.get_request()
link = '<a href="{}">'.format(reverse('wagtailadmin_explore_root'))
page_summary = PagesSummaryItem(request)
self.assertIn(link, page_summary.render())
class TestEditorHooks(TestCase, WagtailTestUtils):
def setUp(self):
self.homepage = Page.objects.get(id=2)
self.login()
def test_editor_css_hooks_on_add(self):
        response = self.client.get(reverse('wagtailadmin_pages:add', args=('tests', 'simplepage', self.homepage.id)))
self.assertEqual(response.status_code, 200)
self.assertContains(response, '<link rel="stylesheet" href="/path/to/my/custom.css">')
def test_editor_js_hooks_on_add(self):
response = self.client.get(reverse('wagtailadmin_pages:add', args=('tests', 'simplepage', self.homepage.id)))
self.assertEqual(response.status_code, 200)
self.assertContains(response, '<script src="/path/to/my/custom.js"></script>')
def test_editor_css_hooks_on_edit(self):
response = self.client.get(reverse('wagtailadmin_pages:edit', args=(self.homepage.id, )))
self.assertEqual(response.status_code, 200)
self.assertContains(response, '<link rel="stylesheet" href="/path/to/my/custom.css">')
def test_editor_js_hooks_on_edit(self):
response = self.client.get(reverse('wagtailadmin_pages:edit', args=(self.homepage.id, )))
self.assertEqual(response.status_code, 200)
self.assertContains(response, '<script src="/path/to/my/custom.js"></script>')
class TestSendMail(TestCase):
def test_send_email(self):
send_mail("Test subject", "Test content", ["nobody@email.com"], "test@email.com")
# Check that the email was sent
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(mail.outbox[0].subject, "Test subject")
self.assertEqual(mail.outbox[0].body, "Test content")
self.assertEqual(mail.outbox[0].to, ["nobody@email.com"])
self.assertEqual(mail.outbox[0].from_email, "test@email.com")
@override_settings(WAGTAILADMIN_NOTIFICATION_FROM_EMAIL='anothertest@email.com')
def test_send_fallback_to_wagtailadmin_notification_from_email_setting(self):
send_mail("Test subject", "Test content", ["nobody@email.com"])
# Check that the email was sent
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(mail.outbox[0].subject, "Test subject")
self.assertEqual(mail.outbox[0].body, "Test content")
self.assertEqual(mail.outbox[0].to, ["nobody@email.com"])
self.assertEqual(mail.outbox[0].from_email, "anothertest@email.com")
@override_settings(DEFAULT_FROM_EMAIL='yetanothertest@email.com')
def test_send_fallback_to_default_from_email_setting(self):
send_mail("Test subject", "Test content", ["nobody@email.com"])
# Check that the email was sent
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(mail.outbox[0].subject, "Test subject")
self.assertEqual(mail.outbox[0].body, "Test content")
self.assertEqual(mail.outbox[0].to, ["nobody@email.com"])
self.assertEqual(mail.outbox[0].from_email, "yetanothertest@email.com")
def test_send_default_from_email(self):
send_mail("Test subject", "Test content", ["nobody@email.com"])
# Check that the email was sent
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(mail.outbox[0].subject, "Test subject")
self.assertEqual(mail.outbox[0].body, "Test content")
self.assertEqual(mail.outbox[0].to, ["nobody@email.com"])
self.assertEqual(mail.outbox[0].from_email, "webmaster@localhost")
class TestTagsAutocomplete(TestCase, WagtailTestUtils):
def setUp(self):
self.login()
Tag.objects.create(name="Test", slug="test")
def test_tags_autocomplete(self):
response = self.client.get(reverse('wagtailadmin_tag_autocomplete'), {
'term': 'test'
})
self.assertEqual(response.status_code, 200)
self.assertEqual(response['Content-Type'], 'application/json')
data = json.loads |
AlvaroOdoo/openacademy-project | modulo01/__openerp__.py | Python | apache-2.0 | 881 | 0.001139 | # -*- coding: utf-8 -*-
{
'name': "Módulo de Ejemplo",
'summary': """Manage trainings""",
'description': """
Módulo01 for managing trainings:
- training courses
- training sessions
- attendees registration
""",
'author': "Alvaro Villavicencio Ramírez",
'website': "http://www.soluciones4gcom",
    # Categories can be used to filter modules in modules listing
# Check https://github.com/odoo/odoo/blob/master/openerp/addons/base/module/module_data.xml
    # for the full list
'category': 'Test',
'version': '0.1',
# any module necessary for this one to work correctly
'depends': ['base'],
# always loaded
'data': [
# 'security/ir.model.access.csv',
# 'templates.xml',
],
# only loaded in demonstration mode
'demo': [
# 'demo.xml',
],
}
|
texastribune/armstrong.base | armstrong/base/tests/context_processors.py | Python | bsd-3-clause | 3,000 | 0.004333 | from django.http import HttpRequest
import fudge
import random
from ._utils import TestCase
from .. import context_processors
from ..context_processors import media_url, static_url
class TestOfMediaUrlContextProcessor(TestCase):
def generate_fake_request(self, is_secure=False):
request = fudge.Fake(HttpRequest)
request.provides('is_secure').returns(is_secure)
return request
def generate_fake_media_settings_and_result(self, is_secure=False):
media_url_pattern = '%%sexample.com/media/%d/' % random.randint(100, 200)
fake_settings = fudge.Fake()
fake_settings.has_attr(**{
'MEDIA_URL': media_url_pattern % 'http://',
'SECURE_MEDIA_URL': media_url_pattern % 'https://secure.',
})
expected_result = {
'MEDIA_URL': getattr(fake_settings,
'SECURE_MEDIA_URL' if is_secure else 'MEDIA_URL'),
        }
return fake_settings, expected_result
def test_returns_media_url_from_settings(self):
request = self.generate_fake_request()
fake_settings, expected_result = self.generate_fake_media_settings_and_result()
with fudge.patched_context(context_processors, 'settings', fake_settings):
            self.assertEquals(media_url(request), expected_result)
def test_returns_secure_media_url_from_settings_on_is_secure(self):
request = self.generate_fake_request(is_secure=True)
fake_settings, expected_result = self.generate_fake_media_settings_and_result(is_secure=True)
with fudge.patched_context(context_processors, 'settings', fake_settings):
self.assertEquals(media_url(request), expected_result)
def generate_fake_static_settings_and_result(self, is_secure=False):
static_url_pattern = '%%sexample.com/static/%d/' % random.randint(100, 200)
fake_settings = fudge.Fake()
fake_settings.has_attr(**{
'STATIC_URL': static_url_pattern % 'http://',
'SECURE_STATIC_URL': static_url_pattern % 'https://secure.',
})
expected_result = {
'STATIC_URL': getattr(fake_settings,
'SECURE_STATIC_URL' if is_secure else 'STATIC_URL'),
}
return fake_settings, expected_result
def test_returns_static_url_from_settings(self):
request = self.generate_fake_request()
fake_settings, expected_result = self.generate_fake_static_settings_and_result()
with fudge.patched_context(context_processors, 'settings', fake_settings):
self.assertEquals(static_url(request), expected_result)
def test_returns_secure_static_url_from_settings_on_is_secure(self):
request = self.generate_fake_request(is_secure=True)
fake_settings, expected_result = self.generate_fake_static_settings_and_result(is_secure=True)
with fudge.patched_context(context_processors, 'settings', fake_settings):
self.assertEquals(static_url(request), expected_result)
|
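The tests above repeat one fudge idiom: build a Fake carrying the attributes under test, then swap it into the target module with patched_context for the duration of the assertion. A minimal standalone sketch of that idiom; the config stand-in and MEDIA_URL value are invented:

import fudge

class config:  # stand-in for a module holding a `settings` attribute
    settings = None

fake_settings = fudge.Fake().has_attr(MEDIA_URL='http://example.com/media/')
with fudge.patched_context(config, 'settings', fake_settings):
    print(config.settings.MEDIA_URL)  # http://example.com/media/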
DanielKeep/rust-numeric-float | update-docs.py | Python | mit | 5,819 | 0.003437 | #!/usr/bin/env python2
import distutils.dir_util
import os
import shutil
import subprocess
import sys
import tempfile
import time
DOC_ARGS = '--no-deps'
DOC_FEATURES = "conv num rustc-serialize serde"
DOC_TARGET_BRANCH = 'gh-pages'
TEMP_CHECKOUT_PREFIX = 'gh-pages-checkout-'
TEMP_OUTPUT_PREFIX = 'gh-pages-generated-'
USE_ANSI = True if sys.platform != 'win32' else os.environ.get('FORCE_ANSI', '') != ''
TRACE_UPDATE_DOCS = os.environ.get('TRACE_UPDATE_DOCS', '') != ''
def sh(cmd):
msg_trace('sh(%r)' % cmd)
try:
subprocess.check_call(cmd, shell=True)
except:
msg_trace('FAILED!')
raise
def sh_eval(cmd, codec='utf-8', dont_strip=False):
msg_trace('sh_eval(%r)' % cmd)
result = None
try:
result = subprocess.check_output(cmd, shell=True).decode(codec)
if not dont_strip:
result = result.strip()
except:
msg_trace('FAILED!')
raise
return result
def msg(*args):
if USE_ANSI: sys.stdout.write('\x1b[1;34m')
sys.stdout.write('> ')
if USE_ANSI: sys.stdout.write('\x1b[1;32m')
for arg in args:
sys.stdout.write(str(arg))
if USE_ANSI: sys.stdout.write('\x1b[0m')
sys.stdout.write('\n')
sys.stdout.flush()
def msg_trace(*args):
if TRACE_UPDATE_DOCS:
if USE_ANSI: sys.stderr.write('\x1b[1;31m')
sys.stderr.write('$ ')
if USE_ANSI: sys.stderr.write('\x1b[0m')
for arg in args:
sys.stderr.write(str(arg))
sys.stderr.write('\n')
sys.stderr.flush()
def copytree(src, dst):
msg_trace('copytree(%r, %r)' % (src, dst))
distutils.dir_util.copy_tree(src=src, dst=dst)
def really_rmtree(path):
msg_trace('really_rmtree(%r)' % path)
WAIT_TIME_SECS = 1.0
MAX_TRIES = 10
def on_error(func, path, exc_info):
"""
Error handler for ``shutil.rmtree``.
If the error is due to an access error (read only file)
it attempts to add write permission and then retries.
If the error is for another reason it re-raises the error.
Usage: ``shutil.rmtree(path, onerror=on_error)``
From <http://stackoverflow.com/a/2656405>_.
"""
import stat
if not os.access(path, os.W_OK):
# Is the error an access error ?
os.chmod(path, stat.S_IWUSR)
func(path)
else:
raise
for _ in range(MAX_TRIES):
failed = True
try:
msg_trace('shutil.rmtree(%r)' % path)
shutil.rmtree(path, onerror=on_error)
failed = False
except WindowsError:
time.sleep(WAIT_TIME_SECS)
if not failed: return
msg('Warning: failed to remove directory %r' % path)
def init_doc_branch():
msg("Initialising %s branch" % DOC_TARGET_BRANCH)
dir = os.getcwdu()
msg_trace('dir = %r' % dir)
tmp = tempfile.mkdtemp(prefix=TEMP_CHECKOUT_PREFIX)
msg_trace('tmp = %r' % tmp)
try:
msg("Cloning into a temporary directory...")
sh('git init -q "%s"' % tmp)
msg_trace('os.chdir(%r)' % tmp)
os.chdir(tmp)
sh('git checkout -q --orphan "%s"' % DOC_TARGET_BRANCH)
sh('git commit -qm "Initial commit." --allow-empty')
sh('git remote add origin "%s"' % dir)
sh('git push -q origin gh-pages')
finally:
msg('Cleaning up...')
msg_trace('os.chdir(%r)' % dir)
os.chdir(dir)
msg_trace('shutil.rmtree(%r)' % tmp)
really_rmtree(tmp)
msg('%s is ready. Continuing.' % DOC_TARGET_BRANCH)
def main():
if sh_eval('git symbolic-ref --short HEAD') != u'master':
msg('Not on master; doing nothing.')
return 0
# Sanity check: does the doc branch exist at all?
branches = {b[2:].strip() for b in sh_eval('git branch', dont_strip=True).splitlines()}
msg_trace('branches = %r' % branches)
if DOC_TARGET_BRANCH not in branches:
init_doc_branch()
last_rev = sh_eval('git rev-parse HEAD')
last_msg = sh_eval('git log -1 --pretty=%B')
msg_trace('last_rev = %r' % last_rev)
msg_trace('last_msg = %r' % last_msg)
dir = os.getcwdu()
msg_trace('dir = %r' % dir)
tmp1 = tempfile.mkdtemp(prefix=TEMP_CHECKOUT_PREFIX)
tmp2 = tempfile.mkdtemp(prefix=TEMP_OUTPUT_PREFIX)
msg_trace('tmp1 = %r' % tmp1)
    msg_trace('tmp2 = %r' % tmp2)
try:
msg("Cloning into a temporary directory...")
sh('git clone -qb "%s" "%s" "%s"' % (DOC_TARGET_BRANCH, dir, tmp1))
msg_trace('os.chdir(%r)' % tmp1)
os.chdir(tmp1)
sh('git checkout -q master')
msg("Generating documentation...")
args = '%s --features="%s"' % (DOC_ARGS, DOC_FEATURES)
        sh('cargo doc %s' % args)
tmp1_target_doc = '%s/target/doc' % tmp1
msg_trace('shutil.move(%r, %r)' % (tmp1_target_doc, tmp2))
shutil.move(tmp1_target_doc, tmp2)
msg('Updating %s...' % DOC_TARGET_BRANCH)
sh('git checkout -q "%s"' % DOC_TARGET_BRANCH)
sh('git clean -dfq')
tmp2_doc = '%s/doc' % tmp2
msg_trace('copytree(%r, %r)' % (tmp2_doc, './doc'))
copytree(tmp2_doc, './doc')
msg('Committing changes...')
sh('git add .')
sh('git commit --amend -m "Update docs for %s" -m "%s"' % (last_rev[:7], last_msg))
sh('git push -fqu origin "%s"' % DOC_TARGET_BRANCH)
finally:
msg('Cleaning up...')
msg_trace('os.chdir(%r)' % dir)
os.chdir(dir)
msg_trace('shutil.rmtree(%r)' % tmp2)
really_rmtree(tmp2)
msg_trace('shutil.rmtree(%r)' % tmp1)
really_rmtree(tmp1)
msg('Done. Use `git push origin %s` to update live documentation.' % DOC_TARGET_BRANCH)
if __name__ == '__main__':
sys.exit(main())
|
torn2537/AnimationJava | LSTM2.py | Python | mit | 3,840 | 0.00026 | from __future__ import print_function
import matplotlib.pyplot as plt
import tensorflow as tf
from tensorflow.contrib import rnn
import time
from datetime import timedelta
# Import MNIST data
import matplotlib.pyplot as plt
from tensorflow.examples.tutorials.mnist import input_data
mnist = input_data.read_data_sets("/tmp/data/", one_hot=True)
# Training Parameters
learning_rate = 0.005
training_steps = 15000
batch_size = 128
display_step = 200
# Network Parameters
num_input = 28 # MNIST data input (img shape: 28*28)
timesteps = 28 # timesteps
num_hidden = 128 # hidden layer num of features
num_classes = 10 # MNIST total classes (0-9 digits)
# tf Graph input
X = tf.placeholder("float", [None, timesteps, num_input])
Y = tf.placeholder("float", [None, num_classes])
# Define weights
weights = {
'out': tf.Variable(tf.random_normal([num_hidden, num_classes]))
}
biases = {
'out': tf.Variable(tf.random_normal([num_classes]))
}
def RNN(x, weights, biases):
# Prepare data shape to match `rnn` function requirements
# Current data input shape: (batch_size, timesteps, n_input)
# Required shape: 'timesteps' tensors list of shape (batch_size, n_input)
# Unstack to get a list of 'timesteps' tensors of shape (batch_size,
# n_input)
x = tf.unstack(x, timesteps, 1)
# Define a lstm cell with tensorflow
lstm_cell = rnn.BasicLSTMCell(num_hidden, forget_bias=1.0)
# Get lstm cell output
outputs, states = rnn.static_rnn(lstm_cell, x, dtype=tf.float32)
# Linear activation, using rnn inner loop last output
return tf.matmul(outputs[-1], weights['out']) + biases['out']
logits = RNN(X, weights, biases)
prediction = tf.nn.softmax(logits)
# Define loss and optimizer
loss_op = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(
logits=logits, labels=Y))
optimizer = tf.train.GradientDescentOptimizer(learning_rate=learning_rate)
train_op = optimizer.minimize(loss_op)
# Evaluate model (with test logits, for dropout to be disabled)
correct_pred = tf.equal(tf.argmax(prediction, 1), tf.argmax(Y, 1))
accuracy = tf.reduce_mean(tf.cast(correct_pred, tf.float32))
# Initialize the variables (i.e. assign their default value)
init = tf.global_variables_initializer()
loss_group = []
epoch_group = []
# Start training
with tf.Session() as sess:
# Run the initializer
sess.run(init)
start_time = time.time()
for step in range(1, training_steps + 1):
tf.set_random_seed(23)
batch_x, batch_y = mnist.train.next_batch(batch_size)
# Reshape data to get 28 seq of 28 elements
batch_x = batch_x.reshape((batch_size, timesteps, num_input))
# Run optimization op (backprop)
sess.run(train_op, feed_dict={X: batch_x, Y: batch_y})
if step % display_step == 0 or step == 1:
# Calculate batch loss and accuracy
loss, acc = sess.run([loss_op, accuracy], feed_dict={X: batch_x,
Y: batch_y})
loss_group.append(loss)
epoch_group.append(step)
print("Step " + str(step) + ", Minibatch Loss= " +
| "{:.4f}".format(loss) + ", Training Accuracy= " +
"{:.3f}".format(acc))
print("Optimization Finished!")
print(loss_group)
print(epoch_group)
plt.plot(epoch_group, loss_group)
plt.show()
end_time = time.time()
time_dif = end_time - start_time
print("Time usage: " + str(timedelta(seconds=int(round(tim | e_dif)))))
# Calculate accuracy for 128 mnist test images
test_len = 128
test_data = mnist.test.images[:test_len].reshape(
(-1, timesteps, num_input))
test_label = mnist.test.labels[:test_len]
print("Testing Accuracy:",
sess.run(accuracy, feed_dict={X: test_data, Y: test_label}))
|
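The tf.unstack call above turns one (batch, timesteps, input) tensor into the list of per-timestep (batch, input) tensors that rnn.static_rnn expects. A small numpy illustration of that same reshaping, independent of TensorFlow:

import numpy as np

batch_size, timesteps, num_input = 128, 28, 28
x = np.zeros((batch_size, timesteps, num_input))

# Split along the time axis and drop it from each slice.
steps = [x[:, t, :] for t in range(timesteps)]

print(len(steps))      # 28 entries, one per timestep
print(steps[0].shape)  # (128, 28)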
vinzenz/prototype | leapp/snactor/commands/workflow/__init__.py | Python | apache-2.0 | 313 | 0.003195 | from leapp.utils.clicmd import command
_LONG_DESCRIPTION = '''
Leapp Workflow related commands.
For more information please consider reading the documentation at:
https://red.ht/leapp-docs
'''
@command('workflow', help='Workflow related commands', description=_LONG_DESCRIPTION)
def workflow(*args):
pass
|
acressity/acressity | narratives/models.py | Python | gpl-3.0 | 5,401 | 0.003888 | from django.db import models
from django.core.urlresolvers import reverse
from django import forms
from django.shortcuts import get_object_or_404
from django.http import HttpResponse
from django.utils import timezone
from django.core.paginator import Paginator
from django.utils.translation import ugettext_lazy as _
from django.contrib import messages
from django.contrib.auth import get_user_model
from django.contrib.sites.models import Site
from django.conf import settings
from experiences.models import Experience
from photologue.models import Gallery, Photo
from acressity.utils import embed_string, build_full_absolute_url
class Narrative(models.Model):
'''
Term describing description of an aspect of an experience. Think of these as pages or sections
within a chapter; they are the sustenance of the experience. Examples of narratives include a note,
update, thought, plan, itinerary, journal entry, publication, (ad infinitum) about the experience...
'''
body = models.TextField(help_text=_('The content of narrative. Where information regarding any thoughts, feelings, updates, etc can be added.'), null=False)
title = models.CharField(max_length=255, blank=True, null=True, help_text=_('Title of the narrative. If none given, defaults to date created.'))
    experience = models.ForeignKey(Experience, related_name='narratives')
    author = models.ForeignKey(settings.AUTH_USER_MODEL, related_name='narratives', null=False)
date_created = models.DateTimeField(default=timezone.now, null=False, blank=True)
date_modified = models.DateTimeField(auto_now=True, help_text='Updated every time object is saved')
category = models.CharField(max_length=50, null=True, blank=True, help_text='Optional information used to classify and order the narratives within the experience.')
gallery = models.OneToOneField(Gallery, on_delete=models.SET_NULL, null=True)
is_public = models.BooleanField(null=False, default=True, help_text='Public narratives will be displayed in the default views. Private ones are only seen by yourself and the other explorers in the narrative\'s experience. Changing the status of the narrative also changes the status of the photo gallery.')
password = models.CharField(_('password'), max_length=128, null=True, blank=True, help_text=_('Submitting the correct password provides access to this narrative if it is private.'))
taste_len = 250
def __init__(self, *args, **kwargs):
# Allows the quicker check of whether or not a particular field has changed
# Considering using this in the method controlling status of is_public
super(Narrative, self).__init__(*args, **kwargs)
self.__original_is_public = self.is_public
class Meta:
# ordering = ['category']
get_latest_by = 'date_created'
def __unicode__(self):
return self.title
def __str__(self):
return self.__unicode__()
def model(self):
return self.__class__.__name__
def get_experience_author(self):
return get_user_model().objects.get(pk=self.experience.author_id)
def taste(self):
return u'{0}...'.format(self.body[:self.taste_len]) if self.needs_shortening() else self.body
def needs_shortening(self):
return len(self.body) > self.taste_len
def is_author(self, explorer):
if explorer.is_authenticated():
if explorer == self.author:
return True
return False
def save(self, *args, **kwargs):
if not self.title:
self.title = timezone.now().strftime('%B %d, %Y')
if self.__original_is_public != self.is_public:
if self.gallery:
self.gallery.is_public = self.is_public
self.gallery.save()
super(Narrative, self).save(*args, **kwargs)
def get_next_narrative(self):
try:
narrative = self.get_next_by_date_created(experience_id=self.experience_id)
except Narrative.DoesNotExist:
narrative = None
return narrative
def get_previous_narrative(self):
try:
narrative = self.get_previous_by_date_created(experience_id=self.experience_id)
except Narrative.DoesNotExist:
narrative = None
return narrative
def get_next_public_narrative(self):
narrative = self.get_next_by_date_created(experience_id=self.experience.id)
while not narrative.is_public:
narrative = narrative.get_next_by_date_created(experience_id=narrative.experience.id)
if not narrative.is_public:
narrative = None
return narrative
def get_previous_public_narrative(self):
narrative = self.get_previous_by_date_created(experience_id=self.experience.id)
while not narrative.is_public:
narrative = narrative.get_previous_by_date_created(experience_id=narrative.experience.id)
if not narrative.is_public:
narrative = None
return narrative
def embedded_narrative(self):
return embed_string(self.body)
def get_absolute_url(self):
# Despite the name, returns url relative to root
return reverse('narrative', args=[self.pk])
def get_full_absolute_url(self):
# Return the complete url with scheme and domain
return build_full_absolute_url(self.get_absolute_url())
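# A minimal, self-contained sketch (illustrative, not part of this app) of the
# change-tracking pattern Narrative uses above: snapshot a field in __init__
# and compare against the snapshot in save().
class _TracksPublicFlag(object):
    def __init__(self, is_public=True):
        self.is_public = is_public
        self._original_is_public = is_public  # snapshot at load time

    def save(self):
        changed = self._original_is_public != self.is_public
        if changed:
            # ...propagate the new visibility to dependents (e.g. a gallery)...
            self._original_is_public = self.is_public
        return changed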
|
hhorak/rpmquality | setup.py | Python | mit | 1,001 | 0.022977 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
try:
from setuptools import setup, find_packages
except ImportError:
from distutils.core import setup, find_packages
setup(
name = 'rpmquality',
version = '0.1',
description = 'Short description',
long_description = 'Long description | ',
keywords = 'some, keywords',
author = 'Honza Horak',
author_email = 'hhorak@redhat.com',
license = 'MIT',
packages = find_packages(),
entry_points={'console_scripts':['rpmquality = rpmquality.bin:main']},
classifiers = ['Development Status :: 3 - Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI A | pproved :: MIT License',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Topic :: Software Development :: Build Tools',
'Topic :: System :: Software Distribution',
]
)
|
otakup0pe/aomi | aomi/seed_action.py | Python | mit | 7,172 | 0 | """ The aomi "seed" loop """
from __future__ import print_function
import os
import difflib
import logging
from shutil import rmtree
import tempfile
from termcolor import colored
import yaml
from future.utils import iteritems # pylint: disable=E0401
from aomi.helpers import dict_unicodeize
from aomi.filez import thaw
from aomi.model import Context
from aomi.template import get_secretfile, render_secretfile
from aomi.model.resource import Resource
from aomi.model.backend import CHANGED, ADD, DEL, OVERWRITE, NOOP, \
CONFLICT, VaultBackend
from aomi.model.auth import Policy
from aomi.model.aws import AWSRole
from aomi.validation import is_unicode
import aomi.error
import aomi.exceptions
LOG = logging.getLogger(__name__)
def auto_thaw(vault_client, opt):
"""Will thaw into a temporary location"""
icefile = opt.thaw_from
if not os.path.exists(icefile):
raise aomi.exceptions.IceFile("%s missing" % icefile)
thaw(vault_client, icefile, opt)
return opt
def seed(vault_client, opt):
"""Will provision vault based on the definition within a Secretfile"""
if opt.thaw_from:
opt.secrets = tempfile.mkdtemp('aomi-thaw')
auto_thaw(vault_client, opt)
Context.load(get_secretfile(opt), opt) \
.fetch(vault_client) \
.sync(vault_client, opt)
if opt.thaw_from:
rmtree(opt.secrets)
def render(directory, opt):
"""Render any provided template. This includes the Secretfile,
Vault policies, and inline AWS roles"""
if not os.path.exists(directory) and not os.path.isdir(directory):
os.mkdir(directory)
a_secretfile = render_secretfile(opt)
s_path = "%s/Secretfile" % directory
LOG.debug("writing Secretfile to %s", s_path)
open(s_path, 'w').write(a_secretfile)
ctx = Context.load(yaml.safe_load(a_secretfile), opt)
for resource in ctx.resources():
if not resource.present:
continue
if issubclass(type(resource), Policy):
if not os.path.isdir("%s/policy" % directory):
os.mkdir("%s/policy" % directory)
filename = "%s/policy/%s" % (directory, resource.path)
open(filename, 'w').write(resource.obj())
LOG.debug("writing %s to %s", resource, filename)
elif issubclass(type(resource), AWSRole):
if not os.path.isdir("%s/aws" % directory):
os.mkdir("%s/aws" % directory)
            r_obj = resource.obj()
            if 'policy' in r_obj:
                filename = "%s/aws/%s" % (directory,
                                          os.path.basename(resource.path))
                LOG.debug("writing %s to %s", resource, filename)
                open(filename, 'w').write(r_obj['policy'])
def export(vault_client, opt):
"""Export contents of a Secretfile from the Vault server
into a specified directory."""
ctx = Context.load(get_secretfile(opt), opt) \
.fetch(vault_client)
for resource in ctx.resources():
resource.export(opt.directory)
def maybe_colored(msg, color, opt):
"""Maybe it will render in color maybe it will not!"""
if opt.monochrome:
return msg
return colored(msg, color)
def normalize_val(val):
"""Normalize JSON/YAML derived values as they pertain
to Vault resources and comparison operations """
if is_unicode(val) and val.isdigit():
return int(val)
elif isinstance(val, list):
return ','.join(val)
elif val is None:
return ''
return val
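# Hedged illustration (not part of aomi proper) of what normalize_val yields
# for the three shapes it special-cases; useful when reading details_dict below.
def _normalize_val_examples():
    assert normalize_val(u'42') == 42                      # digit strings become ints
    assert normalize_val(['read', 'list']) == 'read,list'  # lists join on commas
    assert normalize_val(None) == ''                       # None compares as empty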
def details_dict(obj, existing, ignore_missing, opt):
"""Output the changes, if any, for a dict"""
existing = dict_unicodeize(existing)
obj = dict_unicodeize(obj)
for ex_k, ex_v in iteritems(existing):
new_value = normalize_val(obj.get(ex_k))
og_value = normalize_val(ex_v)
if ex_k in obj and og_value != new_value:
print(maybe_colored("-- %s: %s" % (ex_k, og_value),
'red', opt))
print(maybe_colored("++ %s: %s" % (ex_k, new_value),
'green', opt))
if (not ignore_missing) and (ex_k not in obj):
print(maybe_colored("-- %s: %s" % (ex_k, og_value),
'red', opt))
for ob_k, ob_v in iteritems(obj):
val = normalize_val(ob_v)
if ob_k not in existing:
print(maybe_colored("++ %s: %s" % (ob_k, val),
'green', opt))
return
def maybe_details(resource, opt):
"""At the first level of verbosity this will print out detailed
change information on for the specified Vault resource"""
if opt.verbose == 0:
return
if not resource.present:
return
obj = None
existing = None
if isinstance(resource, Resource):
obj = resource.obj()
existing = resource.existing
elif isinstance(resource, VaultBackend):
obj = resource.config
existing = resource.existing
if not obj:
return
if is_unicode(existing) and is_unicode(obj):
a_diff = difflib.unified_diff(existing.splitlines(),
obj.splitlines(),
lineterm='')
for line in a_diff:
if line.startswith('+++') or line.startswith('---'):
continue
if line[0] == '+':
print(maybe_colored("++ %s" % line[1:], 'green', opt))
elif line[0] == '-':
print(maybe_colored("-- %s" % line[1:], 'red', opt))
else:
print(line)
elif isinstance(existing, dict):
ignore_missing = isinstance(resource, VaultBackend)
details_dict(obj, existing, ignore_missing, opt)
def diff_a_thing(thing, opt):
"""Handle the diff action for a single thing. It may be a Vault backend
implementation or it may be a Vault data resource"""
changed = thing.diff()
if changed == ADD:
print("%s %s" % (maybe_colored("+", "green", opt), str(thing)))
elif changed == DEL:
print("%s %s" % (maybe_colored("-", "red", opt), str(thing)))
elif changed == CHANGED:
print("%s %s" % (maybe_colored("~", "yellow", opt), str(thing)))
elif changed == OVERWRITE:
print("%s %s" % (maybe_colored("+", "yellow", opt), str(thing)))
elif changed == CONFLICT:
print("%s %s" | % (maybe_colored("!", "red", opt), str(thing)))
if changed != OVERWRITE and changed != NOOP:
maybe_details(thing, opt)
def diff(vault_client, opt):
"""Derive a comparison between what is represented in the Secretfile
and what is actually live on a Vault instance"""
if opt.thaw_from:
opt.secrets = tempfile.mkdtemp('aomi-thaw | ')
auto_thaw(vault_client, opt)
ctx = Context.load(get_secretfile(opt), opt) \
.fetch(vault_client)
for backend in ctx.mounts():
diff_a_thing(backend, opt)
for resource in ctx.resources():
diff_a_thing(resource, opt)
if opt.thaw_from:
rmtree(opt.secrets)
|
semkiv/heppy_fcc | background_Bs2DsDsK_with_Ds2TauNu_analysis_cfg.py | Python | gpl-3.0 | 3,807 | 0.023903 | #!/usr/bin/env python
"""
Configuration script for the analyzer of B0s -> K*0 Ds+ Ds- background events
| | |-> tau- nu
| | |-> pi- pi- pi+ nu
| |-> tau+ nu
| |-> pi+ pi+ pi- nu
|-> K+ pi-
Note: it is supposed to be used within heppy_fcc framework
"""
import os
import heppy.framework.config as cfg
import logging
from ROOT import gSystem
from EventStore import EventStore as Events
from heppy_fcc.analyzers.BackgroundBs2DsDsKWithDs2TauNuAnalyzer import BackgroundBs2DsDsKWithDs2TauNuAnalyzer
logging.basicConfig(level=logging.WARNING)
# input component
# several input components can be declared and added to the list of selected components
input_component = cfg.Component('ILD-like', files = ['/afs/cern.ch/work/a/ansemkiv/private/FCC/analysis/background_Bs2DsDsK_with_Ds2TauNu_100k.root'])
selected_components = [input_component]
# analyzers
# analyzer for Bs -> Ds Ds K* events
bgana = cfg.Analyzer(BackgroundBs2DsDsKWithDs2TauNuAnalyzer,
smear_momentum = True,
| momentum_x_resolution = 0.01,
momentum_y_resolution = 0.01,
momentum_z_resolution = 0.01,
smear_pv = True,
# IDL-like res
pv_x_resolution = 0.0025,
pv_y_resolution = 0.0025,
pv_z_resolution = 0.0025,
# progressive res
# pv_x_resolution = 0.001,
# pv_y_reso | lution = 0.001,
# pv_z_resolution = 0.001,
# outstanding res
# pv_x_resolution = 0.0005,
# pv_y_resolution = 0.0005,
# pv_z_resolution = 0.0005,
smear_sv = True,
# IDL-like res
sv_x_resolution = 0.007,
sv_y_resolution = 0.007,
sv_z_resolution = 0.007,
# progressive res
# sv_x_resolution = 0.003,
# sv_y_resolution = 0.003,
# sv_z_resolution = 0.003,
# outstanding res
# sv_x_resolution = 0.0015,
# sv_y_resolution = 0.0015,
# sv_z_resolution = 0.0015,
smear_tv = True,
# IDL-like res
tv_x_resolution = 0.005,
tv_y_resolution = 0.005,
tv_z_resolution = 0.005,
# progressive res
# tv_x_resolution = 0.002,
# tv_y_resolution = 0.002,
# tv_z_resolution = 0.002,
# outstanding res
# tv_x_resolution = 0.001,
# tv_y_resolution = 0.001,
# tv_z_resolution = 0.001,
stylepath = os.environ.get('FCC') + 'lhcbstyle.C',
tree_name = 'Events',
tree_title = 'Events',
mc_truth_tree_name = 'MCTruth',
mc_truth_tree_title = 'MC Truth',
verbose = False)
# definition of a sequence of analyzers, the analyzers will process each event in this order
sequence = cfg.Sequence([bgana])
# finalization of the configuration object.
gSystem.Load('libdatamodel')
config = cfg.Config(components = selected_components, sequence = sequence, services = [],events_class = Events)
|
saks/hb | records/views.py | Python | mit | 1,196 | 0.001672 | from djmoney.money import Money
from rest_framework import permissions, viewsets
from .models import Record
from .serializers import RecordSerializer
class RecordViewSet(viewsets.ModelViewSet):
serializer_class = RecordSerializer
permission_classes = (permissions.IsAuthenticated,)
queryset = Record.objects.order_by('-created_at').all()
def get_queryset(self):
return Record.objects.filter(user=self.request.user).order_by('-created_at')
def _save(self, serializer):
'''
{
"transaction_type": "EXP",
"tags":["books"],
"amount":{
"amount": 15, "curren | cy": { "code":"CAD", "name": "foo" }
}
}
'''
amount = self.request.data.get('amount')
| tags = self.request.data.get('tags')
        if amount and 'amount' in amount and 'currency' in amount:
amount = Money(amount['amount'], amount['currency']['code'])
serializer.save(tags=tags, user=self.request.user, amount=amount)
def perform_create(self, serializer):
self._save(serializer)
def perform_update(self, serializer):
self._save(serializer)
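# A minimal standalone sketch (illustrative) of the amount-parsing step that
# _save performs before handing off to serializer.save().
def _parse_amount_example():
    payload = {'amount': 15, 'currency': {'code': 'CAD', 'name': 'foo'}}
    money = Money(payload['amount'], payload['currency']['code'])
    assert money.currency.code == 'CAD'
    return money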
|
juhojama/sensible-shroom | generator.py | Python | apache-2.0 | 6,684 | 0.005236 | #!/usr/bin/env python
import os
import sys
import collections
from flask import g, Flask, render_template, url_for, abort, redirect, request, flash
from flask_frozen import Freezer
from flask_wtf import Form
from flask_sqlalchemy import SQLAlchemy
from flask_login import LoginManager
from wtforms import StringField, PasswordField
from wtforms.validators import DataRequired, Email, ValidationError
import markdown
from werkzeug import cached_property
import yaml
from werkzeug.serving import run_simple
from functools import wraps
POSTS_FILE_EXTENSION = '.md'
basedir = os.path.abspath(os.path.dirname(__file__))
### Main app ###
app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///' + os.path.join(basedir, 'app.db')
app.config['SQLALCHEMY_MIGRATE_REPO'] = os.path.join(basedir, 'db_repository')
db = SQLAlchemy(app)
class User(db.Model):
id = db.Column(db.Integer, primary_key=True)
nickname = db.Column(db.String(64), index=True, unique=True)
email = db.Column(db.String(120), index=True, unique=True)
    def __init__(self, nickname, email):
        self.nickname = nickname
self.email = email
@property
def is_authenticated(self):
return True
@property
def is_active(self):
return True
@property
def is_anonymous(self):
return False
def get_id(self):
try:
return unicode(self.id) # python 2
except NameError:
return str(self.id) # python 3
def __repr__(self):
return '<User %r>' % (self.nickname)
class SortedDict(collections.MutableMapping):
def __init__(self, items=None, key=None, reverse=False):
self._items = {}
self._keys = []
if key:
self._key_fn = lambda k: key(self._items[k])
else:
self._key_fn = lambda k: self._items[k]
self._reverse = reverse
if items is not None:
self.update(items)
def __getitem__(self, key):
return self._items[key]
def __setitem__(self, key, value):
self._items[key] = value
if key not in self._keys:
self._keys.append(key)
self._keys.sort(key=self._key_fn, reverse=self._reverse)
def __delitem__(self, key):
self._items.pop(key)
self._keys.remove(key)
def __len__(self):
return len(self._keys)
def __iter__(self):
for key in self._keys:
yield key
def __repr__(self):
return '%s(%s)' % (self.__class__.__name__, self._items)
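# Quick demonstration (illustrative only): iteration order follows the
# *values* through the key= function, here newest date first.
def _sorted_dict_demo():
    import datetime
    d = SortedDict(key=lambda v: v, reverse=True)
    d['old'] = datetime.date(2014, 1, 1)
    d['new'] = datetime.date(2015, 6, 1)
    assert list(d) == ['new', 'old']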
class Blog(object):
def __init__(self, app, root_dir='', file_ext=POSTS_FILE_EXTENSION):
self.root_dir = root_dir
self.file_ext = file_ext
self._app = app
self._cache = SortedDict(key=lambda p: p.date, reverse=True)
self._initialize_cache()
@property
def posts(self):
return self._cache.values()
def get_post_or_404(self, path):
        # Returns the Post object for the given path, or aborts with a 404 NotFound error
try:
return self._cache[path]
except KeyError:
abort(404)
def _initialize_cache(self):
#Walks the root dir and adds all posts to cache
for (root, dirpaths, filepaths) in os.walk(self.root_dir):
for filepath in filepaths:
filename, ext = os.path.splitext(filepath)
if ext == self.file_ext:
path = os.path.join(root, filepath).replace(self.root_dir, '')
post = Post(path, root_dir=self.root_dir)
self._cache[post.urlpath] = post
class Post(object):
def __init__(self, path, root_dir=''):
self.urlpath = os.path.splitext(path.strip('/'))[0]
self.filepath = os.path.join(root_dir, path.strip('/'))
self._initialize_metadata()
@cached_property
def html(self):
with open(self.filepath, 'r') as fin:
content = fin.read().split('\n\n', 1)[1].strip()
return markdown.markdown(content, extensions=['codehilite'])
@property
def url(self):
return url_for('post', path=self.urlpath)
def _initialize_metadata(self): #_Private function!! :P
content = ''
with open(self.filepath, 'r') as fin:
for line in fin:
if not line.strip():
break
content += line
        self.__dict__.update(yaml.safe_load(content))
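# Small sketch (illustrative) of the front-matter convention Post relies on:
# a YAML header, a blank line, then the markdown body -- mirroring the
# split('\n\n', 1) used in the html property above.
def _front_matter_demo():
    raw = 'title: Hello\ndate: 2015-06-01\n\nBody **markdown** here.'
    meta = yaml.safe_load(raw.split('\n\n', 1)[0])
    assert meta['title'] == 'Hello'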
class EmailPasswordForm(Form):
email = StringField('Email', validators=[DataRequired(), Email()])
password = PasswordField('Password', validators=[DataRequired()])
### Login manager ###
login_manager = LoginManager()
login_manager.init_app(app)
### Blog ###
blog = Blog(app, root_dir='posts')
### Freezer ###
freezer = Freezer(app)
app.config['SECRET_KEY'] = 'salaisuus'
@login_manager.user_loader
def load_user(id):
return User.query.get(int(id))
@app.template_filter('date')
def format_date(value, format='%d %B, %Y'): # pretty-print the date
return value.strftime(format)
|
@app.route('/')
def index():
return render_template("index.html",
posts=blog.posts,
title='Blogialusta|Tervetuloa',
user='Testi-User')
@app.route('/blog/<path:path>/')
def post(path):
post = blog.get_post_or_404(path)
return render_template("post.html",
post=post) # pass the Post class insta | nce to the template
### / User management --> ###
@app.route('/register/')
def register():
return render_template("register.html",
title='Rekisteroidy')
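# login() below calls is_safe_url(), which this file never defines; this is a
# minimal same-host redirect check (an assumption, not the author's code).
def is_safe_url(target):
    from urlparse import urljoin, urlparse  # Python 2, matching this script
    if not target:
        return True
    ref = urlparse(request.host_url)
    test = urlparse(urljoin(request.host_url, target))
    return test.scheme in ('http', 'https') and ref.netloc == test.netloc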
@app.route('/login/', methods=["GET", "POST"])
def login():
form = EmailPasswordForm()
if form.validate_on_submit():
        flash('Kirjauduit sisaan.')
        next_url = request.args.get('next')
        if not is_safe_url(next_url):
            return abort(400)
        return redirect(next_url or url_for('index'))
return render_template("login.html",
title='Kirjaudu',
form=form)
def login_required(f):
@wraps(f)
def decorated_function(*args, **kwargs):
if g.user is None:
return redirect(url_for('login', next=request.url))
return f(*args, **kwargs)
return decorated_function
@app.route('/logout/')
@login_required
def logout():
pass
### <-- User management / ###
if __name__ == '__main__':
if len(sys.argv) > 1 and sys.argv[1] == 'build':
freezer.freeze()
else:
post_files = [post.filepath for post in blog.posts]
app.run(port=5000, host='0.0.0.0', debug=True, extra_files=post_files)
|
pexip/os-kombu | t/unit/transport/test_mongodb.py | Python | bsd-3-clause | 16,952 | 0 | from __future__ import absolute_import, unicode_literals
import datetime
import pytest
from case import MagicMock, call, patch, skip
from kombu import Connection
from kombu.five import Empty
def _create_mock_connection(url='', **kwargs):
from kombu.transport import mongodb # noqa
class _Channel(mongodb.Channel):
# reset _fanout_queues for each instance
_fanout_queues = {}
collections = {}
now = datetime.datetime.utcnow()
def _create_client(self):
mock = MagicMock(name='client')
# we need new mock object for every col | lection
def get_collection(name):
try:
return self.collections[name]
except KeyError:
mock = self.collections[name] = MagicMock(
name='collection:%s' % name)
return mock
mock.__getite | m__.side_effect = get_collection
return mock
def get_now(self):
return self.now
class Transport(mongodb.Transport):
Channel = _Channel
return Connection(url, transport=Transport, **kwargs)
@skip.unless_module('pymongo')
class test_mongodb_uri_parsing:
def test_defaults(self):
url = 'mongodb://'
channel = _create_mock_connection(url).default_channel
hostname, dbname, options = channel._parse_uri()
assert dbname == 'kombu_default'
assert hostname == 'mongodb://127.0.0.1'
def test_custom_host(self):
url = 'mongodb://localhost'
channel = _create_mock_connection(url).default_channel
hostname, dbname, options = channel._parse_uri()
assert dbname == 'kombu_default'
def test_custom_database(self):
url = 'mongodb://localhost/dbname'
channel = _create_mock_connection(url).default_channel
hostname, dbname, options = channel._parse_uri()
assert dbname == 'dbname'
def test_custom_credentials(self):
url = 'mongodb://localhost/dbname'
channel = _create_mock_connection(
url, userid='foo', password='bar').default_channel
hostname, dbname, options = channel._parse_uri()
assert hostname == 'mongodb://foo:bar@localhost/dbname'
assert dbname == 'dbname'
def test_correct_readpreference(self):
url = 'mongodb://localhost/dbname?readpreference=nearest'
channel = _create_mock_connection(url).default_channel
hostname, dbname, options = channel._parse_uri()
assert options['readpreference'] == 'nearest'
class BaseMongoDBChannelCase:
def _get_method(self, cname, mname):
collection = getattr(self.channel, cname)
method = getattr(collection, mname.split('.', 1)[0])
for bit in mname.split('.')[1:]:
method = getattr(method.return_value, bit)
return method
def set_operation_return_value(self, cname, mname, *values):
method = self._get_method(cname, mname)
if len(values) == 1:
method.return_value = values[0]
else:
method.side_effect = values
    def declare_broadcast_queue(self, queue):
self.channel.exchange_declare('fanout_exchange', type='fanout')
self.channel._queue_bind('fanout_exchange', 'foo', '*', queue)
assert queue in self.channel._broadcast_cursors
def get_broadcast(self, queue):
return self.channel._broadcast_cursors[queue]
def set_broadcast_return_value(self, queue, *values):
        self.declare_broadcast_queue(queue)
cursor = MagicMock(name='cursor')
cursor.__iter__.return_value = iter(values)
self.channel._broadcast_cursors[queue]._cursor = iter(cursor)
def assert_collection_accessed(self, *collections):
self.channel.client.__getitem__.assert_has_calls(
[call(c) for c in collections], any_order=True)
def assert_operation_has_calls(self, cname, mname, calls, any_order=False):
method = self._get_method(cname, mname)
method.assert_has_calls(calls, any_order=any_order)
def assert_operation_called_with(self, cname, mname, *args, **kwargs):
self.assert_operation_has_calls(cname, mname, [call(*args, **kwargs)])
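# Sketch (illustrative) of the dotted-name trick _get_method relies on:
# stubbing 'find.count' through chained return_values makes
# collection.find(...).count(...) yield the canned value. MagicMock here is
# the one already imported from `case` at the top of this file.
def _dotted_mock_demo():
    collection = MagicMock(name='collection')
    collection.find.return_value.count.return_value = 77
    assert collection.find({'queue': 'q'}).count() == 77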
@skip.unless_module('pymongo')
class test_mongodb_channel(BaseMongoDBChannelCase):
def setup(self):
self.connection = _create_mock_connection()
self.channel = self.connection.default_channel
# Tests for "public" channel interface
def test_new_queue(self):
self.channel._new_queue('foobar')
self.channel.client.assert_not_called()
def test_get(self):
import pymongo
self.set_operation_return_value('messages', 'find_and_modify', {
'_id': 'docId', 'payload': '{"some": "data"}',
})
event = self.channel._get('foobar')
self.assert_collection_accessed('messages')
self.assert_operation_called_with(
'messages', 'find_and_modify',
query={'queue': 'foobar'},
remove=True,
sort=[
('priority', pymongo.ASCENDING),
],
)
assert event == {'some': 'data'}
self.set_operation_return_value('messages', 'find_and_modify', None)
with pytest.raises(Empty):
self.channel._get('foobar')
def test_get_fanout(self):
self.set_broadcast_return_value('foobar', {
'_id': 'docId1', 'payload': '{"some": "data"}',
})
event = self.channel._get('foobar')
self.assert_collection_accessed('messages.broadcast')
assert event == {'some': 'data'}
with pytest.raises(Empty):
self.channel._get('foobar')
def test_put(self):
self.channel._put('foobar', {'some': 'data'})
self.assert_collection_accessed('messages')
self.assert_operation_called_with('messages', 'insert', {
'queue': 'foobar',
'priority': 9,
'payload': '{"some": "data"}',
})
def test_put_fanout(self):
        self.declare_broadcast_queue('foobar')
self.channel._put_fanout('foobar', {'some': 'data'}, 'foo')
self.assert_collection_accessed('messages.broadcast')
self.assert_operation_called_with('broadcast', 'insert', {
'queue': 'foobar', 'payload': '{"some": "data"}',
})
def test_size(self):
self.set_operation_return_value('messages', 'find.count', 77)
result = self.channel._size('foobar')
self.assert_collection_accessed('messages')
self.assert_operation_called_with(
'messages', 'find', {'queue': 'foobar'},
)
assert result == 77
def test_size_fanout(self):
        self.declare_broadcast_queue('foobar')
cursor = MagicMock(name='cursor')
cursor.get_size.return_value = 77
self.channel._broadcast_cursors['foobar'] = cursor
result = self.channel._size('foobar')
assert result == 77
def test_purge(self):
self.set_operation_return_value('messages', 'find.count', 77)
result = self.channel._purge('foobar')
self.assert_collection_accessed('messages')
self.assert_operation_called_with(
'messages', 'remove', {'queue': 'foobar'},
)
assert result == 77
def test_purge_fanout(self):
        self.declare_broadcast_queue('foobar')
cursor = MagicMock(name='cursor')
cursor.get_size.return_value = 77
self.channel._broadcast_cursors['foobar'] = cursor
result = self.channel._purge('foobar')
cursor.purge.assert_any_call()
assert result == 77
def test_get_table(self):
state_table = [('foo', '*', 'foo')]
stored_table = [('bar', '*', 'bar')]
self.channel.exchange_declare('test_exchange')
self.channel.state.exchanges['test_exchange']['table'] = state_table
self.set_operation_return_value('routing', 'find', [{
'_id': 'docId',
'routing_key': stored_table[0][0],
'pattern': stored_table[0][1],
'queue' |
biswajitsahu/kuma | vendor/packages/git/head.py | Python | mpl-2.0 | 2,739 | 0.00073 | # head.py
# Copyright (C) 2008-2010 Michael Trier (mtrier@gmail.com) and contributors
#
# This module is part of GitPython and is released under
# the BSD License: http://www.opensource.org/licenses/bsd-license.php
import commit
class Head(object):
"""
A Head is a named reference to a Commit. Every Head instance contains a name
and a Commit object.
Examples::
>>> repo = Repo("/path/to/repo")
>>> head = repo.heads[0]
>>> head.name
'master'
>>> head.commit
<git.Commit "1c09f116cbc2cb4100fb6935bb162daa4723f455">
>>> head.commit.id
'1c09f116cbc2cb4100fb6935bb162daa4723f455'
"""
def __init__(self, name, commit):
"""
Initialize a newly instanced Head
`name`
is the name of the head
`commit`
is the Commit object that the head points to
"""
self.name = name
self.commit = commit
@cla | ssmethod
def find_all( | cls, repo, **kwargs):
"""
Find all Heads in the repository
`repo`
is the Repo
`kwargs`
Additional options given as keyword arguments, will be passed
to git-for-each-ref
Returns
git.Head[]
List is sorted by committerdate
"""
options = {'sort': "committerdate",
'format': "%(refname)%00%(objectname)"}
options.update(kwargs)
output = repo.git.for_each_ref("refs/heads", **options)
return cls.list_from_string(repo, output)
@classmethod
def list_from_string(cls, repo, text):
"""
Parse out head information into a list of head objects
``repo``
is the Repo
``text``
is the text output from the git-for-each-ref command
Returns
git.Head[]
"""
heads = []
for line in text.splitlines():
heads.append(cls.from_string(repo, line))
return heads
@classmethod
def from_string(cls, repo, line):
"""
Create a new Head instance from the given string.
``repo``
is the Repo
``line``
is the formatted head information
Format::
name: [a-zA-Z_/]+
<null byte>
id: [0-9A-Fa-f]{40}
Returns
git.Head
"""
full_name, ids = line.split("\x00")
if full_name.startswith('refs/heads/'):
name = full_name[len('refs/heads/'):]
else:
name = full_name
c = commit.Commit(repo, id=ids)
return Head(name, c)
def __repr__(self):
return '<git.Head "%s">' % self.name
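# Illustrative parse (not from GitPython's tests) of the "<refname>\x00<id>"
# line format emitted by git-for-each-ref with the format string used in
# find_all above; 'repo' is assumed to be an initialized git.Repo.
def _head_line_demo(repo):
    line = 'refs/heads/master\x00' + 'a' * 40
    head = Head.from_string(repo, line)
    assert head.name == 'master'
    return head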
|
dieb/algorithms.py | tests/sorting/test_bubblesort.py | Python | mit | 283 | 0.003534 | fr | om algorithms.sorting.bubblesort import bubble_sort
def test_bubble_sort_small(array_ints_small, assert_sorted):
assert_sorted(array_ints_small, bubble_sort)
def test_bubble_sort_large(array_ints_large, assert_sorted):
assert_sorted(array_ints_large[:800], bubble_s | ort)
|
arxanas/caenbrew | caenbrew/packages/ffmpeg.py | Python | gpl-3.0 | 1,345 | 0 | from .x264 import X264Package
from .yasm import YasmPackage
from ..packaging import AutotoolsPackage, package
@package
class FfmpegPackage(AutotoolsPackage):
"""Ffmpeg: record, convert and stream audio and video."""
name = "ffmpeg"
homepage = "https://www.ffmpeg.org/"
version = "3.0"
dependencies = [X264Package, YasmPackage]
artifacts = ["bin/ffmpeg",
"bin/ffprobe",
"bin/ffserver",
"share/ffmpeg"]
url = "http://ffmpeg.org/releases/ffmpeg-3.0.tar.bz2"
configure_options = ["--enable-avresample",
"--enable-gpl",
"--enable-libx264",
"--enable-pos | tproc",
"--enable-version3",
"--enable-x11grab",
"--enable-shared",
"--ena | ble-pic"]
def __init__(self, *args, **kwargs):
"""Help `ffmpg` detect `libx264` in `configure`."""
super(FfmpegPackage, self).__init__(*args, **kwargs)
prefix_dir = self._config["prefix_dir"]
self.configure_options += ["--extra-ldflags=-L{}/lib"
.format(prefix_dir),
"--extra-cflags=-I{}/include"
.format(prefix_dir)]
|
khchine5/lino-welfare | lino_welfare/projects/eupen/tests/test_watchtim.py | Python | agpl-3.0 | 36,147 | 0.004096 | # -*- coding: UTF-8 -*-
# Copyright 2013-2017 Luc Saffre
# This file is part of Lino Welfare.
#
# Lino Welfare is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# Lino Welfare is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with Lino Welfare. If not, see
# <http://www.gnu.org/licenses/>.
"""This module contains tests for the :mod:`watch_tim
<lino_welfare.management.commands.watch_tim>` command.
You can run only these tests by issuing::
$ cd lino_welfare/projects/eupen
$ python manage.py test tests.test_watchtim
The module contains a single huge test case because we don't want
Django to recreate a virgin test database for each test.
"""
from __future__ import unicode_literals
from builtins import str
import logging
logger = logging.getLogger(__name__)
from django.conf import settings
from django.core.exceptions import ValidationError
from django.utils import translation
from lino.api import dd, rt
from lino.utils import i2d
from lino.utils.djangotest import TestCase
from lino_welfare.modlib.welfare.management.commands.watch_tim import process_line
POST_GEORGES = """{"method":"POST","alias":"PAR","id":"0000023633","time":"20130220 08:55:30",\
"user":"MELANIE","data":{"IDPAR":"0000023633","FIRME":"Schneider Georges","NAME2":"",\
"RUE":"","CP":"","IDPRT":"S","PAYS":"B","TEL":"","FAX":"","COMPTE1":"","NOTVA":"",\
"COMPTE3":"","IDPGP":"","DEBIT":"","CREDIT":"","ATTRIB":"N","IDMFC":"30","LANGUE":"D",\
"IDBUD":"","PROF":"80","CODE1":"","CODE2":"","CODE3":"",\
"DATCREA":{"__date__":{"year":2013,"month":2,"day":20}},"ALLO":"","NB1":"","NB2":"",\
"IDDEV":"","MEMO":"","COMPTE2":"","RUENUM":"","RUEBTE":"","DEBIT2":"","CREDIT2":"",\
"IMPDATE": {"__date__":{"year":0,"month":0,"day":0}},"ATTRIB2":"","CPTSYSI":"",\
"EMAIL":"","MVIDATE":{"__date__":{"year":0,"month":0,"day":0}},"IDUSR":"","DOMI1":""}}"""
PUT_MAX_MORITZ = """{"method":"PUT","alias":"PAR","id":"0000005088","time":"20130222 12:06:01",
"user":"MELANIE","data":{"IDPAR":"0000005088","FIRME":"Müller Max Moritz","NAME2":"",
"RUE":"Werthplatz 12","CP":"4700","IDPRT":"I","PAYS":"B","TEL":"","FAX":"",
"COMPTE1":"001-1234567-89","NOTVA":"BE-0999.999.999","COMPTE3":"","IDPGP":"",
"DEBIT":"","CREDIT":"","ATTRIB":"N","IDMFC":"","LANGUE":"D","IDBUD":"",
"PROF":"80","CODE1":"RH","CODE2":"","CODE3":"",
"DATCREA":{"__date__":{"year":1991,"month":8,"day":12}},
"ALLO":"Herr","NB1":"","NB2":"","IDDEV":"","MEMO":"","COMPTE2":"",
"RUENUM":"","RUEBTE":"","DEBIT2":"","CREDIT2":"",
"IMPDATE":{"__date__":{"year":1999,"month":5,"day":3}},"ATTRIB2":"",
"CPTSYSI":"","EMAIL":"","MVIDATE":{"__date__":{"year":0,"month":0,"day":0}},
"IDUSR":"ALICIA","DOMI1":""}}
"""
POST_PXS = """{"method":"POST","alias":"PXS","id":"0000023635","time":"20130222 11:07:42",
"user":"MELANIEL","data":{"IDPAR":"0000023635","NAME":"Heinz Hinz",
"GEBDAT":{"__date__":{"year":0,"month":0,"day":0}},"APOTHEKE":"","HILFE":"",
"ANTEIL":"","IDMUT":"","VOLLMACHT":{"__date__":{"year":0,"month":0,"day":0}},
"LAUFZEIT":{"__date__":{"year":0,"month":0,"day":0}},"DRINGEND":"","MONATLICH":"",
"SOZIAL":"","MIETE":"","MAF":"","REFERENZ":"","MEMO":"","SEXE":"","GENERIKA":"",
"IDPRT":"S","CARDNUMBER":"","VALID1":{"__date__":{"year":0,"month":0,"day":0}},
"VALID2":{"__date__":{"year":0,"month":0,"day":0}},"CARDTYPE":0,"NATIONALIT":"",
"BIRTHPLACE":"","NOBLECOND":"","CARDISSUER":""}}
"""
# // 2013-02-25 11:46:31 Exception("Cannot handle conversion from <class 'lino_welfare.modlib.pcsw.models.Household'> to <class 'lino_welfare.modlib.pcsw.models.Client'>",)
PUT_PAR_POTTER = """{"method":"PUT","alias":"PAR","id":"0000004260","time":"20130225 11:44:16",
"user":"WIL011","data":{"IDPAR":"0000004260","FIRME":"Voldemort-Potter Harald",
"NAME2":"","RUE":"Schilsweg 26","CP":"4700","IDPRT":"I","PAYS":"B","TEL":"","FAX":"","COMPTE1":"",
"NOTVA":"BE-0999.999.999","COMPTE3":"","IDPGP":"","DEBIT":"","CREDIT":"","ATTRIB":"N","IDMFC":"",
"LANGUE":"D","IDBUD":"","PROF":"80","CODE1":"ER","CODE2":"","CODE3":"",
"DATCREA":{"__date__":{"year":1985,"month":7,"day":23}},"ALLO":"Eheleute","NB1":"","NB2":"",
"IDDEV":"","MEMO":"","COMPTE2":"","RUENUM":"","RUEBTE":"","DEBIT2":"","CREDIT2":"",
"IMPDATE":{"__date__":{"year":2000,"month":6,"day":26}},"ATTRIB2":"","CPTSYSI":"","EMAIL":"",
"MVIDATE":{"__date__":{"year":0,"month":0,"day":0}},"IDUSR":"ALICIA","DOMI1":""}}
"""
#// 2013-02-25 12:00:37 Exception("Cannot handle conversion from <class 'lino_welfare.modlib.pcsw.models.Person'> to <class 'lino_welfare.modlib.pcsw.models.Household'>",)
PUT_PAR_6283 = """
{"method":"PUT","alias":"PAR","id":"0000006283","time":"20130225 11:52:56","user":"WIL011","data":
{"IDPAR":"0000006283","FIRME":"Willekens-Delanuit Paul","NAME2":"","RUE":"Rotenbergplatz","CP":"4700",
"IDPRT":"I","PAYS":"B","TEL":"","FAX":"","COMPTE1":"","NOTVA":"","COMPTE3":"","IDPGP":"",
"DEBIT":"","CREDIT":"","ATTRIB":"A","IDMFC":"","LANGUE":"D","IDBUD":"","PROF":"80","CODE1":"",
"CODE2":"","CODE3":"","DATCREA":{"__date__":{"year":1998,"month":11,"day":17}},
"ALLO":"Eheleute","NB1":"","NB2":"","IDDEV":"","MEMO":"","COMPTE2":"","RUENUM":" 24","RUEBTE":"",
"DEBIT2":"","CREDIT2":"","IMPDATE":{"__date__":{"year":1999,"month":8,"day":9}},
"ATTRIB2":"","CPTSYSI":"","EMAIL":"",
"MVIDATE":{"__date__":{"year":0,"month":0,"day":0}},"IDUSR":"","DOMI1":""}}
"""
User = dd.resolve_model('users.User')
Partner = dd.resolve_model('contacts.Partner')
Company = dd.resolve_model('contacts.Company')
Person = dd.resolve_model('contacts.Person')
Client = dd.resolve_model('pcsw.Client')
Coaching = dd.resolve_model('coachings.Coaching')
Household = dd.resolve_model('households.Household')
households_Type = dd.resolve_model("households.Type")
pcsw = dd.resolve_app("pcsw")
isip = dd.resolve_app("isip")
from lino_welfare.modlib.isip import models as isip
class TestCase(TestCase):
maxDiff = None
def test00(self):
ASD = rt.models.coachings.CoachingType(
id=isip.COACHINGTYPE_ASD, name="ASD")
ASD.save()
DSBE = rt.models.coachings.CoachingType(
id=isip.COACHINGTYPE_DSBE, name="DSBE")
DSBE.save()
User(username='watch_tim').save()
User(username='alicia', coaching_type=DSBE).save()
User(username='roger', coaching_type=ASD).save()
User(username='edgar').save()
households_Type(name="Eheleute", pk=1).save()
settings.SITE.uppercase_last_name = True
#~ def test01(self):
"""
AttributeError 'NoneType' object has no attribute 'coaching_type'
"""
self.assertDoesNotExist(Client, id=23633)
process_line(POST_GEORGES)
georges = Client.objects.get(id=23633)
self.assertEqual(georges.first_name, "Georges")
georges.first_name = "Peter"
georges.save()
process_line(POST_GEORGES)
georges = Client.objects.get(id=23633)
self.assertEqual(georges.first_name, "Georges")
ar = rt.models.changes.Chang | esByMaster.request(georges)
self.assertEqual(ar.get_ | total_count(), 0)
# Company becomes Client
        # ValidationError([u'A Partner cannot be parent for a Client']) (2013-02-22 12:42:07)
# A Partner in TIM has both `PAR->NoTva` nonempty and
# `PARATTR_N` set. It currently exists in Lino as a Company but
# not as a Client. `watch_tim` then must create a Client after
# creating also the intermediate Person. The Company child must
# be removed.
obj = Company(name="Müller Max Moritz", id=5088)
obj.save()
ar = rt.models.changes.ChangesByMaster.request(obj)
self.assertEqual(ar.get_total_count(), 0)
global PUT_MAX_MORITZ
process_line(PUT_MAX_MORITZ)
|
davebshow/projx | projx/__init__.py | Python | mit | 434 | 0 | from .api import Projection, execute_etl
from .grammar import parse_query
from .nxprojx import (reset_index, match, traverse, project, transfer,
| combine, build_subgraph, NXProjector)
from .utils import (test_graph, project_etl, transfer_etl, combine_etl,
multi_transform_etl, draw_simple_graph, remove_edges,
proj_density, neo4j2nx_etl, edgelist2neo4j_etl)
import mo | dules
|
JoaoFelipe/data-mining-algorithms | examples/custom_association_rules.py | Python | mit | 560 | 0.007143 |
import sys
sys.path.append("..")
from data_mining.association_rule.base import rules, lift, support
from data_mining.association_rule.apriori import apriori
from data_mining.association_rule.liftmin import apriorilift
from pat_data_association_rules import compare
LE = | "leite"
PA = "pao"
SU = "suco"
OV = "ovos"
CA = "cafe"
BI = "biscoito"
AR = "arroz"
FE = "feijao"
CE = "cerveja"
MA = "manteiga"
data = [ | [CA, PA, MA], [LE, CE, PA, MA], [CA, PA, MA], [LE, CA, PA, MA],
[CE], [MA], [PA], [FE], [AR, FE], [AR]]
compare(data, 0.0000000001, 5.0, 0)
|
rjshade/grpc | tools/run_tests/run_microbenchmark.py | Python | bsd-3-clause | 10,997 | 0.008457 | #!/usr/bin/env python2.7
# Copyright 2017, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import cgi
import multiprocessing
import os
import subprocess
import sys
import argparse
import python_utils.jobset as jobset
import python_utils.start_port_server as start_port_server
flamegraph_dir = os.path.join(os.path.expanduser('~'), 'FlameGraph')
os.chdir(os.path.join(os.path.dirname(sys.argv[0]), '../..'))
if not os.path.exists('reports'):
os.makedirs('reports')
start_port_server.start_port_server()
def fnize(s):
out = ''
for c in s:
if c in '<>, /':
if len(out) and out[-1] == '_': continue
out += '_'
else:
out += c
return out
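# A worked mapping (illustrative) for fnize: each separator in '<>, /' becomes
# a single underscore and consecutive separators collapse, so benchmark names
# turn into filesystem-safe report names.
def _fnize_example():
    assert fnize('BM_Foo<TCP, 1>/0') == 'BM_Foo_TCP_1_0'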
# index html
index_html = """
<html>
<head>
<title>Microbenchmark Results</title>
</head>
<body>
"""
def heading(name):
global index_html
index_html += "<h1>%s</h1>\n" % name
def link(txt, tgt):
global index_html
index_html += "<p><a href=\"%s\">%s</a></p>\n" % (
cgi.escape(tgt, quote=True), cgi.escape(txt))
def text(txt):
global index_html
index_html += "<p><pre>%s</pre></p>\n" % cgi.escape(txt)
def collect_latency(bm_name, args):
"""generate latency profiles"""
benchmarks = []
profile_analysis = []
cleanup = []
heading('Latency Profiles: %s' % bm_name)
subprocess.check_call(
['make', bm_name,
'CONFIG=basicprof', '-j', '%d' % multiprocessing.cpu_count()])
for line in subprocess.check_output(['bins/basicprof/%s' % bm_name,
'--benchmark_list_tests']).splitlines():
link(line, '%s.txt' % fnize(line))
benchmarks.append(
jobset.JobSpec(['bins/basicprof/%s' % bm_name,
'--benchmark_filter=^%s$' % line,
'--benchmark_min_time=0.05'],
environ={'LATENCY_TRACE': '%s.trace' % fnize(line)}))
profile_analysis.append(
jobset.JobSpec([sys.executable,
'tools/profiling/latency_profile/profile_analyzer.py',
'--source', '%s.trace' % fnize(line), '--fmt', 'simple',
'--out', 'reports/%s.txt' % fnize(line)], timeout_seconds=None))
cleanup.append(jobset.JobSpec(['rm', '%s.trace' % fnize(line)]))
# periodically flush out the list of jobs: profile_analysis jobs at least
# consume upwards of five gigabytes of ram in some cases, and so analysing
# hundreds of them at once is impractical -- but we want at least some
# concurrency or the work takes too long
if len(benchmarks) >= min(16, multiprocessing.cpu_count()):
# run up to half the cpu count: each benchmark can use up to two cores
# (one for the microbenchmark, one for the data flush)
jobset.run(benchmarks, maxjobs=max(1, multiprocessing.cpu_count()/2))
jobset.run(profile_analysis, maxjobs=multiprocessing.cpu_count())
jobset.run(cleanup, maxjobs=multiprocessing.cpu_count())
benchmarks = []
profile_analysis = []
cleanup = []
# run the remaining benchmarks that weren't flushed
if len(benchmarks):
jobset.run(benchmarks, maxjobs=max(1, multiprocessing.cpu_count()/2))
jobset.run(profile_analysis, maxjobs=multiprocessing.cpu_count())
jobset.run(cleanup, maxjobs=multiprocessing.cpu_count())
def collect_perf(bm_name, args):
"""generate flamegraphs"""
heading('Flamegraphs: %s' % bm_name)
subprocess.check_call(
['make', bm_name,
'CONFIG=mutrace', '-j', '%d' % multiprocessing.cpu_count()])
benchmarks = []
profile_analysis = []
cleanup = []
for line in subprocess.check_output(['bins/mutrace/%s' % bm_name,
'--benchmark_list_tests']).splitlines():
link(line, '%s.svg' % fnize(line))
benchmarks.append(
jobset.JobSpec(['perf', 'record', '-o', '%s-perf.data' % fnize(line),
'-g', '-F', '997',
'bins/mutrace/%s' % bm_name,
'--benchmark_filter=^%s$' % line,
'--benchmark_min_time=10']))
profile_analysis.append(
jobset.JobSpec(['tools/run_tests/performance/process_local_perf_flamegraphs.sh'],
environ = {
'PERF_BASE_NAME': fnize(line),
'OUTPUT_DIR': 'reports',
'OUTPUT_FILENAME': fnize(line),
}))
cleanup.append(jobset.JobSpec(['rm', '%s-perf.data' % fnize(line)]))
cleanup.append(jobset.JobSpec(['rm', '%s-out.perf' % fnize(line)]))
# periodically flush out the list of jobs: temporary space required for this
# processing is large
if len(benchmarks) >= 20:
# run up to half the cpu count: each benchmark can use up to two cores
# (one for the microbenchmark, one for the data flush)
jobset.run(benchmarks, maxjobs=1)
jobset.run(profile_analysis, maxjobs=multiprocessing.cpu_count())
jobset.run(cleanup, maxjobs=multiprocessing.cpu_count())
benchmarks = []
profile_analysis = []
cleanup = []
# run the remaining benchmarks that weren't flushed
if len(benchmarks):
jobset.run(benchmarks, maxjobs=1)
jobset.run(profile_analysis, maxjobs=multiprocessing.cpu_count())
jobset.run(cleanup, maxjobs=multiprocessing.cpu_count())
def run_summary(bm_name, cfg, base_json_name):
subprocess.check_call(
| ['make', bm_name,
'CONFIG=%s' % cfg, '-j', '%d' % multiprocessing.cpu_count()])
cmd | = ['bins/%s/%s' % (cfg, bm_name),
'--benchmark_out=%s.%s.json' % (base_json_name, cfg),
'--benchmark_out_format=json']
if args.summary_time is not None:
cmd += ['--benchmark_min_time=%d' % args.summary_time]
return subprocess.check_output(cmd)
def collect_summary(bm_name, args):
heading('Summary: %s [no counters]' % bm_name)
text(run_summary(bm_name, 'opt', bm_name))
heading('Summary: %s [with counters]' % bm_name)
text(run_summary(bm_name, 'counters', bm_name))
if args.bigquery_upload:
with open('%s.csv' % bm_name, 'w') as f:
f.write(subprocess.check_output(['tools/profiling/microbenchmarks/bm2bq.py',
'%s.counters.json' % bm_name,
'%s.opt.json' % bm_name]))
subprocess.check_call(['bq', 'load', 'microbenchmarks.microbenchmarks', '%s.csv' % bm_name])
collectors = {
'latency': collect_latency,
'perf': collect_perf,
'summary': collect_summary,
}
argp = argparse.ArgumentParser(description='Collect data from microbenchmarks')
argp.add_argument('-c', '--collect',
choices=sorted(collectors.keys()),
nargs='*',
default=sorted(collectors.keys()),
|
cjordog/NRLWebsite | demo/uw1.py | Python | mit | 14,884 | 0.019887 | #!/usr/bin/python
import json, math, sys, string, random, subprocess, serial
from time import localtime, strftime, clock, time # for timestamping packets
import time
import hashlib #for checksum purposes
import mysql.connector # mysql database
import getpass
import urllib2
import requests
sys.path.append('/usr/lib/python2.7/dist-packages')
################################################################################
### Auxiliary functions
################################################################################
### Returns the hex value of the xor of all characters in a string.
def xor_string_hash(string):
ret = 0
for char in string:
ret ^= ord(char)
return hex(ret)[2:] ### return everything but the first two characters, "0x"
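### Worked example (illustrative): for 'ab', 0x61 ^ 0x62 == 0x03, and
### hex(3)[2:] strips the '0x' prefix, so the function returns '3'.
def _xor_string_hash_demo():
    assert xor_string_hash('ab') == '3'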
### Formats numbers with suffix e.g.: ord(1) -> "1st", ord(2) -> "2nd"
def ord(n):
return str(n)+("th" if 4<=n%100<=20 else {1:"st",2:"nd",3:"rd"}.get(n%10, "th"))
################################################################################
### 0. Logfile Setup
################################################################################
### TODO: may want to remove logging when debugging is complete, or allow it to be toggled
logfile_name = 'logs/UWNet-{0}.LOG'.format(strftime("20%y-%m-%d--%H:%M:%S", localtime()))
logfile = open(logfile_name, 'w')
### | #############################################################################
### 1. Port configuration
################################################################################
### Setup the port to be read from ( /dev/ttyUSB0 ) with timeout to enable
### recov | ery from packet loss.
port_ttyUSB0 = serial.Serial(port='/dev/ttyUSB0', baudrate=115200)
port_ttyUSB1 = serial.Serial(port='/dev/ttyUSB1', baudrate=115200, timeout= 25)
### For each port, enter command mode (+++A) and enable checksum ($HHCRW,MMCHK,1),
### then check for success.
port_ttyUSB0.write("+++A\r\n")
if ("MMOKY" not in port_ttyUSB0.readline()):
print "error in here"
logfile.write('CRITICAL ERROR: cannot enter command mode for ttyUSB0 ... exiting')
exit(0) ### TODO: do something better upon failure, maybe try to fix!
### TODO: set MMCHK to 1 if want checksum
port_ttyUSB0.write("$HHCRW,MMCHK,0\r\n")
if ("MMOKY" not in port_ttyUSB0.readline()):
print "Something wrong with USB 0 "
logfile.write('error in setting the checksum register for ttyUSB0')
exit(0)
port_ttyUSB1.write("+++A\r\n")
if ("MMOKY" not in port_ttyUSB1.readline()):
print "Something wrong with USB1"
logfile.write('CRITICAL ERROR: cannot enter command mode for ttyUSB1 ... exiting')
exit(0)
port_ttyUSB1.write("$HHCRW,MMCHK,0\r\n")
if ("MMOKY" not in port_ttyUSB1.readline()):
print "Something wrong here, usb 1"
logfile.write('error in setting the checksum register for ttyUSB1')
exit(0)
################################################################################
### 2. Retrieve experiments
################################################################################
### Resources:
### http://dev.mysql.com/doc/refman/5.5/en/index.html
### https://docs.python.org/2/howto/webservers.html?highlight=mysql
### http://dev.mysql.com/doc/connector-python/en/connector-python-example-cursor-select.html
### Connect to the database.
cnx = mysql.connector.connect(user= 'ruolinfan', password='pass', host='localhost', database='UWNet')
### TODO: may need to change parameters for mysql.connector.connect() depending on
### which machine we are using.
### TODO: create a standard user for the database; include script in create.sql
### getpass can supply the machine's user name, but the password above is still hard-wired; is there a better way than hard-coding it?
cursor = cnx.cursor()
cursor_insert = cnx.cursor()
### Retrieve rows from InputQueue table for experiments which have not been run
###retrieve_experiments = ("SELECT id, mpwr, lpwr, ppwr, mbkn, lbkn, pbkn, mmod, lmod, pmod, rptt, testData FROM InputQueue WHERE exitStatus IS NULL")
###cursor.execute(retrieve_experiments)
### Store each row in a dictionary.
someurl1 = 'http://apus.cs.ucla.edu/getParams.php'
content = urllib2.urlopen(someurl1).read()
###print content
###print content;
parsed_json = json.loads(content)
if not parsed_json['experiments']:
print "No experiment to run!"
exit(0)
rows = parsed_json['experiments'][0]['row']
###selected_rows = {
###for (id, mpwr, lpwr, ppwr, mbkn, lbkn, pbkn, mmod, lmod, pmod, rptt, testData) in cursor:
selected_rows = { 'mpwr': int(rows['mpwr']), 'lpwr': int(rows['lpwr']), 'ppwr': int(rows['ppwr']), 'mbkn':int(rows['mbkn']), 'lbkn': int(rows['lbkn']), 'pbkn': int(rows['pbkn']), 'mmod': int(rows['mmod']), 'lmod':int(rows['lmod']), 'pmod': int(rows['pmod']), 'rptt': int(rows['rptt']), 'testData': rows['testData'] }
id = rows['id']
###print id
fileid = selected_rows['testData']
fileid = fileid[5:]
someurl2 = 'http://apus.cs.ucla.edu/getFile.php?filename='+fileid
content1 = urllib2.urlopen(someurl2).read()
###print content1
### Collect results from each trial in this dictionary for insertion into Results
### table. The keys correspond to the 'id' column in the InputQueue table.
allResults = {}
exit_code = 0
################################################################################
### 3. Run each experiment on each combination of { pwr, bkn, mod }, rptt times
################################################################################
### handle each enqueued experiment
### TODO: report errors, store in database
### code NULL: exited normally
### code NOT NULL: error
### - KE: kermit configuration
### - DB: database access
### - PT: port configuration
### - etc...
### TODO: Devise error code scheme, or decide that it is unnecessary...
###for id in selected_rows:
for x in range(0, 1):
row = selected_rows
logfile.write('===== STARTING EXPERIMENT {0} =====\n\n'.format(id))
print '===== STARTING EXPERIMENT {0} =====\n'.format(id)
### Each element of the following list will be its own row in Results.
### All elements in this list will have the same experimentID.
resultsList = []
text_file = open("Output.txt", "w")
firstline = rows['id'] + '\n'
text_file.write(firstline)
secondline = str(exit_code) + '\n'
text_file.write(secondline)
for transmission_mode in range(row['lmod'], row['mmod'] + 1, row['pmod']):
if transmission_mode == 1:
bytes_per_block = 38
elif transmission_mode == 2:
bytes_per_block = 80
elif transmission_mode == 3:
bytes_per_block = 122
elif transmission_mode == 4:
bytes_per_block = 164
elif transmission_mode == 5:
bytes_per_block = 248
else:
logfile.write('ERROR: Transmit mode of {0} invalid; ranges from 1 to 5 ... skipping\n'.format(transmission_mode))
exit_code = 1
continue
logfile.write('-> transmission_mode := {0}\n'.format(transmission_mode))
print '-> transmission_mode := {0}'.format(transmission_mode)
for blocks_per_packet in range(row['lbkn'], row['mbkn'] + 1, row['pbkn']):
logfile.write('-> blocks_per_packet := {0}\n'.format(blocks_per_packet))
print '-> blocks_per_packet := {0}'.format(blocks_per_packet)
packet_length = bytes_per_block * blocks_per_packet
for transmission_power in range(row['lpwr'], row['mpwr'] + 1, row['ppwr']):
logfile.write('-> transmission_power := {0}\n'.format(transmission_power))
print '-> transmission_power := {0}'.format(transmission_power)
port_ttyUSB0.write("$HHCRW,TXPWR,{0}\r\n".format(transmission_power))
logfile.write('-> TXPWR := transmission_power\n\n')
print '-> TXPWR := transmission_power\n'
### Collect data for each trial in a dictionary, keyed by trial number.
collectionOfTrials = {}
for trial in range(row['rptt']): ### repeat the experiment!
logfile.write('\tTrial {0}\n\n'.format(trial))
print '\tTrial {0}\n'.format(trial)
### Keep track of packet loss, retransmissions, and execution time.
n_loss = 0
n_retx = 0
start_time = time.time()
|
kaiix/depot_tools | apply_issue.py | Python | bsd-3-clause | 10,737 | 0.009686 | #!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Applies an issue from Rietveld.
"""
import getpass
import json
import logging
import optparse
import os
import subprocess
import sys
import urllib2
import breakpad # pylint: disable=W0611
import annotated_gclient
import auth
import checkout
import fix_encoding
import gclient_utils
import rietveld
import scm
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
class Unbuffered(object):
"""Disable buffering on a file object."""
def __init__(self, stream):
self.stream = stream
def write(self, data):
self.stream.write(data)
self.stream.flush()
def __getattr__(self, attr):
return getattr(self.stream, attr)
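# Typical use (mirrors main() below): wrap a stream so progress output from
# long-running subprocess steps is flushed immediately. A tiny self-check
# against an in-memory buffer:
def _unbuffered_demo():
    import io
    wrapped = Unbuffered(io.BytesIO())
    wrapped.write(b'progress...')                # write + flush through the wrapper
    assert wrapped.getvalue() == b'progress...'  # other attrs are delegated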
def main():
# TODO(pgervais): This function is way too long. Split.
sys.stdout = Unbuffered(sys.stdout)
parser = optparse.OptionParser(description=sys.modules[__name__].__doc__)
parser.add_option(
'-v', '--verbose', action='count', default=0,
help='Prints debugging infos')
parser.add_option(
'-e', '--email',
help='Email address to access rietveld. If not specified, anonymous '
'access will be used.')
parser.add_option(
'-E', '--email-file',
help='File containing the email address to access rietveld. '
'If not specified, anonymous access will be used.')
parser.add_option(
'-k', '--private-key-file',
help='Path to file containing a private key in p12 format for OAuth2 '
'authentication with "notasecret" password (as generated by Google '
'Cloud Console).')
parser.add_option(
'-i', '--issue', type='int', help='Rietveld issue number')
parser.add_option(
'-p', '--patchset', type='int', help='Rietveld issue\'s patchset number')
parser.add_option(
'-r',
'--root_dir',
default=os.getcwd(),
help='Root directory to apply the patch')
parser.add_option(
'-s',
'--server',
default='http://codereview.chromium.org',
help='Rietveld server')
parser.add_option('--no-auth', action='store_true',
help='Do not attempt authenticated requests.')
parser.add_option('--revision-mapping', default='{}',
help='When running gclient, annotate the got_revisions '
'using the revision-mapping.')
parser.add_option('-f', '--force', action='store_true',
help='Really run apply_issue, even if .update.flag '
'is detected.')
parser.add_option('-b', '--base_ref', help='DEPRECATED do not use.')
parser.add_option('--whitelist', action='append', default=[],
help='Patch only specified file(s).')
parser.add_option('--blacklist', action='append', default=[],
help='Don\'t patch specified file(s).')
parser.add_option('-d', '--ignore_deps', action='store_true',
help='Don\'t run gclient sync on DEPS changes.')
auth.add_auth_options(parser)
options, args = parser.parse_args()
auth_config = auth.extract_auth_config_from_options(options)
if options.whitelist and options.blacklist:
parser.error('Cannot specify both --whitelist and --blacklist')
if options.email and options.email_file:
parser.error('-e and -E options are incompatible')
if (os.path.isfile(os.path.join(os.getcwd(), 'update.flag'))
and not options.force):
print 'update.flag file found: bot_update has run and checkout is already '
print 'in a consistent state. No actions will be performed in this step.'
retu | rn 0
logging.basicConfig(
format='%(levelname)5s %(module)11s(%(lineno)4d): %(message)s',
level=[logging.WARNING, logging.INFO, logging.DEBUG][
min(2, options.verbose)])
if args:
parser.error('Extra argument(s) "%s" not understood' % ' '.join(args))
if not options.issue:
pars | er.error('Require --issue')
options.server = options.server.rstrip('/')
if not options.server:
parser.error('Require a valid server')
options.revision_mapping = json.loads(options.revision_mapping)
# read email if needed
if options.email_file:
if not os.path.exists(options.email_file):
parser.error('file does not exist: %s' % options.email_file)
with open(options.email_file, 'rb') as f:
options.email = f.read().strip()
print('Connecting to %s' % options.server)
# Always try un-authenticated first, except for OAuth2
if options.private_key_file:
# OAuth2 authentication
obj = rietveld.JwtOAuth2Rietveld(options.server,
options.email,
options.private_key_file)
properties = obj.get_issue_properties(options.issue, False)
else:
# Passing None as auth_config disables authentication.
obj = rietveld.Rietveld(options.server, None)
properties = None
# Bad except clauses order (HTTPError is an ancestor class of
# ClientLoginError)
# pylint: disable=E0701
try:
properties = obj.get_issue_properties(options.issue, False)
except urllib2.HTTPError as e:
if e.getcode() != 302:
raise
if options.no_auth:
exit('FAIL: Login detected -- is issue private?')
# TODO(maruel): A few 'Invalid username or password.' are printed first,
# we should get rid of those.
except rietveld.upload.ClientLoginError as e:
# Fine, we'll do proper authentication.
pass
if properties is None:
obj = rietveld.Rietveld(options.server, auth_config, options.email)
try:
properties = obj.get_issue_properties(options.issue, False)
except rietveld.upload.ClientLoginError as e:
print('Accessing the issue requires proper credentials.')
return 1
if not options.patchset:
options.patchset = properties['patchsets'][-1]
print('No patchset specified. Using patchset %d' % options.patchset)
issues_patchsets_to_apply = [(options.issue, options.patchset)]
depends_on_info = obj.get_depends_on_patchset(options.issue, options.patchset)
while depends_on_info:
depends_on_issue = int(depends_on_info['issue'])
depends_on_patchset = int(depends_on_info['patchset'])
try:
depends_on_info = obj.get_depends_on_patchset(depends_on_issue,
depends_on_patchset)
issues_patchsets_to_apply.insert(0, (depends_on_issue,
depends_on_patchset))
except urllib2.HTTPError:
print ('The patchset that was marked as a dependency no longer '
'exists: %s/%d/#ps%d' % (
options.server, depends_on_issue, depends_on_patchset))
print 'Therefore it is likely that this patch will not apply cleanly.'
print
depends_on_info = None
num_issues_patchsets_to_apply = len(issues_patchsets_to_apply)
if num_issues_patchsets_to_apply > 1:
print
print 'apply_issue.py found %d dependent CLs.' % (
num_issues_patchsets_to_apply - 1)
print 'They will be applied in the following order:'
num = 1
for issue_to_apply, patchset_to_apply in issues_patchsets_to_apply:
print ' #%d %s/%d/#ps%d' % (
num, options.server, issue_to_apply, patchset_to_apply)
num += 1
print
for issue_to_apply, patchset_to_apply in issues_patchsets_to_apply:
issue_url = '%s/%d/#ps%d' % (options.server, issue_to_apply,
patchset_to_apply)
print('Downloading patch from %s' % issue_url)
try:
patchset = obj.get_patch(issue_to_apply, patchset_to_apply)
except urllib2.HTTPError as e:
print(
'Failed to fetch the patch for issue %d, patchset %d.\n'
'Try visiting %s/%d') % (
issue_to_apply, patchset_to_apply,
options.server, issue_to_apply)
return 1
if options.whitelist:
patchset.patches = [patch for patch in patchset.patches
if patch.filename in options.whitelist]
if options.blacklist:
pa |
daxm/fmcapi | unit_tests/failoverinterfacemacaddressconfigs.py | Python | bsd-3-clause | 958 | 0 | import logging
import fmcapi
def test__failoverinterfacemacaddressconfigs(fmc):
logging. | info( |
"Test FailoverInterfaceMACAddressConfigs. get, post, put, "
"delete FailoverInterfaceMACAddressConfigs Objects"
)
obj1 = fmcapi.DeviceHAFailoverMAC(fmc=fmc, ha_name="HaName")
obj1.p_interface(name="GigabitEthernet0/0", device_name="device_name")
obj1.failoverActiveMac = "0050.5686.718f"
obj1.failoverStandbyMac = "1050.5686.0c2e"
logging.info("DeviceHAFailoverMAC POST->")
logging.info(obj1.format_data())
obj1.post()
del obj1
obj1 = fmcapi.DeviceHAFailoverMAC(fmc=fmc)
obj1.edit(name="GigabitEthernet0/0", ha_name="HaName")
obj1.failoverStandbyMac = "0050.5686.0c2e"
logging.info("DeviceHAFailoverMAC PUT->")
logging.info(obj1.format_data())
del obj1
obj1 = fmcapi.DeviceHAFailoverMAC(fmc=fmc)
obj1.edit(name="GigabitEthernet0/0", ha_name="HaName")
obj1.delete()
|
cfossace/crits | crits/events/api.py | Python | mit | 3,519 | 0.000853 | from django.core.urlresolvers import reverse
from tastypie import authorization
from tastypie.authentication import MultiAuthentication
from crits.events.event import Event
from crits.events.handlers import add_new_event
from crits.core.api import CRITsApiKeyAuthentication, CRITsSessionAuthentication
from crits.core.api import CRITsSerializer, CRITsAPIResource
from crits.vocabulary.events import EventTypes
class EventResource(CRITsAPIResource):
"""
Class to handle everything related to the Event API.
Currently supports GET and POST.
"""
class Meta:
object_class = Event
allowed_methods = ('get', 'post', 'patch')
resource_name = "events"
authentication = MultiAuthentication(CRITsApiKeyAuthentication(),
CRITsSessionAuthentication())
authorization = authorization.Authorization()
serializer = CRITsSerializer()
def get_object_list(self, request):
"""
Use the CRITsAPIResource to get our objects but provide the class to get
the objects from.
:param request: The incoming request.
:type request: :class:`django.http.HttpRequest`
:returns: Resulting objects in the specified format (JSON by default).
"""
return super(EventResource, self).get_object_list(request, Event)
def obj_create(self, bundle, **kwargs):
"""
Handles creating Events through the API.
:param bundle: Bundle containing the information to create the Event.
:type bundle: Tastypie Bundle object.
:returns: HttpResponse.
"""
analyst = bundle.request.user.username
title = bundle.data.get('title', None)
description = bundle.data.get('description', None)
event_type = bundle.data.get('event_type', None)
source = bundle.data.get('source', None)
method = bundle.data.get('method', None)
reference = bundle.data.get('reference', None)
date = bundle.data.get('date', None)
bucket_list = bundle.data.get('bucket_list', None)
ticket = bundle.data.get('ticket', None)
content = {'return_code': 0,
'type': 'Event'}
if not title or not event_type or not source or not description:
content['message'] = 'Must provide a title, event_type, source, and description.'
| self.crits_response(content)
if event_type not in EventTypes.values():
content['message'] = 'Not a valid Event Type.'
self.crits_response(content)
result = add_new_event(title,
description,
event_type,
source,
method,
reference,
date | ,
analyst,
bucket_list,
ticket)
if result.get('message'):
content['message'] = result.get('message')
content['id'] = result.get('id', '')
if result.get('id'):
url = reverse('api_dispatch_detail',
kwargs={'resource_name': 'events',
'api_name': 'v1',
'pk': result.get('id')})
content['url'] = url
if result['success']:
content['return_code'] = 0
self.crits_response(content)
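        # Illustrative request body (not from the original file; the field
        # values are placeholders):
        #     {"title": "Phishing wave", "description": "Observed campaign",
        #      "event_type": "Phishing", "source": "Analyst"}
        # Anything missing from title/event_type/source/description is
        # rejected above before add_new_event() is ever called.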
|
sumaxime/LIFAP1 | TD/TD7/Code/Python/7.py | Python | mit | 539 | 0 | #!/usr/bin/python
# Fill an array with the binomial coefficients of Pascal's triangle
from math import factor | ial as fact
def combin(n, p):
res = int(fact(n) / (fact(p) * fact(n - p)))
return res
def triangle_ | pascal(tab):
for i in range(len(tab)):
for j in range(i + 1):
tab[i][j] = combin(i, j)
tab = [[[] for i in range(6)] for j in range(6)]
triangle_pascal(tab)
# Display the array
for k in range(len(tab)):
for m in range(k + 1):
print(tab[k][m], '', sep='\t', end='')
print('')
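# Expected output for the 6x6 table above (illustrative, tab-separated):
# 1
# 1    1
# 1    2    1
# 1    3    3    1
# 1    4    6    4    1
# 1    5    10   10   5    1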
|
odoousers2014/odoo | addons/account/account.py | Python | agpl-3.0 | 190,066 | 0.00694 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# |
##############################################################################
import logging
from datetime import datetime
from dateutil.relativedelta import relativedelta
from operator import itemgetter
import time
import openerp
from openerp import SUPERUSER_ID, api
from openerp import tools
from openerp.osv import fields, osv, expression
from openerp.tools.translate import _
from openerp.tools.float_utils import float_round as round
from openerp.exceptions import UserError
import openerp.addons.decimal_precision as dp
_logger = logging.getLogger(__name__)
def check_cycle(self, cr, uid, ids, context=None):
""" climbs the ``self._table.parent_id`` chains for 100 levels or
until it can't find any more parent(s)
Returns true if it runs out of parents (no cycle), false if
it can recurse 100 times without ending all chains
"""
level = 100
while len(ids):
cr.execute('SELECT DISTINCT parent_id '\
'FROM '+self._table+' '\
'WHERE id IN %s '\
'AND parent_id IS NOT NULL',(tuple(ids),))
ids = map(itemgetter(0), cr.fetchall())
if not level:
return False
level -= 1
return True
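# Illustrative sketch (not from the original file): check_cycle is meant to be
# wired into a model's _constraints, in the same tuple format used further
# below in this file. The model name here is hypothetical.
#
#     class account_example(osv.osv):
#         _name = 'account.example'
#         _columns = {
#             'parent_id': fields.many2one('account.example', 'Parent'),
#         }
#         _constraints = [
#             (check_cycle,
#              'Error! You cannot create recursive records.', ['parent_id']),
#         ]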
class res_company(osv.osv):
_inherit = "res.company"
_columns = {
'income_currency_exchange_account_id': fields.many2one(
'account.account',
string="Gain Exchange Rate Account",
domain="[('type', '=', 'other')]",),
'expense_currency_exchange_account_id': fields.many2one(
'account.account',
string="Loss Exchange Rate Account",
domain="[('type', '=', 'other')]",),
}
class account_payment_term(osv.osv):
_name = "account.payment.term"
_description = "Payment Term"
_columns = {
'name': fields.char('Payment Term', translate=True, required=True),
'active': fields.boolean('Active', help="If the active field is set to False, it will allow you to hide the payment term without removing it."),
'note': fields.text('Description', translate=True),
'line_ids': fields.one2many('account.payment.term.line', 'payment_id', 'Terms', copy=True),
}
_defaults = {
'active': 1,
}
_order = "name"
def compute(self, cr, uid, id, value, date_ref=False, context=None):
if not date_ref:
date_ref = datetime.now().strftime('%Y-%m-%d')
pt = self.browse(cr, uid, id, context=context)
amount = value
result = []
obj_precision = self.pool.get('decimal.precision')
prec = obj_precision.precision_get(cr, uid, 'Account')
for line in pt.line_ids:
if line.value == 'fixed':
amt = round(line.value_amount, prec)
elif line.value == 'procent':
amt = round(value * (line.value_amount/100.0), prec)
elif line.value == 'balance':
amt = round(amount, prec)
if amt:
next_date = (datetime.strptime(date_ref, '%Y-%m-%d') + relativedelta(days=line.days))
if line.days2 < 0:
next_first_date = next_date + relativedelta(day=1,months=1) #Getting 1st of next month
next_date = next_first_date + relativedelta(days=line.days2)
if line.days2 > 0:
next_date += relativedelta(day=line.days2, months=1)
result.append( (next_date.strftime('%Y-%m-%d'), amt) )
amount -= amt
amount = reduce(lambda x,y: x+y[1], result, 0.0)
dist = round(value-amount, prec)
if dist:
result.append( (time.strftime('%Y-%m-%d'), dist) )
return result
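    # Worked example (illustrative, not part of the original file): for a term
    # with two lines -- 30% due immediately ('procent', value_amount=30,
    # days=0, days2=0) and the rest after 30 days ('balance', days=30,
    # days2=0) -- compute() on value=100.0 with date_ref='2014-01-01' yields:
    #     [('2014-01-01', 30.0), ('2014-01-31', 70.0)]
    # The trailing `dist` entry is only appended when rounding leaves a
    # remainder to distribute.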
class account_payment_term_line(osv.osv):
_name = "account.payment.term.line"
_description = "Payment Term Line"
_columns = {
'value': fields.selection([('procent', 'Percent'),
('balance', 'Balance'),
('fixed', 'Fixed Amount')], 'Computation',
required=True, help="""Select here the kind of valuation related to this payment term line. Note that you should have your last line with the type 'Balance' to ensure that the whole amount will be treated."""),
'value_amount': fields.float('Amount To Pay', digits_compute=dp.get_precision('Payment Term'), help="For percent enter a ratio between 0-100%."),
'days': fields.integer('Number of Days', required=True, help="Number of days to add before computation of the day of month." \
"If Date=15/01, Number of Days=22, Day of Month=-1, then the due date is 28/02."),
'days2': fields.integer('Day of the Month', required=True, help="Day of the month, set -1 for the last day of the current month. If it's positive, it gives the day of the next month. Set 0 for net days (otherwise it's based on the beginning of the month)."),
'payment_id': fields.many2one('account.payment.term', 'Payment Term', required=True, select=True, ondelete='cascade'),
}
_defaults = {
'value': 'balance',
'days': 30,
'days2': 0,
}
_order = "value desc,days"
def _check_percent(self, cr, uid, ids, context=None):
obj = self.browse(cr, uid, ids[0], context=context)
if obj.value == 'procent' and ( obj.value_amount < 0.0 or obj.value_amount > 100.0):
return False
return True
_constraints = [
(_check_percent, 'Percentages for Payment Term Line must be between 0 and 100.', ['value_amount']),
]
class account_account_type(osv.osv):
_name = "account.account.type"
_description = "Account Type"
def _get_financial_report_ref(self, cr, uid, context=None):
obj_data = self.pool.get('ir.model.data')
obj_financial_report = self.pool.get('account.financial.report')
financial_report_ref = {}
for key, financial_report in [
('asset','account_financial_report_assets0'),
('liability','account_financial_report_liability0'),
('income','account_financial_report_income0'),
('expense','account_financial_report_expense0'),
]:
try:
financial_report_ref[key] = obj_financial_report.browse(cr, uid,
obj_data.get_object_reference(cr, uid, 'account', financial_report)[1],
context=context)
except ValueError:
pass
return financial_report_ref
def _get_current_report_type(self, cr, uid, ids, name, arg, context=None):
res = {}
financial_report_ref = self._get_financial_report_ref(cr, uid, context=context)
for record in self.browse(cr, uid, ids, context=context):
res[record.id] = 'none'
for key, financial_report in financial_report_ref.items():
list_ids = [x.id for x in financial_report.account_type_ids]
if record.id in list_ids:
res[record.id] = key
return res
def _save_report_type(self, cr, uid, account_type_id, field_name, field_value, arg, context=None):
field_value = field_value or 'none'
|
fracpete/python-weka-wrapper-examples | src/wekaexamples/classifiers/incremental_classifier.py | Python | gpl-3.0 | 1,881 | 0.001595 | # This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# incremental_classifier.py
# Copyright (C) 2014 Fracpete (pythonwekawrapper at gmail dot com)
import os
import sys
import traceback
import weka.core.jvm as jvm
import wekaexamples.helper as helper
from weka.core.converters import Loader
from weka.classifiers import Classifier
def main(args):
"""
Trains a NaiveBayesUpdateable class | ifier incrementally on a dataset. The dataset can be supplied as parameter.
:param args: the commandline arguments
:type args: list
"""
# load a dataset
if len(args) <= 1:
data_file = helper.get_data_dir() + os.sep + "vote.arff"
else:
data_file = args[1]
helper.print_info("Loading dataset: " + data_file)
loader = Loader(classname="weka.core.converters.ArffLoader") |
data = loader.load_file(data_file, incremental=True)
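    # Added commentary: with incremental=True the loader returns the dataset
    # header right away and can itself be iterated row by row (see the loop
    # below), instead of materialising the whole ARFF file in memory.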
data.class_is_last()
# classifier
nb = Classifier(classname="weka.classifiers.bayes.NaiveBayesUpdateable")
nb.build_classifier(data)
# train incrementally
for inst in loader:
nb.update_classifier(inst)
print(nb)
if __name__ == "__main__":
try:
jvm.start()
main(sys.argv)
except Exception, e:
print(traceback.format_exc())
finally:
jvm.stop()
|
quantumlib/ReCirq | recirq/readout_scan/tasks.py | Python | apache-2.0 | 4,314 | 0.001854 | # Copyright 2020 Google
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# --- This file has been autogenerated --- #
# --- from docs/Readout-Data-Collection.ipynb --- #
# --- Do not edit this file directly --- #
import os
import numpy as np
import sympy
import cirq
import recirq
@recirq.json_serializable_dataclass(namespace='recirq.readout_scan',
registry=recirq.Registry,
frozen=True)
class ReadoutScanTask:
"""Scan over Ry(theta) angles from -pi/2 to 3pi/2 tracing out a sinusoid
which is primarily affected by readout error.
See Also:
:py:func:`run_readout_scan`
Attributes:
dataset_id: A unique identifier for this dataset.
device_name: The device to run on, by name.
n_shots: The number of repetitions for each theta value.
qubit: The qubit to benchmark.
resolution_factor: We select the number of points in the linspace
so that the special points: (-1/2, 0, 1/2, 1, 3/2) * pi are
always included. The total number of theta evaluations
is resolution_factor * 4 + 1.
"""
dataset_id: str
device_name: str
n_shots: int
qubit: cirq.GridQubit
re | solution_factor: int
@property
def fn(self):
n_shots = _abbrev_n_shots(n_shots=self.n_shots)
qubit = _abbrev_grid_qubit(self.qubit)
return (f'{self.dataset_id}/'
f'{self.device_name}/'
f'q-{qubit}/'
f'ry_scan_{self.resolution_factor}_{n_shots}')
# Define the following helper functions to make nicer `fn` keys
# for the tasks:
def _abbrev_n_shots(n_shots: int) -> str:
| """Shorter n_shots component of a filename"""
if n_shots % 1000 == 0:
return f'{n_shots // 1000}k'
return str(n_shots)
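# Illustrative examples (not part of the original file):
#   _abbrev_n_shots(50000) -> '50k'
#   _abbrev_n_shots(1234)  -> '1234'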
def _abbrev_grid_qubit(qubit: cirq.GridQubit) -> str:
"""Formatted grid_qubit component of a filename"""
return f'{qubit.row}_{qubit.col}'
EXPERIMENT_NAME = 'readout-scan'
DEFAULT_BASE_DIR = os.path.expanduser(f'~/cirq-results/{EXPERIMENT_NAME}')
def run_readout_scan(task: ReadoutScanTask,
base_dir=None):
"""Execute a :py:class:`ReadoutScanTask` task."""
if base_dir is None:
base_dir = DEFAULT_BASE_DIR
if recirq.exists(task, base_dir=base_dir):
print(f"{task} already exists. Skipping.")
return
# Create a simple circuit
theta = sympy.Symbol('theta')
circuit = cirq.Circuit([
cirq.ry(theta).on(task.qubit),
cirq.measure(task.qubit, key='z')
])
# Use utilities to map sampler names to Sampler objects
sampler = recirq.get_sampler_by_name(device_name=task.device_name)
# Use a sweep over theta values.
# Set up limits so we include (-1/2, 0, 1/2, 1, 3/2) * pi
# The total number of points is resolution_factor * 4 + 1
n_special_points: int = 5
resolution_factor = task.resolution_factor
theta_sweep = cirq.Linspace(theta, -np.pi / 2, 3 * np.pi / 2,
resolution_factor * (n_special_points - 1) + 1)
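    # Worked instance (added commentary): with resolution_factor=2 this is a
    # 2*(5-1)+1 = 9-point linspace with step pi/4, so the special angles
    # (-1/2, 0, 1/2, 1, 3/2)*pi all land exactly on grid points.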
thetas = np.asarray([v for ((k, v),) in theta_sweep.param_tuples()])
flat_circuit, flat_sweep = cirq.flatten_with_sweep(circuit, theta_sweep)
# Run the jobs
print(f"Collecting data for {task.qubit}", flush=True)
results = sampler.run_sweep(program=flat_circuit, params=flat_sweep,
repetitions=task.n_shots)
# Save the results
recirq.save(task=task, data={
'thetas': thetas,
'all_bitstrings': [
recirq.BitArray(np.asarray(r.measurements['z']))
for r in results]
}, base_dir=base_dir)
|
nagyistoce/devide | modules/filters/resources/python/resampleImageViewFrame.py | Python | bsd-3-clause | 2,908 | 0.005158 | #!/usr/bin/env python
# -*- coding: ansi_x3.4-1968 -*-
# generated by wxGlade 0.6.3 on Sat Feb 09 13:36:33 2008
import wx
# begin wxGlade: extracode
# end wxGlade
class resampleImageViewFrame(wx.Frame):
def | __init__(self, *args, **kwds):
# begin wxGlade: resampleImageViewFrame.__init__
kwds["style"] = w | x.DEFAULT_FRAME_STYLE
wx.Frame.__init__(self, *args, **kwds)
self.viewFramePanel = wx.Panel(self, -1)
self.label_1 = wx.StaticText(self.viewFramePanel, -1, "Interpolation type:")
self.interpolationTypeChoice = wx.Choice(self.viewFramePanel, -1, choices=["Nearest Neighbour", "Linear", "Cubic"])
self.label_2 = wx.StaticText(self.viewFramePanel, -1, "x,y,z Magnification:")
self.magFactorXText = wx.TextCtrl(self.viewFramePanel, -1, "")
self.label_3 = wx.StaticText(self.viewFramePanel, -1, ",")
self.magFactorYText = wx.TextCtrl(self.viewFramePanel, -1, "")
self.label_4 = wx.StaticText(self.viewFramePanel, -1, ",")
self.magFactorZText = wx.TextCtrl(self.viewFramePanel, -1, "")
self.__set_properties()
self.__do_layout()
# end wxGlade
def __set_properties(self):
# begin wxGlade: resampleImageViewFrame.__set_properties
self.SetTitle("frame_1")
self.interpolationTypeChoice.SetSelection(0)
# end wxGlade
def __do_layout(self):
# begin wxGlade: resampleImageViewFrame.__do_layout
sizer_1 = wx.BoxSizer(wx.VERTICAL)
sizer_2 = wx.BoxSizer(wx.VERTICAL)
sizer_3 = wx.BoxSizer(wx.VERTICAL)
sizer_5 = wx.BoxSizer(wx.HORIZONTAL)
sizer_4 = wx.BoxSizer(wx.HORIZONTAL)
sizer_4.Add(self.label_1, 0, wx.RIGHT|wx.ALIGN_CENTER_VERTICAL, 3)
sizer_4.Add(self.interpolationTypeChoice, 1, wx.ALIGN_CENTER_VERTICAL, 0)
sizer_3.Add(sizer_4, 1, wx.BOTTOM|wx.EXPAND, 7)
sizer_5.Add(self.label_2, 0, wx.RIGHT|wx.ALIGN_CENTER_VERTICAL, 3)
sizer_5.Add(self.magFactorXText, 0, wx.RIGHT|wx.ALIGN_CENTER_VERTICAL, 3)
sizer_5.Add(self.label_3, 0, wx.RIGHT|wx.ALIGN_CENTER_VERTICAL, 3)
sizer_5.Add(self.magFactorYText, 0, wx.RIGHT|wx.ALIGN_CENTER_VERTICAL, 3)
sizer_5.Add(self.label_4, 0, wx.RIGHT|wx.ALIGN_CENTER_VERTICAL, 3)
sizer_5.Add(self.magFactorZText, 0, wx.ALIGN_CENTER_VERTICAL, 0)
sizer_3.Add(sizer_5, 1, wx.EXPAND, 0)
sizer_2.Add(sizer_3, 1, wx.ALL|wx.EXPAND, 7)
self.viewFramePanel.SetSizer(sizer_2)
sizer_1.Add(self.viewFramePanel, 1, wx.EXPAND, 0)
self.SetSizer(sizer_1)
sizer_1.Fit(self)
self.Layout()
# end wxGlade
# end of class resampleImageViewFrame
if __name__ == "__main__":
app = wx.PySimpleApp(0)
wx.InitAllImageHandlers()
frame_1 = resampleImageViewFrame(None, -1, "")
app.SetTopWindow(frame_1)
frame_1.Show()
app.MainLoop()
|
GoogleCloudPlatform/explainable_ai_sdk | explainable_ai_sdk/metadata/tf/v1/utils_test.py | Python | apache-2.0 | 2,629 | 0.004184 | # Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language go | verning permissions and
# limitations under the License.
"""Tests for metadata utils."""
import os
import tensorflow.compat.v1 as tf
from explainable_ai_sdk.metadata.tf.v1 import utils
class UtilsTest(tf.test.TestCase):
def test_save_graph_model_explicit_session(self):
sess = tf.Session(g | raph=tf.Graph())
with sess.graph.as_default():
x = tf.placeholder(shape=[None, 10], dtype=tf.float32, name='inp')
weights = tf.constant(1., shape=(10, 2), name='weights')
model_path = os.path.join(tf.test.get_temp_dir(), 'explicit')
utils.save_graph_model(sess, model_path, {'x': x}, {'w': weights}, {'tag'})
self.assertTrue(os.path.isfile(os.path.join(model_path, 'saved_model.pb')))
tf.reset_default_graph()
loading_session = tf.Session(graph=tf.Graph())
with loading_session.graph.as_default():
tf.saved_model.loader.load(loading_session, ['tag'], model_path)
self.assertIn(x.op.name,
[n.name for n in loading_session.graph.as_graph_def().node])
self.assertIn(weights.op.name,
[n.name for n in loading_session.graph.as_graph_def().node])
def test_save_graph_model_default_session(self):
x = tf.placeholder(shape=[None, 10], dtype=tf.float32, name='inp')
weights = tf.constant(1., shape=(10, 2), name='weights')
model_path = os.path.join(tf.test.get_temp_dir(), 'default')
utils.save_graph_model(
tf.Session(), model_path, {'x': x}, {'w': weights}, {'tag'})
self.assertTrue(os.path.isfile(os.path.join(model_path, 'saved_model.pb')))
def test_save_graph_model_kwargs(self):
x = tf.placeholder(shape=[None, 10], dtype=tf.float32, name='inp')
weights = tf.constant(1., shape=(10, 2), name='weights')
model_path = os.path.join(tf.test.get_temp_dir(), 'kwargs')
utils.save_graph_model(
tf.Session(),
model_path, {'x': x}, {'w': weights}, {'tag'},
main_op=tf.tables_initializer(),
strip_default_attrs=False)
self.assertTrue(os.path.isfile(os.path.join(model_path, 'saved_model.pb')))
if __name__ == '__main__':
tf.test.main()
|
dem4ply/sebastian | manage.py | Python | gpl-2.0 | 252 | 0 | #!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "sebastian.settings")
from django.core.m | anagement import execute_from_command_line
execute_from_c | ommand_line(sys.argv)
|
EmreAtes/spack | var/spack/repos/builtin/packages/nekbone/package.py | Python | lgpl-2.1 | 3,143 | 0.000318 | ##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Nekbone(Package):
"""NEK5000 emulation software called NEKbone. Nekbone captures the basic
| structure and user interface of the extensive Nek5000 software.
Nek5000 is a high order, incompressible Navier-Stokes solver based on
the spectral element method."""
homepage = "https | ://github.com/Nek5000/Nekbone"
url = "https://github.com/Nek5000/Nekbone/tarball/v17.0"
tags = ['proxy-app', 'ecp-proxy-app']
version('17.0', 'cc339684547614a0725959e41839fec1', git='https://github.com/Nek5000/Nekbone.git')
version('develop', git='https://github.com/Nek5000/Nekbone.git')
# Variants
variant('mpi', default=True, description='Build with MPI')
# dependencies
depends_on('mpi', when='+mpi')
@run_before('install')
def fortran_check(self):
if not self.compiler.fc:
msg = 'Nekbone can not be built without a Fortran compiler.'
raise RuntimeError(msg)
def install(self, spec, prefix):
mkdir(prefix.bin)
FC = self.compiler.fc
CC = self.compiler.cc
if '+mpi' in spec:
FC = spec['mpi'].mpif77
CC = spec['mpi'].mpicc
# Install Nekbone in prefix.bin
install_tree("../Nekbone", prefix.bin.Nekbone)
# Install scripts in prefix.bin
nekpmpi = 'test/example1/nekpmpi'
makenek = 'test/example1/makenek'
install(makenek, prefix.bin)
install(nekpmpi, prefix.bin)
with working_dir(prefix.bin):
filter_file(r'^SOURCE_ROOT\s*=.*', 'SOURCE_ROOT=\"' +
prefix.bin.Nekbone + '/src\"', 'makenek')
filter_file(r'^CC\s*=.*', 'CC=\"' + CC + '\"', 'makenek')
filter_file(r'^F77\s*=.*', 'F77=\"' + FC + '\"', 'makenek')
if '+mpi' not in spec:
filter_file(r'^#IFMPI=\"false\"', 'IFMPI=\"false\"', 'makenek')
|
dphiffer/dna-codec | lib/dnacloud/source/decode.py | Python | mit | 19,780 | 0.039383 | """
Author: Shalin Shah
Project: DNA Cloud
Graduate Mentor: Dixita Limbachya
Mentor: Prof. Manish K Gupta
Date: 5 November 2013
Website: www.guptalab.org/dnacloud
This module contains the methods used to decode a given .dnac file.
"""
from cStringIO import StringIO
import sqlite3
import sqlite3 as lite
import unicodedata
import time
import csv
import sys
import HuffmanDictionary
#import wx
#import psutil
import thread
import os
import gc
import extraModules
if hasattr(sys, "frozen"):
PATH = os.path.dirname(sys.executable)
else:
PATH = os.path.dirname(os.path.abspath(__file__))
#print PATH , "decode"
def decode(readPath,savePath):
con = sqlite3.connect(PATH + '/../database/prefs.db')
with con:
cur = con.cursor()
WORKSPACE_PATH = cur.execute('SELECT * FROM prefs WHERE id = 8').fetchone()[1]
if "linux" in sys.platform:
WORKSPACE_PATH = unicodedata.normalize('NFKD', WORKSPACE_PATH).encode('ascii','ignore')
if not os.path.isdir(WORKSPACE_PATH + '/.temp'):
os.mkdir(WORKSPACE_PATH + '/.temp')
degenrateDNAList(readPath,WORKSPACE_PATH)
degenrateDNAString(readPath,savePath,WORKSPACE_PATH)
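# Hypothetical usage sketch (not part of the original file; the paths are
# placeholders):
#
#     decode('/home/user/archive.dnac', '/home/user/restored.bin')
#
# decode() reads the workspace path from the prefs database, rebuilds the raw
# DNA string in a temp file, and then degenerates it back into the original
# bytes at savePath.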
def degenrateDNAString(readPath,savePath,WORKSPACE_PATH):
try:
#xtemp = readPath.split(".")
#if "win" in sys.platform and not 'darwin' in sys.platform:
# dnaFile = open(WORKSPACE_PATH + '\.temp\dnaString.txt',"rb")
# fileSize = os.path.getsize(WORKSPACE_PATH + '\.temp\dnaString.txt')
#elif "linux" in sys.platform or 'darwin' in sys.platform:
# print(WORKSPACE_PATH + '/.temp/dnaString.txt')
# dnaFile = open(WORKSPACE_PATH + '/.temp/dnaString.txt',"rb")
# fileSize = os.path.getsize(WORKSPACE_PATH + '/.temp/dnaString.txt')
#decodedFile = file(PATH + '\\..\\decodedFiles\\decode','wb')
#if len(xtemp) == 3:
# decodedFile = file(savePath+ "." + xtemp[1],'wb')
#else:
dnaFile = open(readPath, "rb")
fileSize = os.path.getsize(readPath)
decodedFile = file(savePath,'wb')
dnaFile.seek(fileSize - 21,0)
temp = dnaFile.read()
temp = extraModules.DNABaseToBase3WithChar(temp[1:],temp[0])
dnaLength = extraModules.base3ToDecimal(temp)
fileSize = dnaLength
dnaFile.seek(0,0)
CHUNK_SIZE = 5000000
if (fileSize % CHUNK_SIZE) == 0:
if (fileSize/CHUNK_SIZE) == 0:
noOfFileChunks = 1
else:
noOfFileChunks = (fileSize/CHUNK_SIZE)
else:
noOfFileChunks = (fileSize/CHUNK_SIZE) + 1
#print "No of Chunks" , noOfFileChunks
dnaLength = 0
#print "Chunk No : 1"
if noOfFileChunks > 1:
tempString = StringIO()
tempString.write(dnaFile.read(CHUNK_SIZE))
dnaString = tempString.getvalue()
base3String = extraModules.DNABaseToBase3(dnaString)
asciiList = HuffmanDictionary.base3ToAscii(base3String)
j = 0
prependString = ""
while asciiList == None:
j = j-1
asciiList = HuffmanDictionary.base3ToAscii(base3String[0:j])
prependString = dnaString[j] + prependString
string = extraModules.asciiToString(asciiList)
decodedFile.write(string)
temp = dnaString[-1]
del tempString
del asciiList
del string
for chunk_number in range(1,noOfFileChunks - 1):
#print "Chunk No :",chunk_number + 1
tempString = StringIO()
tempString.write(prependString)
tempString.write(dnaFile.read(CHUNK_SIZE))
#for i in fileToChunks.file_block(dnaFile, noOfFileChunks, chunk_number):
# tempString.write(i)
# dnaLength = dnaLength + len(i)
dnaString = tempString.getvalue()
base3String = extraModules.DNABaseToBase3WithChar(dnaString,temp)
asciiList = HuffmanDictionary.base3ToAscii(base3String)
j = 0
prependString = ""
while asciiList == None:
j = j-1
asciiList = HuffmanDictionary.base3ToAscii(base3String[0:j])
prependString = dnaString[j] + prependString
string = extraModules.asciiToString(asciiList)
decodedFile.write(string)
#dnaFile.flush()
decodedFile.flush()
temp = dnaString[j-1]
del string
del asciiList
del tempString
del dnaString
del base3String
#print "Chunk No:",noOfFileChunks
tempString = StringIO()
tempString.write(prependString)
tempString.write(dnaFile.read(fileSize - (noOfFileChunks - 1) * CHUNK_SIZE))
dnaString = tempString.getvalue()
base3String = extraModules.DNABaseToBase3WithChar(dnaString,temp)
asciiList = HuffmanDictionary.base3ToAscii(base3String)
string = extraModules.asciiToString(asciiList)
decodedFile.write(string)
#dnaFile.flush()
decodedFile.flush()
del string
del asciiList
del tempString
del prependString
del dnaString
else:
tempString = StringIO()
tempString.write(dnaFile.read(fileSize))
dnaString = tempString.getvalue()
base3String = extraModules.DNABaseToBase3(dnaString)
asciiList = HuffmanDictionary.base3ToAscii(base3String)
string = extraModules.asciiToString(asciiList)
decodedFile.write(string)
del tempString
del asciiList
del string
decodedFile.close()
dnaFile.close()
except MemoryError:
return -1
def degenrateDNAList(readPath,WORKSPACE_PATH):
try:
fileOpened = open(readPath,"rb")
#dnaFile = None
if "win" in sys.platform and not 'darwin' in sys.platform:
dnaFile = file(WORKSPACE_PATH + "\.temp\dnaString.txt","wb")
elif "linux" in sys.platform or 'darwin' in sys.platform:
dnaFile = file(WORKSPACE_PATH + "/.temp/dnaString.txt","wb")
dnaLength = 0
#fileSize = os.path.getsize(PATH + "/../.temp/dnaList.txt")
fileSize = os.path.getsize(readPath)
CHUNK_SIZE = 10000000
if (fileSize % CHUNK_SIZE) == 0:
if (fileSize/CHUN | K_SIZE) == 0:
noOfFileChunks = 1
else:
noOfFileChunks = | (fileSize/CHUNK_SIZE)
else:
noOfFileChunks = (fileSize/CHUNK_SIZE) + 1
#print "No of Chunks" , noOfFileChunks
if noOfFileChunks > 1:
#print "Chunk No : 1"
dnaList = fileOpened.read(CHUNK_SIZE)
prependString = ""
j = -1
while True:
if dnaList[j] == ',':
break
prependString = dnaList[j] + prependString
j -= 1
#print j , prependString
tempList = dnaList[:j].split(",")
dnaString = StringIO()
for i in xrange(len(tempList)):
if tempList[i][0] != " ":
if tempList[i][1] == "T":
dnaString.write(extraModules.reverseCompliment(tempList[i][2:27]))
dnaLength += 25
else:
dnaString.write(tempList[i][2:27])
dnaLength += 25
else:
if tempList[i][2] == "T":
dnaString.write(extraModules.reverseCompliment(tempList[i][3:28]))
dnaLength += 25
else:
dnaString.write(tempList[i][3:28])
dnaLength += 25
dnaFile.write(dnaString.getvalue())
dnaFile.flush()
#fileOpened.flush()
del tempList
del dnaString
del j
del dnaList
for chunk_number in xrange(1,noOfFileChunks-1):
#print "Chunk No :" , chunk_number + 1
dnaString = StringIO()
tempList = prependString
dnaList = fileOpened.read(CHUNK_SIZE)
prependString = ""
j = -1
while True:
if dnaList[j] == ',':
break
prependString = dnaList[j] + prependString
j -= 1
#print j , prependString
tempList = (tempList + dnaList[:j]).split(",")
for i in xrange(len(tempList)):
if tempList[i][0] != " ":
if tempList[i][1] == "T":
dnaString.write(extraModules.reverseCompliment(tempList[i][2:27]))
dnaLength += 25
else:
dnaString.write(tempList[i][2:27])
dnaLength += 25
else:
if tempList[i][2] == "T":
dnaString.write(extraModules.reverseCompliment(tempList[i][3:28]))
dnaLength += 25
else:
dnaString.write(tempList[i][3:28])
dnaLength += 25
dnaFile.write(dnaString.getvalue())
dnaFile.flush()
#fileOpened.flush()
del dnaString
del tempList
del j
del dnaList
#print "Chunk No :",noOfFileChunks
dnaString = StringIO()
tempList = prependString
dnaList = fileOpened.read()
j = -1
prependString = ""
while True:
if dnaList[j] == ',':
break
prependString = dnaList[j] + prependString
|
sharifyounes/wapi | wapi/tests/test_fields_required.py | Python | gpl-3.0 | 6,170 | 0.003404 |
from flask import jsonify
from unittest import TestCase
from wapi.tests import TestMixin
from wapi import fields_required
import json
class TestFieldsRequired(TestMixin, TestCase):
def setUp(self):
super(TestFieldsRequired, self).setUp()
@self.app.route("/")
@fields_required([("emd5", "email"), "taxonomy", "taxonomy.brand"])
def good():
return "good"
@self.app.route("/list/")
@fields_required([("emd5", "email"), "taxonomy", "taxonomy.brand"], iterable=True)
def good_list():
return "good"
def | test_no_data(self):
r = json.loads(self.c.get("/").response.next())
self.assertEqual(r["Error"], "Missing field.")
try:
try:
self.assertEqual(r["Missing field"], "Missing field.")
except AssertionError:
self.assertEqual(r["Missing field"], "Missing field.")
except AssertionError:
self.assertItemsEqual(r["Missing field"], ["emd5", "email"])
|
def test_missing_all_data_single(self):
payload = {}
r = json.loads(self.c.get("/", data=json.dumps(payload)).response.next())
self.assertEqual(r["Error"], "Missing field.")
try:
try:
self.assertEqual(r["Missing field"], "taxonomy.brand")
except AssertionError:
self.assertEqual(r["Missing field"], "taxonomy")
except AssertionError:
self.assertItemsEqual(r["Missing field"], ["emd5", "email"])
def test_missing_emd5_email_single(self):
payload = {
"taxonomy": {"brand": "good"}
}
r = json.loads(self.c.get("/", data=json.dumps(payload)).response.next())
self.assertEqual(r["Error"], "Missing field.")
self.assertItemsEqual(r["Missing field"], ["emd5", "email"])
def test_missing_taxonomy_single(self):
payload = {
"email": "foo"
}
r = json.loads(self.c.get("/", data=json.dumps(payload)).response.next())
self.assertEqual(r["Error"], "Missing field.")
try:
self.assertEqual(r["Missing field"], "taxonomy.brand")
except AssertionError:
self.assertEqual(r["Missing field"], "taxonomy")
def test_missing_taxonomy_brand_still_dict_single(self):
payload = {
"email": "foo",
"taxonomy": {"bar": "baz"}
}
r = json.loads(self.c.get("/", data=json.dumps(payload)).response.next())
self.assertEqual(r["Error"], "Missing field.")
self.assertEqual(r["Missing field"], "taxonomy.brand")
def test_missing_taxonomy_brand_not_dict_single(self):
payload = {
"email": "foo",
"taxonomy": "bar"
}
r = json.loads(self.c.get("/", data=json.dumps(payload)).response.next())
self.assertEqual(r["Error"], "Missing field.")
self.assertEqual(r["Missing field"], "taxonomy.brand")
def test_working_single(self):
payload = {
"email": "foo",
"taxonomy": {"brand": "bar"}
}
r = self.c.get("/", data=json.dumps(payload)).response.next()
self.assertEqual(r, "good")
def test_list_for_single(self):
payload = [{
"email": "foo",
"taxonomy": {"brand": "bar"}
}]
r = json.loads(self.c.get("/", data=json.dumps(payload)).response.next())
self.assertEqual(r["Error"], "Must be dictionary.")
def test_missing_all_data_list(self):
payload = []
r = self.c.get("/list/", data=json.dumps(payload)).response.next()
self.assertEqual(r, "good")
def test_missing_emd5_email_list(self):
payload = [{
"taxonomy": {"brand": "good"}
}]
r = json.loads(self.c.get("/list/", data=json.dumps(payload)).response.next())
self.assertItemsEqual(r["Error"], "Missing field.")
self.assertItemsEqual(r["Missing field"], ["emd5", "email"])
def test_missing_taxonomy_list(self):
payload = [{
"email": "foo"
}]
r = json.loads(self.c.get("/list/", data=json.dumps(payload)).response.next())
self.assertEqual(r["Error"], "Missing field.")
try:
self.assertEqual(r["Missing field"], "taxonomy.brand")
except AssertionError:
self.assertItemsEqual(r["Missing field"], "taxonomy")
def test_missing_taxonomy_brand_still_dict_list(self):
payload = [{
"email": "foo",
"taxonomy": {"bar": "baz"}
}]
r = json.loads(self.c.get("/list/", data=json.dumps(payload)).response.next())
self.assertEqual(r["Error"], "Missing field.")
self.assertEqual(r["Missing field"], "taxonomy.brand")
def test_missing_taxonomy_brand_not_dict_list(self):
payload = [{
"email": "foo",
"taxonomy": "bar"
}]
r = json.loads(self.c.get("/list/", data=json.dumps(payload)).response.next())
self.assertEqual(r["Error"], "Missing field.")
self.assertEqual(r["Missing field"], "taxonomy.brand")
def test_working_list(self):
payload = [{
"email": "foo",
"taxonomy": {"brand": "bar"}
}]
r = self.c.get("/list/", data=json.dumps(payload)).response.next()
self.assertEqual(r, "good")
def test_bad_list_good_items(self):
payload = [{
"email": "foo",
"taxonomy": {"brand": "bar"}
},
{
"email": "foo"
}]
r = json.loads(self.c.get("/list/", data=json.dumps(payload)).response.next())
self.assertEqual(r["Error"], "Missing field.")
self.assertEqual(r["Missing field"], "taxonomy")
def test_single_for_list(self):
payload = {
"email": "foo",
"taxonomy": {"brand": "bar"}
}
r = json.loads(self.c.get("/list/", data=json.dumps(payload)).response.next())
self.assertEqual(r["Error"], "Must be list.")
|
oculusstorystudio/kraken | unittests/core/objects/test_transform.py | Python | bsd-3-clause | 365 | 0 |
import unittest
from kraken.core.objects.transform import Transform
class Te | stTransform(unittest.TestCase):
def testInstance(self):
transform = Transform('test')
self.assertIsNotNone(transform)
def suite():
return unittest.TestLoader().loadTestsFromTestCase(TestTransform)
if __name__ == '__main__':
unittest.main(verbosity= | 2)
|
disqus/nose-unittest | setup.py | Python | apache-2.0 | 569 | 0 | #!/usr/bin/env python
from setuptools import setup, find_package | s
setup(
name='nose-unittest',
version='0.1.1',
author='DISQUS',
author_email='opensource@disqus.com',
url='http://github.com/disqus/nose-unittest',
package_dir={'': 'src'},
packages=find_packages('src'),
zip_safe=False,
install_requires=[
'nose>=0.9',
],
entry_points={
'nose.plugins.0.10': [
'nose_unittest = nose_unittest.plugin:UnitTestPlugin'
]
},
license=' | Apache License 2.0',
include_package_data=True,
)
|
Findspire/workflow | workflow/apps/workflow/urls.py | Python | mit | 2,979 | 0.007385 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# The MIT License (MIT)
# Copyright (c) 2015 Findspire
from django.conf.urls import patterns, url
from workflow.apps.workflow import views
urlpatterns = patterns('workflow.apps.workflow',
url(r'^$', 'views.index', name='index'),
url(r'^project/new/$', views.project_new | , name='project_new'),
url(r'^project/edit/(?P<project_pk>\d+)/$', views.project_edit, name='project_edit'),
url(r'^project/list/$', views.proj | ect_list, name='project_list'),
url(r'^workflow/reset/(?P<workflow_pk>\d+)/$', views.reset_item_validation,
name='workflow_reset'),
url(r'^workflow/new/$', views.workflow_create, name='workflow_new'),
url(r'^workflow/new/(?P<project_pk>\d+)/$', views.workflow_create, name='workflow_new'),
url(r'^workflow/edit/(?P<workflow_pk>\d+)/$', views.workflow_edit, name='workflow_edit'),
url(r'^workflow/show/(?P<workflow_pk>\d+)/(?P<which_display>\w+)/$', views.workflow_show, name='workflow_show'),
url(r'^workflow/(?P<workflow_pk>\d+)/delete/$', views.workflow_delete, name='workflow_delete'),
url(r'^item/model/new/$', views.ItemModelFormView.as_view(), name='item_model_new'),
url(r'^item/model/new/category/(?P<category>\d+)$', views.ItemModelFormView.as_view(), name='item_model_new'),
url(r'^item/model/new/workflow/(?P<workflow_pk>\d+)$', views.ItemModelFormViewFromWorkflow.as_view(), name='item_model_add_to_workflow'),
url(r'^item/model/new/workcat/(?P<workflow_pk>\d+)/(?P<category>\d+)$', views.create_item_view, name='item_model_add_to_workcat'),
url(r'^item/model/edit/(?P<pk>\d+)/$', views.ItemModelFormView.as_view(), name='item_model_edit'),
url(r'^item/model/list/$', views.itemmodel_list, name='item_model_list'),
url(r'^item/category/new/$', views.ItemCategoryFormView.as_view(), name='item_category_new'),
url(r'^item/category/new/(?P<workflow_pk>\d+)/$', views.ItemCategoryFormView.as_view(), name='item_category_new'),
url(r'^item/category/edit/(?P<pk>\d+)/$', views.ItemCategoryFormView.as_view(), name='item_category_edit'),
url(r'^item/category/(?P<workflow_pk>\d+)/(?P<category_pk>\d+)/$', views.item_category_delete, name='item_category_delete'),
url(r'^item/instance/show/(?P<item_pk>\d+)/$', views.item_instance_show, name='item_instance_show'),
url(r'^item/instance/comment/(?P<workflow_pk>\d+)/(?P<comment_pk>\d+)/delete/$', views.delete_comment_view, name='delete_comment_view'),
url(r'^update/(?P<which_display>\w+)/(?P<action>\w+)/(?P<model>\w+)/(?P<pk>\d+)/$', views.update, name='update'),
url(r'^update/(?P<which_display>\w+)/(?P<action>\w+)/(?P<model>\w+)/(?P<pk>\d+)/(?P<pk_other>\d+)/$', views.update, name='update'),
url(r'^update/(?P<action>\w+)/(?P<item_pk>\d+)/', views.update_item_validation, name='update_item_validation'),
url(r'^workflow/(?P<workflow_pk>[0-9]+)/(?P<category_pk>[0-9]+)/(?P<action>\w+)/', views.take_items_category, name='take_items_category'),
)
|
mosajjal/mitmproxy | test/pathod/test_log.py | Python | mit | 480 | 0.002083 | import io
from pathod import log
from mitmproxy import exceptions
class DummyIO(io.StringIO):
def start_log(self, *args, **kwargs):
pass
def get_log(self, *args, **kwargs):
return ""
def test_disconnect():
outf = DummyIO()
rw | = DummyIO()
l = log.ConnectionLogger(outf, False, True, rw, rw)
| try:
with l.ctx() as lg:
lg("Test")
except exceptions.TcpDisconnect:
pass
assert "Test" in outf.getvalue()
|
cwtaylor/viper | viper/modules/jar.py | Python | bsd-3-clause | 1,994 | 0.001003 | # This file is part of Viper - https://github.com/viper-framework/viper
# See the file 'LICENSE' for copying permission.
import hashlib
import zipfile
from viper.common.abstracts import Module
from viper.core.session import __sessions__
class Jar(Module):
cmd = 'jar'
description = 'Parse Java JAR archives'
authors = ['Kevin Breen']
def __init__(self):
super(Jar, self).__init__()
self.parser.add_argument('-d ', '--dump', metavar='dump_path', help='Extract all items from jar')
def run(self):
def read_manifest(manifest):
rows = []
lines = manifest.split('\r\n')
for line in lines:
if len(line) > 1:
                    item, value = line.split(':', 1)  # split on the first ':' only; values may contain colons
ro | ws.append([item, value])
self.log('info', "Manifest File:")
self.log('table', dict(header=['Item', 'Value'], rows=rows))
super(Jar, self).run()
if self.args is None:
return
arg_dump = self.args.dump
if not __sessions__.is_set():
self.log('error', "No open session")
return
if not zip | file.is_zipfile(__sessions__.current.file.path):
self.log('error', "Doesn't Appear to be a valid jar archive")
return
with zipfile.ZipFile(__sessions__.current.file.path, 'r') as archive:
jar_tree = []
for name in archive.namelist():
item_data = archive.read(name)
if name == 'META-INF/MANIFEST.MF':
read_manifest(item_data)
item_md5 = hashlib.md5(item_data).hexdigest()
jar_tree.append([name, item_md5])
self.log('info', "Jar Tree:")
self.log('table', dict(header=['Java File', 'MD5'], rows=jar_tree))
if arg_dump:
archive.extractall(arg_dump)
self.log('info', "Archive content extracted to {0}".format(arg_dump))
|
oVirt/vdsm | tests/hostdev_test.py | Python | gpl-2.0 | 10,768 | 0 | #
# Copyright 2014-2020 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# Refer to the README and COPYING files for full details of the license
#
from __future__ import absolute_import
from __future__ import division
import six
from vdsm.common import exception
from vdsm.common import xmlutils
from vdsm.virt.vmdevices import network, hwclass
from testlib import VdsmTestCase as TestCaseBase, XMLTestCase
from testlib import permut | ations, expandPermutations
from monkeypatch import MonkeyClass, MonkeyPatchScope
from testValidation import skipif
from vdsm.common import hooks
from vdsm.common import hostdev
from vdsm.common import libvirtconnection
import hostdevlib
@expandPermutations
@MonkeyClass(libvirtconnection, 'get', hostdevlib.Connection)
@MonkeyClass(hostdev, '_sriov_totalvfs', hostdevlib.fake_totalvfs)
@MonkeyClass(hostdev, '_pci_header_type', lambda _: 0)
@MonkeyClass(hooks, 'after_hostdev_list_by_caps', lambda j | son: json)
@MonkeyClass(hostdev, '_get_udev_block_mapping',
lambda: hostdevlib.UDEV_BLOCK_MAP)
class HostdevTests(TestCaseBase):
def testProcessDeviceParams(self):
deviceXML = hostdev._process_device_params(
libvirtconnection.get().nodeDeviceLookupByName(
hostdevlib.ADDITIONAL_DEVICE).XMLDesc()
)
self.assertEqual(
hostdevlib.ADDITIONAL_DEVICE_PROCESSED,
deviceXML
)
@skipif(six.PY3, "Not relevant in Python 3 libvirt")
# libvirt in Python 3 returns strings, so we don't deal with
# invalid coding anymore.
def testProcessDeviceParamsInvalidEncoding(self):
deviceXML = hostdev._process_device_params(
libvirtconnection.get().nodeDeviceLookupByName(
hostdevlib.COMPUTER_DEVICE).XMLDesc()
)
self.assertEqual(
hostdevlib.COMPUTER_DEVICE_PROCESSED,
deviceXML
)
def testProcessSRIOV_PFDeviceParams(self):
deviceXML = hostdev._process_device_params(
libvirtconnection.get().nodeDeviceLookupByName(
hostdevlib.SRIOV_PF).XMLDesc()
)
self.assertEqual(
hostdevlib.SRIOV_PF_PROCESSED,
deviceXML
)
def testProcessSRIOV_VFDeviceParams(self):
deviceXML = hostdev._process_device_params(
libvirtconnection.get().nodeDeviceLookupByName(
hostdevlib.SRIOV_VF).XMLDesc()
)
self.assertEqual(hostdevlib.SRIOV_VF_PROCESSED, deviceXML)
def testProcessNetDeviceParams(self):
deviceXML = hostdev._process_device_params(
libvirtconnection.get().nodeDeviceLookupByName(
hostdevlib.NET_DEVICE).XMLDesc()
)
self.assertEqual(hostdevlib.NET_DEVICE_PROCESSED, deviceXML)
def testProcessMdevDeviceParams(self):
deviceXML = hostdev._process_device_params(
libvirtconnection.get().nodeDeviceLookupByName(
hostdevlib.MDEV_DEVICE).XMLDesc()
)
self.assertEqual(hostdevlib.MDEV_DEVICE_PROCESSED, deviceXML)
def testGetDevicesFromLibvirt(self):
libvirt_devices, _ = hostdev._get_devices_from_libvirt()
self.assertEqual(hostdevlib.DEVICES_PROCESSED, libvirt_devices)
self.assertEqual(len(libvirt_devices),
len(hostdevlib.PCI_DEVICES) +
len(hostdevlib.USB_DEVICES) +
len(hostdevlib.SCSI_DEVICES))
@permutations([[''], [('pci',)], [('usb_device',)],
[('pci', 'usb_device')]])
def testListByCaps(self, caps):
devices = hostdev.list_by_caps(caps)
for cap in caps:
self.assertTrue(set(hostdevlib.DEVICES_BY_CAPS[cap].keys()).
issubset(set(devices.keys())))
@permutations([
# addr_type, addr, name
('usb', {'bus': '1', 'device': '2'}, 'usb_1_1'),
('usb', {'bus': '1', 'device': '10'}, 'usb_1_1_4'),
('pci', {'slot': '26', 'bus': '0', 'domain': '0', 'function': '0'},
'pci_0000_00_1a_0'),
('scsi', {'bus': '0', 'host': '1', 'lun': '0', 'target': '0'},
'scsi_1_0_0_0'),
])
def test_device_name_from_address(self, addr_type, addr, name):
# we need to make sure we scan all the devices (hence caps=None)
hostdev.list_by_caps()
self.assertEqual(
hostdev.device_name_from_address(addr_type, addr),
name
)
@MonkeyClass(libvirtconnection, 'get', hostdevlib.Connection.get)
@MonkeyClass(hostdev, '_sriov_totalvfs', hostdevlib.fake_totalvfs)
@MonkeyClass(hostdev, '_pci_header_type', lambda _: 0)
@MonkeyClass(hooks, 'after_hostdev_list_by_caps', lambda json: json)
class HostdevPerformanceTests(TestCaseBase):
def test_3k_storage_devices(self):
with hostdevlib.Connection.use_hostdev_tree():
self.assertEqual(
len(hostdev.list_by_caps()),
len(libvirtconnection.get().listAllDevices())
)
@expandPermutations
@MonkeyClass(libvirtconnection, 'get', hostdevlib.Connection)
@MonkeyClass(hostdev, '_sriov_totalvfs', hostdevlib.fake_totalvfs)
@MonkeyClass(hostdev, '_pci_header_type', lambda _: 0)
class HostdevCreationTests(XMLTestCase):
_PCI_ADDRESS = {'slot': '0x02', 'bus': '0x01', 'domain': '0x0000',
'function': '0x0', 'type': 'pci'}
_PCI_ADDRESS_XML = '<address bus="0x01" domain="0x0000" function="0x0" \
slot="0x02" type="pci"/>'
def setUp(self):
self.conf = {
'vmName': 'testVm',
'vmId': '9ffe28b6-6134-4b1e-8804-1185f49c436f',
'smp': '8', 'maxVCpus': '160',
'memSize': '1024', 'memGuaranteedSize': '512'}
# TODO: next 2 tests should reside in their own module (interfaceTests.py)
def testCreateSRIOVVF(self):
dev_spec = {'type': hwclass.NIC, 'device': 'hostdev',
'hostdev': hostdevlib.SRIOV_VF,
'macAddr': 'ff:ff:ff:ff:ff:ff',
'specParams': {'vlanid': 3},
'bootOrder': '9'}
device = network.Interface(self.log, **dev_spec)
self.assertXMLEqual(
xmlutils.tostring(device.getXML()),
hostdevlib.DEVICE_XML[hostdevlib.SRIOV_VF] % ('',))
def testCreateSRIOVVFWithAddress(self):
dev_spec = {'type': hwclass.NIC, 'device': 'hostdev',
'hostdev': hostdevlib.SRIOV_VF,
'macAddr': 'ff:ff:ff:ff:ff:ff',
'specParams': {'vlanid': 3},
'bootOrder': '9', 'address':
{'slot': '0x02', 'bus': '0x01', 'domain': '0x0000',
'function': '0x0', 'type': 'pci'}}
device = network.Interface(self.log, **dev_spec)
self.assertXMLEqual(
xmlutils.tostring(device.getXML()),
hostdevlib.DEVICE_XML[hostdevlib.SRIOV_VF] % (
self._PCI_ADDRESS_XML
)
)
@expandPermutations
@MonkeyClass(hostdev, '_each_supported_mdev_type', hostdevlib.fake_mdev_types)
@MonkeyClass(hostdev, '_mdev_type_details', hostdevlib.fake_mdev_details)
@MonkeyClass(hostdev, '_mdev_device_vendor', hostdevlib.fake_mdev_vendor)
@MonkeyClass(hostdev, '_mdev_type_devices', hostdevlib.fake_mdev_instances)
@MonkeyClass(hostdev, 'supervdsm', hostdevlib.FakeSuperVdsm())
class TestMdev(TestCaseBase):
def setUp(self):
def make_device(name):
mdev_types = [
|
expertanalytics/fagkveld | worldmap/src/worldmap/__init__.py | Python | bsd-2-clause | 52 | 0.019231 |
__all__ = [
'DTM',
]
from | .mode | l import DTM
|
Tanoshinderuyo/Python | Calendar/A2Jahreskalender/calendar.py | Python | gpl-2.0 | 9,023 | 0.066533 | import calfunctions
from tkinter import *
#Main file
#uses: schaltjahr(jahr), monatslaenge(jahr,monat), wochentag(jahr,monat,tag)
def kalender(jahr):
    #lists for quick access to month names and weekday names
monatsname = ['Januar','Februar','März','April','Mai','Juni','Juli','August','September','Oktober','November','Dezember']
tagnamelang = ['Sonntag','Montag','Dienstag','Mittwoch','Donnerstag','Freitag','Samstag']
tagname = ['So','Mo','Di','Mi','Do','Fr','Sa']
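    #added commentary: wochentag() is assumed to return 0 for Sunday (see the
    #`w%7==0` branches below), which is why the header loop indexes
    #tagname[tag%7] so that column 0 shows Monday and column 6 shows Sunday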
    ##Create the windows
    ##Windows 1-3 hold the calendar (four months each)
    ##One extra window lists the holidays
fenster=Tk()
fenster2=Tk()
fenster3=Tk()
windows = [fenster, fenster2, fenster3]
fenster.title("Kalender des Jahres "+str(jahr)+" Januar bis April")
fenster2.title("Kalender des Jahres "+str(jahr)+" Mai bis August")
fenster3.title("Kalender des Jahres "+str(jahr)+" September bis Dezember")
feiertage=Tk()
feiertage.title("Feiertage des Jahres "+str(jahr))
#============================================
    # Build the table
#============================================
reihe=0
for monat in range(1,5,1):
        #header: month name + day list
        ###print(monatsname[monat]) #default: print(monatsname[monat],end='\n')
        Label(text=monatsname[monat-1],width=15, relief=GROOVE, bg='green', fg='white').grid(row=reihe,column=0) ###starts at 0, see the day labels
        reihe+=1 ###has no effect
        for tag in range(1,8,1):
            Label(text=tagname[tag%7],width=15, relief=GROOVE).grid(row=reihe, column=tag-1)
        reihe+=1 ###weekday header row
        monatlang=calfunctions.monatslaenge(jahr,monat)# length of the current month
#============================================
        # Entries in the table
#============================================
w = calfunctions.wochentag(jahr,monat,1)
        #inserts blanks before the first entry of the month
        ###NOTE: COLUMNS START AT 0
        if (w!=1):
            if(w!=0):
                ###print('\t'*(w-1)+'1',end='')
                #revisit the empty labels here, more of them need to be printed
                for i in range(0,w+1,1):
                    Label(text='',width=15, relief=GROOVE).grid(row=reihe, column=i)
                Label(text='1',width=15, relief=GROOVE).grid(row=reihe, column=w-1)
            else:
                ###print('\t'*6+'1',end='\n') # prints 6 tabs
                for i in range(0,6,1):
                    Label(text='',width=15, relief=GROOVE).grid(row=reihe, column=i)
                Label(text='1',width=15, relief=GROOVE).grid(row=reihe, column=6)#column 6, since Sunday
                reihe+=1
        else:
            ###print(str(1),end='')
            Label(text='1',width=15, relief=GROOVE).grid(row=reihe, column=0)#w-1 = Monday is 1
        #writes the consecutive weekdays
        zaehler = 2
        for tag in range(2,monatlang+1,1): # question: which is faster? range(1,calfunctions.monatslaenge(monat))
            w+=1
            if(w%7==0): #Sunday
                ###print('\t'+str(zaehler),end='\n')
                Label(text=str(zaehler),width=15, relief=GROOVE).grid(row=reihe, column=w%7+6)
                reihe+=1
            elif (w%7==1):#Monday
                Label(text=str(zaehler),width=15, relief=GROOVE).grid(row=reihe, column=w%7-1)
            else:
                ###probably not needed, since grid Label(text='',width=15, relief=GROOVE).grid(row=reihe, column=w%7)
                Label(text=str(zaehler),width=15, relief=GROOVE).grid(row=reihe, column=w%7-1)
            zaehler+=1
        reihe+=1 ###gap after the last weekday
reihe=0
for monat in range(5,9,1):
        #header: month name + day list
        ###print(monatsname[monat]) #default: print(monatsname[monat],end='\n')
        Label(fenster2, text=monatsname[monat-1],widt | h=15, relief=GROOVE, bg='green', fg='white').grid(row=reihe,column=0) ###starts at 0, see the day labels
        reihe+=1 ###has no effect
        for tag in range(1,8,1):
            Label(fenster2, text=tagname[tag%7],width=15, relief=GROOVE).grid(row=reihe, column=tag-1)
        reihe+=1 ###weekday header row
        monatlang=calfunctions.monatslaenge(jahr,monat)# length of the current month
#============================================
        # Entries in the table
#============= | ===============================
w = calfunctions.wochentag(jahr,monat,1)
        #inserts blanks before the first entry of the month
        ###NOTE: COLUMNS START AT 0
        if (w!=1):
            if(w!=0):
                ###print('\t'*(w-1)+'1',end='')
                #revisit the empty labels here, more of them need to be printed
                for i in range(0,w+1,1):
                    Label(fenster2, text='',width=15, relief=GROOVE).grid(row=reihe, column=i)
                Label(fenster2, text='1',width=15, relief=GROOVE).grid(row=reihe, column=w-1)
            else:
                ###print('\t'*6+'1',end='\n') # prints 6 tabs
                for i in range(0,6,1):
                    Label(fenster2, text='',width=15, relief=GROOVE).grid(row=reihe, column=i)
                Label(fenster2, text='1',width=15, relief=GROOVE).grid(row=reihe, column=6)#column 6, since Sunday
                reihe+=1
        else:
            ###print(str(1),end='')
            Label(fenster2, text='1',width=15, relief=GROOVE).grid(row=reihe, column=0)#w-1 = Monday is 1
        #writes the consecutive weekdays
        zaehler = 2
        for tag in range(2,monatlang+1,1): # question: which is faster? range(1,calfunctions.monatslaenge(monat))
            w+=1
            if(w%7==0): #Sunday
                ###print('\t'+str(zaehler),end='\n')
                Label(fenster2, text=str(zaehler),width=15, relief=GROOVE).grid(row=reihe, column=w%7+6)
                reihe+=1
            elif (w%7==1):#Monday
                Label(fenster2, text=str(zaehler),width=15, relief=GROOVE).grid(row=reihe, column=w%7-1)
            else:
                ###probably not needed, since grid Label(text='',width=15, relief=GROOVE).grid(row=reihe, column=w%7)
                Label(fenster2, text=str(zaehler),width=15, relief=GROOVE).grid(row=reihe, column=w%7-1)
            zaehler+=1
        reihe+=1 ###gap after the last weekday
reihe=0
for monat in range(9,13,1):
#header Monatsname + Tagliste
###print(monatsname[monat]) #default: print(monatsname[monat],end='\n')
Label(fenster3, text=monatsname[monat-1],width=15, relief=GROOVE, bg='green', fg='white').grid(row=reihe,column=0) ###starts at 0, see days
reihe+=1 ###has no effect
for tag in range(1,8,1):
Label(fenster3, text=tagname[tag%7],width=15, relief=GROOVE).grid(row=reihe, column=tag-1)
reihe+=1 ###weekday headers
monatlang=calfunctions.monatslaenge(jahr,monat)#length of the current month
#============================================
# Table entries
#============================================
w = calfunctions.wochentag(jahr,monat,1)
#inserts blanks before the first entry of the month
###NOTE: COLUMNS START AT 0
if (w!=1):
if(w!=0):
###print('\t'*(w-1)+'1',end='')
#check the empty labels here again; more of them need to be printed
for i in range(0,w+1,1):
Label(fenster3, text='',width=15, relief=GROOVE).grid(row=reihe, column=i)
Label(fenster3,text='1',width=15, relief=GROOVE).grid(row=reihe, column=w-1)
else:
###print('\t'*6+'1',end='\n') #outputs 6x '\t'
for i in range(0,6,1):
Label(fenster3, text='',width=15, relief=GROOVE).grid(row=reihe, column=i)
Label(fenster3, text='1',width=15, relief=GROOVE).grid(row=reihe, column=6)#column 6, since Sunday
reihe+=1
else:
###print(str(1),end='')
Label(fenster3, text='1',width=15, relief=GROOVE).grid(row=reihe, column=0)#w-1 = Monday is 1
#writes the consecutive weekdays
zaehler = 2
for tag in range(2,monatlang+1,1): # Question: which is faster? range(1,calfunctions.monatslaenge(monat))
w+=1
if(w%7==0): #Sunday
###print('\t'+str(zaehler),end='\n')
Label(fenster3, text=str(zaehler),width=15, relief=GROOVE).grid(row=reihe, column=w%7+6)
reihe+=1
elif (w%7==1):#Monday
Label(fenster3, text=str(zaehler),width=15, relief=GROOVE).grid(row=reihe, column=w%7-1)
else:
###probably not needed, since grid: Label(text='',width=15, relief=GROOVE).grid(row=reihe, column=w%7)
Label(fenster3, text=str(zaehler),width=15, relief=GROOVE).grid(row=reihe, column=w%7-1)
zaehler+=1
reihe+=1 ###spacing after the last weekday
# Outputs the holidays
###Headings
Label(feiertage, text='Ereignis',width=20,bg='green',fg='white', relief=GROOVE).grid(row=0, column=0)
Label(feiertage, text='Tag',width=7,bg='green',fg='white', relief=GROOVE).grid(row=0, column=1)
Label(feiertage, tex |
JulienPalard/wicd | wicd/monitor.py | Python | gpl-2.0 | 14,528 | 0.001996 | #!/usr/bin/env python3
""" monitor -- connection monitoring process
This process is spawned as a child of the daemon, and is responsible
for monitoring connection status and initiating autoreconnection
when appropriate.
"""
#
# Copyright (C) 2007 - 2009 Adam Blackburn
# Copyright (C) 2007 - 2009 Dan O'Reilly
#
# This program is free software; you can redistribute it | and/or modify
# it under the terms of the GNU General Public License Version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have recei | ved a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import gobject
import time
from dbus import DBusException
from wicd import wpath
from wicd import misc
from wicd import dbusmanager
misc.RenameProcess("wicd-monitor")
if __name__ == '__main__':
wpath.chdir(__file__)
dbusmanager.connect_to_dbus()
dbus_dict = dbusmanager.get_dbus_ifaces()
daemon = dbus_dict["daemon"]
wired = dbus_dict["wired"]
wireless = dbus_dict["wireless"]
mainloop = None
def diewithdbus(func):
"""
Decorator catching DBus exceptions, making wicd quit.
"""
def wrapper(self, *__args, **__kargs):
try:
ret = func(self, *__args, **__kargs)
self.__lost_dbus_count = 0
return ret
except DBusException as e:
print("Caught exception %s" % str(e))
if not hasattr(self, "__lost_dbus_count"):
self.__lost_dbus_count = 0
if self.__lost_dbus_count > 3:
mainloop.quit()
self.__lost_dbus_count += 1
return True
wrapper.__name__ = func.__name__
wrapper.__dict__ = func.__dict__
wrapper.__doc__ = func.__doc__
return wrapper
class ConnectionStatus(object):
""" Class for monitoring the computer's connection status. """
def __init__(self):
""" Initialize variables needed for the connection status methods. """
self.last_strength = -2
self.last_state = misc.NOT_CONNECTED
self.last_reconnect_time = time.time()
self.last_network = ""
self.displayed_strength = -1
self.still_wired = False
self.network = ''
self.tried_reconnect = False
self.connection_lost_counter = 0
self.reconnecting = False
self.reconnect_tries = 0
self.signal_changed = False
self.iwconfig = ""
self.trigger_reconnect = False
self.__lost_dbus_count = 0
self._to_time = daemon.GetBackendUpdateInterval()
self.update_callback = None
self.add_poll_callback()
bus = dbusmanager.get_bus()
bus.add_signal_receiver(self._force_update_connection_status,
"UpdateState", "org.wicd.daemon")
bus.add_signal_receiver(self._update_timeout_interval,
"SignalBackendChanged", "org.wicd.daemon")
def _update_timeout_interval(self, interval):
""" Update the callback interval when signaled by the daemon. """
self._to_time = interval
gobject.source_remove(self.update_callback)
self.add_poll_callback()
def _force_update_connection_status(self):
""" Run a connection status update on demand.
Removes the scheduled update_connection_status()
call, explicitly calls the function, and reschedules
it.
"""
gobject.source_remove(self.update_callback)
self.update_connection_status()
self.add_poll_callback()
def add_poll_callback(self):
""" Registers a polling call at a predetermined interval.
The polling interval is determined by the backend in use.
"""
self.update_callback = misc.timeout_add(self._to_time,
self.update_connection_status)
def check_for_wired_connection(self, wired_ip):
""" Checks for a wired connection.
Checks for two states:
1) A wired connection is not in use, but a cable is plugged
in, and the user has chosen to switch to a wired connection
whenever its available, even if already connected to a
wireless network.
2) A wired connection is currently active.
"""
self.trigger_reconnect = False
if not wired_ip and daemon.GetPreferWiredNetwork():
if not daemon.GetForcedDisconnect() and wired.CheckPluggedIn():
self.trigger_reconnect = True
elif wired_ip and wired.CheckPluggedIn():
# Only change the interface if it's not already set for wired
if not self.still_wired:
daemon.SetCurrentInterface(daemon.GetWiredInterface())
self.still_wired = True
return True
# Wired connection isn't active
elif wired_ip and self.still_wired:
# If we still have an IP, but no cable is plugged in
# we should disconnect to clear it.
wired.DisconnectWired()
self.still_wired = False
return False
def check_for_wireless_connection(self, wireless_ip):
""" Checks for an active wireless connection.
Checks for an active wireless connection. Also notes
if the signal strength is 0, and if it remains there
for too long, triggers a wireless disconnect.
Returns True if wireless connection is active, and
False otherwise.
"""
# Make sure we have an IP before we do anything else.
if not wireless_ip:
return False
if daemon.NeedsExternalCalls():
self.iwconfig = wireless.GetIwconfig()
else:
self.iwconfig = ''
# Reset this, just in case.
self.tried_reconnect = False
bssid = wireless.GetApBssid()
if not bssid:
return False
wifi_signal = self._get_printable_sig_strength(always_positive=True)
if wifi_signal <= 0:
# If we have no signal, increment connection loss counter.
# If we haven't gotten any signal 4 runs in a row (12 seconds),
# try to reconnect.
self.connection_lost_counter += 1
print(self.connection_lost_counter)
if self.connection_lost_counter >= 4 and daemon.GetAutoReconnect():
wireless.DisconnectWireless()
self.connection_lost_counter = 0
return False
else: # If we have a signal, reset the counter
self.connection_lost_counter = 0
if (wifi_signal != self.last_strength or
self.network != self.last_network):
self.last_strength = wifi_signal
self.last_network = self.network
self.signal_changed = True
daemon.SetCurrentInterface(daemon.GetWirelessInterface())
return True
@diewithdbus
def update_connection_status(self):
""" Updates the tray icon and current connection status.
Determines the current connection state and sends a dbus signal
announcing when the status changes. Also starts the automatic
reconnection process if necessary.
"""
wired_ip = None
wifi_ip = None
if daemon.GetSuspend():
print("Suspended.")
state = misc.SUSPENDED
return self.update_state(state)
# Determine what our current state is.
# Are we currently connecting?
if daemon.CheckIfConnecting():
state = misc.CONNECTING
return self.update_state(state)
daemon.SendConnectResultsIfAvail()
# Check for wired.
wired_ip = wired.GetWiredIP("")
wired_found = self.check_for_wired_connection(wired_ip)
if wired_found:
return self.update_state(misc. |
eduNEXT/edx-platform | lms/djangoapps/lti_provider/models.py | Python | agpl-3.0 | 6,044 | 0.002151 | """
Database models for the LTI provider feature.
This app uses migrations. If you make changes to this model, be sure to create
an appropriate migration file and check it in at the same time as your model
changes. To do that,
1. Go to the edx-platform dir
2. ./manage.py lms schemamigration lti_provider --auto "description" --settings=devstack
"""
import logging
from django.contrib.auth.models import User # lint-amnesty, pylint: disable=imported-auth-user
from django.db import models
from opaque_keys.edx.django.models import CourseKeyField, UsageKeyField
from openedx.core.djangolib.fields import CharNullField
from openedx.core.lib.hash_utils import short_token
log = logging.getLogger("edx.lti_provider")
class LtiConsumer(models.Model):
"""
Database model representing an LTI consumer. This model stores the consumer
specific settings, such as the OAuth key/secret pair and any LTI fields
that must be persisted.
.. no_pii:
"""
consumer_name = models.CharField(max_length=255, unique=True)
consumer_key = models.CharField(max_length=32, unique=True, db_index=True, default=short_token)
consumer_secret = models.CharField(max_length=32, unique=True, default=short_token)
instance_guid = CharNullField(max_length=255, blank=True, null=True, unique=True)
@staticmethod
def get_or_supplement(instance_guid, consumer_key):
"""
The instance_guid is the best way to uniquely identify an LTI consumer.
However according to the LTI spec, the instance_guid field is optional
and so cannot be relied upon to be present.
This method first attempts to find an LtiConsumer by instance_guid.
Failing that, it tries to find a record with a matching consumer_key.
This can be the case if the LtiConsumer record was created as the result
of an LTI launch with no instance_guid.
If the instance_guid is now present, the LtiConsumer model will be
supplemented with the instance_guid, to more concretely identify the
consumer.
In practice, nearly all major LTI consumers provide an instance_guid, so
the fallback mechanism of matching by consumer key should be rarely
required.
"""
consumer = None
if instance_guid:
try:
consumer = LtiConsumer.objects.get(instance_guid=instance_guid)
except LtiConsumer.DoesNotExist:
# The consumer may not exist, or its record may not have a guid
pass
# Se | arch by consumer key instead of instance_guid. If there is no
# consumer with a matching key, the LTI launch does not have permission
# to access the content.
if not consum | er:
consumer = LtiConsumer.objects.get(
consumer_key=consumer_key,
)
# Add the instance_guid field to the model if it's not there already.
if instance_guid and not consumer.instance_guid:
consumer.instance_guid = instance_guid
consumer.save()
return consumer
class OutcomeService(models.Model):
"""
Model for a single outcome service associated with an LTI consumer. Note
that a given consumer may have more than one outcome service URL over its
lifetime, so we need to store the outcome service separately from the
LtiConsumer model.
An outcome service can be identified in two ways, depending on the
information provided by an LTI launch. The ideal way to identify the service
is by instance_guid, which should uniquely identify a consumer. However that
field is optional in the LTI launch, and so if it is missing we can fall
back on the consumer key (which should be created uniquely for each consumer
although we don't have a technical way to guarantee that).
Some LTI-specified fields use the prefix lis_; this refers to the IMS
Learning Information Services standard from which LTI inherits some
properties
.. no_pii:
"""
lis_outcome_service_url = models.CharField(max_length=255, unique=True)
lti_consumer = models.ForeignKey(LtiConsumer, on_delete=models.CASCADE)
class GradedAssignment(models.Model):
"""
Model representing a single launch of a graded assignment by an individual
user. There will be a row created here only if the LTI consumer may require
a result to be returned from the LTI launch (determined by the presence of
the lis_result_sourcedid parameter in the launch POST). There will be only
one row created for a given usage/consumer combination; repeated launches of
the same content by the same user from the same LTI consumer will not add
new rows to the table.
Some LTI-specified fields use the prefix lis_; this refers to the IMS
Learning Information Services standard from which LTI inherits some
properties
.. no_pii:
"""
user = models.ForeignKey(User, db_index=True, on_delete=models.CASCADE)
course_key = CourseKeyField(max_length=255, db_index=True)
usage_key = UsageKeyField(max_length=255, db_index=True)
outcome_service = models.ForeignKey(OutcomeService, on_delete=models.CASCADE)
lis_result_sourcedid = models.CharField(max_length=255, db_index=True)
version_number = models.IntegerField(default=0)
class Meta:
unique_together = ('outcome_service', 'lis_result_sourcedid')
class LtiUser(models.Model):
"""
Model mapping the identity of an LTI user to an account on the edX platform.
The LTI user_id field is guaranteed to be unique per LTI consumer (per
the LTI spec), so we guarantee a unique mapping from LTI to edX account
by using the lti_consumer/lti_user_id tuple.
.. no_pii:
"""
lti_consumer = models.ForeignKey(LtiConsumer, on_delete=models.CASCADE)
lti_user_id = models.CharField(max_length=255)
edx_user = models.OneToOneField(User, on_delete=models.CASCADE)
class Meta:
unique_together = ('lti_consumer', 'lti_user_id')
|
fake-name/ReadableWebProxy | WebMirror/management/rss_parser_funcs/feed_parse_extractSunkenfleetWordpressCom.py | Python | bsd-3-clause | 742 | 0.028302 | def extractSunkenfleetWordpressCom(item):
'''
Parser for 'sunkenfleet.wordpress.com'
'''
vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
if not (chp or vol) or "preview" in item['title'].lower():
return None
tagmap = [
('noble wife', 'Noble Wife Wants No Love', 'translated'), |
('dflb', 'Don\'t Fall In Love With The Boss', | 'translated'),
('PRC', 'PRC', 'translated'),
('Loiterous', 'Loiterous', 'oel'),
]
for tagname, name, tl_type in tagmap:
if tagname in item['tags']:
return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
return False |
hustodemon/spacewalk | backend/satellite_tools/__init__.py | Python | gpl-2.0 | 49 | 0 | # Copyright (c) 2005, Red Hat Inc.
|
__all__ | = []
|
beagles/neutron_hacking | neutron/agent/rpc.py | Python | apache-2.0 | 3,789 | 0 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2012 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import itertools
from oslo.config import cfg
from oslo import messaging
from neutron.common import rpc
from neutron.common import topics
from neutron.openstack.common import log as logging
from neutron.openstack.common import timeutils
LOG = logging.getLogger(__name__)
def create_servers(endpoints, prefix, topic_details):
"""Create agent RPC servers.
:param endpoints: A list of RPC endpoints to serve messages.
:param prefix: Common prefix for the plugin/agent message queues.
:param topic_details: A list of topics. Each topic has a name, an
operation, and an optional host param keying the
subscription to topic.host for plugin calls.
:returns: A list of RPCServer objects.
"""
rpc_servers = []
for details in topic_details:
topic, operation, node_name = itertools.islice(
itertools.chain(details, [None]), 3)
topic_name = topics.get_topic_name(prefix, topic, operation)
if not node_name:
node_name = cfg.CONF.host
target = messaging.Target(topic=topic_name, server=node_name)
server = rpc.get_server(target, endpoints=endpoints)
server.start()
rpc_servers.append(server)
return rpc_servers
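# Hypothetical usage sketch for create_servers (MyEndpoint is illustrative, not
# part of this module):
#   create_servers([MyEndpoint()], topics.AGENT,
#                  [(topics.PORT, topics.UPDATE),
#                   (topics.NETWORK, topics.DELETE, cfg.CONF.host)])
# Each tuple is (topic, operation[, node_name]); a missing node_name defaults
# to cfg.CONF.host.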
class PluginReportStateAPI(object):
def __init__(self, topic):
super(PluginReportStateAPI, self).__init__()
target = messaging.Target(topic=topic, version='1.0')
self.client = rpc.get_client(target)
def report_state(self, context, agent_state, use_call=False):
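# 'call' blocks until the plugin acknowledges the report; 'cast' is fire-and-forget.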
fun = self.client.call if use_call else self.client.cast
return fun(context, 'report_state',
agent_state={'agent_state': agent_state},
time=timeutils.strtime())
class PluginApi(object):
'''Agent side of the rpc API.
API version history:
1.0 - Initial version.
'''
def __init__(self, topic):
super(PluginApi, self).__init__()
target = messaging.Target(topic=topic, version='1.1')
self.client = rpc.get_client(target)
def get_device_details(self, context, device, agent_id):
return self.client.call(context, 'get_device_details',
device=device,
agent_id=agent_id)
def update_device_down(self, context, device, ag | ent_id, host=None):
return self.client.call(context, 'update_device_down',
device=device,
agent_id=agent_id,
host=host)
def update_dev | ice_up(self, context, device, agent_id, host=None):
return self.client.call(context, 'update_device_up',
device=device,
agent_id=agent_id,
host=host)
def tunnel_sync(self, context, tunnel_ip, tunnel_type=None):
return self.client.call(context, 'tunnel_sync',
tunnel_ip=tunnel_ip,
tunnel_type=tunnel_type)
|
Juniper/tempest | tempest/api/compute/admin/test_flavors.py | Python | apache-2.0 | 10,059 | 0 | # Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import uuid
from tempest.api.compute import base
from tempest.common import utils
from tempest.lib.common.utils import data_utils
from tempest.lib import decorators
from tempest.lib import exceptions as lib_exc
class FlavorsAdminTestJSON(base.BaseV2ComputeAdminTest):
"""Tests Flavors API Create and Delete that require admin privileges"""
@classmethod
def skip_checks(cls):
super(FlavorsAdminTestJSON, cls).skip_checks()
if not utils.is_extension_enabled('OS-FLV-EXT-DATA', 'compute'):
msg = "OS-FLV-EXT-DATA extension not enabled."
raise cls.skipException(msg)
@classmethod
def resource_setup(cls):
super(FlavorsAdminTestJSON, cls).resource_setup()
cls.flavor_name_prefix = 'test_flavor_'
cls.ram = 512
cls.vcpus = 1
cls.disk = 10
cls.ephemeral = 10
cls.swap = 1024
cls.rxtx = 2
@decorators.idempotent_id('8b4330e1-12c4-4554-9390-e6639971f086')
def test_create_flavor_with_int_id(self):
flavor_id = data_utils.rand_int_id(start=1000)
new_flavor_id = self.create_flavor(ram=self.ram,
vcpus=self.vcpus,
disk=self.disk,
id=flavor_id)['id']
self.assertEqual(new_flavor_id, str(flavor_id))
@decorators.idempotent_id('94c9bb4e-2c2a-4f3c-bb1f-5f0daf918e6d')
def test_create_flavor_with_uuid_id(self):
flavor_id = data_utils.rand_uuid()
new_flavor_id = self.create_flavor(ram=self.ram,
vcpus=self.vcpus,
disk=self.disk,
id=flavor_id)['id']
self.assertEqual(new_flavor_id, flavor_id)
@decorators.idempotent_id('f83fe669-6758-448a-a85e-32d351f36fe0')
def test_create_flavor_with_none_id(self):
# If nova receives a request with None as flavor_id,
# nova generates a uuid as the flavor_id.
flavor_id = None
new_flavor_id = self.create_flavor(ram=self.ram,
vcpus=self.vcpus,
disk=self.disk,
id=flavor_id)['id']
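# str(uuid.UUID(x)) == x only holds for a canonical uuid string, so this
# asserts that nova generated a well-formed uuid.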
self.assertEqual(new_flavor_id, str(uuid.UUID(new_flavor_id)))
@decorators.idempotent_id('8261d7b0-be58-43ec-a2e5-300573c3f6c5')
def test_create_flavor_verify_entry_in_list_details(self):
# Create a flavor and ensure it's details are listed
# This operation requires the user to have 'admin' role
flavor_name = data_utils.rand_name(self.flavor_name_prefix)
# Create the flavor
self.create_flavor(name=flavor_name,
ram=self.ram, vcpus=self.vcpus,
disk=self.disk,
ephemeral=self.ephemeral,
swap=self.swap,
rxtx_factor=self.rxtx)
# Check if flavor is present in list
flavors_list = self.admin_flavors_client.list_flavors(
detail=True)['flavors']
self.assertIn(flavor_name, [f['name'] for f in flavors_list])
@decorators.idempotent_id('63dc64e6-2e79-4fdf-868f-85500d308d66')
def test_create_list_flavor_without_extra_data(self):
# Create a flavor and ensure it is listed
# This operation requires the user to have 'admin' role
def verify_flavor_response_extension(flavor):
# check some extensions for the flavor create/show/detail response
self.assertEqual(flavor['swap'], '')
self.assertEqual(int(flavor['rxtx_factor']), 1)
self.assertEqual(flavor['OS-FLV-EXT-DATA:ephemeral'], 0)
self.assertEqual(flavor['os-flavor-access:is_public'], True)
flavor_name = data_utils.rand_name(self.flavor_name_prefix)
new_flavor_id = data_utils.rand_int_id(start=1000)
# Create the flavor
flavor = self.create_flavor(name=flavor_name,
ram=self.ram, vcpus=self.vcpus,
disk=self.disk,
id=new_flavor_id)
self.assertEqual(flavor['name'], flavor_name)
self | .assertEqual(flavor['ram'], self.ram)
self.assertEqual(flavor['vcpus'], self.vcpus)
self.assertEqual(flavor['disk'], self.disk)
self.assertEqual(int(flavor['id']), new_flavor_id)
verify_flavor_response_extension(flavor)
# Verify flavor is retrieved
flavor = se | lf.admin_flavors_client.show_flavor(new_flavor_id)['flavor']
self.assertEqual(flavor['name'], flavor_name)
verify_flavor_response_extension(flavor)
# Check if flavor is present in list
flavors_list = [
f for f in self.flavors_client.list_flavors(detail=True)['flavors']
if f['name'] == flavor_name
]
self.assertNotEmpty(flavors_list)
verify_flavor_response_extension(flavors_list[0])
@decorators.idempotent_id('be6cc18c-7c5d-48c0-ac16-17eaf03c54eb')
def test_list_non_public_flavor(self):
# Create a flavor with os-flavor-access:is_public false.
# The flavor should not be present in list_details as the
# tenant is not automatically added to the access list.
# This operation requires the user to have 'admin' role
flavor_name = data_utils.rand_name(self.flavor_name_prefix)
# Create the flavor
self.create_flavor(name=flavor_name,
ram=self.ram, vcpus=self.vcpus,
disk=self.disk,
is_public="False")
# Verify flavor is not retrieved
flavors_list = self.admin_flavors_client.list_flavors(
detail=True)['flavors']
self.assertNotIn(flavor_name, [f['name'] for f in flavors_list])
# Verify flavor is not retrieved with other user
flavors_list = self.flavors_client.list_flavors(detail=True)['flavors']
self.assertNotIn(flavor_name, [f['name'] for f in flavors_list])
@decorators.idempotent_id('bcc418ef-799b-47cc-baa1-ce01368b8987')
def test_create_server_with_non_public_flavor(self):
# Create a flavor with os-flavor-access:is_public false
flavor = self.create_flavor(ram=self.ram, vcpus=self.vcpus,
disk=self.disk,
is_public="False")
# Verify flavor is not used by other user
self.assertRaises(lib_exc.BadRequest,
self.os_primary.servers_client.create_server,
name='test', imageRef=self.image_ref,
flavorRef=flavor['id'])
@decorators.idempotent_id('b345b196-bfbd-4231-8ac1-6d7fe15ff3a3')
def test_list_public_flavor_with_other_user(self):
# Create a Flavor with public access.
# Try to List/Get flavor with another user
flavor_name = data_utils.rand_name(self.flavor_name_prefix)
# Create the flavor
self.create_flavor(name=flavor_name,
ram=self.ram, vcpus=self.vcpus,
disk=self.disk,
is_public="True")
# Verify flavor is retrieved with new user
flavors_list = self.flavors_client.list_flavors(detail=True)['flavors']
self.assertIn(flavor_name, [f['name'] for f in flavors_l |
ubik2/PEGAS-kRPC | kRPC/Main.py | Python | mit | 10,069 | 0.002781 | import numpy as np
import time
import krpc
import init_simulation
import unified_powered_flight_guidance as upfg
from launch_targeting import launch_targeting, LaunchSite
from flight_manager import flight_manager
from flight_sim_3d import GravityTurnControl, cart2sph
g0 = init_simulation.g0
mu = init_simulation.mu
R = init_simulation.R
period = init_simulation.period
conn = None
space_center = None
vessel = None
def init_from_ksp():
global conn
global space_center
global vessel
global mu
global R
global period
conn = krpc.connect(name='Launch to orbit')
space_center = conn.space_center
vessel = space_center.active_vessel
mu = vessel.orbit.body.gravitational_parameter
R = vessel.orbit.body.equatorial_radius
period = vessel.orbit.body.rotational_period
def vec_yz(v):
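# Reorders (x, y, z) to (x, z, y); kRPC reference frames are left-handed with
# y up, while the guidance math expects the z-up convention.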
return np.array([v[0], v[2], v[1]])
def get_engines(vessel, decouple_stage):
global g0
engines = []
for engine in vessel.parts.engines:
if engine.part.decouple_stage == decouple_stage:
flow = engine.max_vacuum_thrust / (engine.vacuum_specific_impulse * g0)
# TODO: determine throttle limits on the engine
engines.append(upfg.VehicleEngine(1, engine.vacuum_specific_impulse, engine.kerbin_sea_level_specific_impulse, flow, [0, 1]))
return engines
def get_max_stage(part_list):
max_stage = 0
for part in part_list:
if part.stage > max_stage:
max_stage = part.stage
return max_stage
def get_mass(vessel, decouple_stage):
mass = 0
for part in vessel.parts.all:
if part.decouple_stage == decouple_stage:
mass = mass + part.mass
return mass
def get_fuel_mass(vessel, decouple_stage):
fuel_mass = 0
for part in vessel.parts.all:
if part.decouple_stage == decouple_stage:
fuel_mass = fuel_mass + part.mass - part.dry_mass
return fuel_mass
def check_engine(vessel):
"""
Check to see if there are active engines
:param vessel: Vessel object from kRPC
:return: whether any engines are active and have thrust availa | ble
"""
for engine in vessel.parts.engines:
if engine.active and engine.available_thrust > 0:
return True
return False
def stage_if_needed(vessel):
"""
Check to see if we need to stage, and if so, activate the next stage
:param vessel: Vessel object from kRPC
:return: whether | we activated a new stage
"""
if check_engine(vessel):
return False
print('There is no active engine, checking Propellant condition')
for engine in vessel.parts.engines:
if engine.has_fuel:
for engine_module in engine.part.modules:
if engine_module.has_field('Propellant'):
if engine_module.get_field('Propellant') == 'Very Stable':
print('Engine is ready; staging')
vessel.control.forward = 0
vessel.control.throttle = 1
vessel.control.activate_next_stage()
return True
print('No engine is ready')
return False
def apply_guidance(pitch, yaw, desired_throttle):
global vessel
# KSP target_pitch has 90 as up and 0 as flat, while guidance.pitch has 0 as up and 90 as flat
vessel.auto_pilot.target_pitch = 90 - pitch
vessel.auto_pilot.target_heading = 90 - yaw
vessel.control.throttle = desired_throttle
time.sleep(0.2) # run our guidance 5x a second
# See if we need to auto-stage
stage_if_needed(vessel)
# we do this after the sleep, to get the last tick of thrust
vessel.control.throttle = desired_throttle
# print('pitch: %f, yaw: %f, throttle: %f' % (pitch, yaw, desired_throttle))
def get_state():
global liftoff_time
global vessel
r = vec_yz(vessel.flight(vessel.orbit.body.non_rotating_reference_frame).center_of_mass)
v = vec_yz(vessel.flight(vessel.orbit.body.non_rotating_reference_frame).velocity)
m = vessel.mass
t = vessel.met - liftoff_time
# print('current state: r=%s, v=%s, m=%f, t=%f' % (r, v, m, t))
return r, v, m, t
def site_from_position(vessel):
# The initial site is based on the rotating reference frame, but we need a position in the non-rotating reference frame
r = vec_yz(vessel.flight(vessel.orbit.body.non_rotating_reference_frame).center_of_mass)
longitude, latitude, r = cart2sph(r[0], r[1], r[2])
updated_site = LaunchSite(np.rad2deg(latitude), np.rad2deg(longitude), r - vessel.orbit.body.equatorial_radius)
return updated_site
def warp_to_launch_time(space_center, launch_time):
print('Launch time is %f' % launch_time)
game_launch_time = space_center.ut + launch_time
space_center.warp_to(game_launch_time - 10)
while (space_center.ut - game_launch_time) < 0:
print('Time to launch %f' % (space_center.ut - game_launch_time))
time.sleep(1)
def vehicle_from_vessel(vessel, gravity_turn_t):
"""
Generate a basic vehicle structure from the provided vessel.
More complicated flights will require manual construction of this structure.
:param vessel: kRPC Vessel object
:param gravity_turn_t: Time to spend in the initial gravity turn (s)
:return: list of VehicleStage objects representing the various flight stages
"""
vehicle_stages = []
max_stage = get_max_stage(vessel.parts.all)
engines = [None]*(max_stage+2)
masses = [None]*(max_stage+2)
fuel_masses = [None]*(max_stage+2)
for decouple_stage in range(-1, max_stage+1):
engines[decouple_stage+1] = get_engines(vessel, decouple_stage)
masses[decouple_stage+1] = get_mass(vessel, decouple_stage)
fuel_masses[decouple_stage+1] = get_fuel_mass(vessel, decouple_stage)
# Assumption here is that we jettison the fuel tank at the same times as their corresponding engines
for stage in range(max_stage+1, -1, -1):
if len(engines[stage]) == 0:
continue
fuel_mass = fuel_masses[stage]
flow = reduce(lambda x, engine: x + engine.flow, engines[stage], 0.0)
stage_time = fuel_mass / flow
mass = 0.0
for i in range(0, stage+1):
mass = mass + masses[i]
vehicle_stages.append(upfg.VehicleStage(1, mass, engines[stage], stage_time, 0, 0, [[0, 0]]))
if gravity_turn_t > 0.0:
split_stage = vehicle_stages[0]
split_stage_flow = reduce(lambda x, engine: x + engine.flow, split_stage.engines, 0.0)
gravity_stage = upfg.VehicleStage(1, split_stage.m0, split_stage.engines, gravity_turn_t,
split_stage.gLim, split_stage.area, split_stage.drag)
upfg_stage = upfg.VehicleStage(1, split_stage.m0 - split_stage_flow * gravity_turn_t,
split_stage.engines, split_stage.maxT - gravity_turn_t,
split_stage.gLim, split_stage.area, split_stage.drag)
vehicle_stages = [gravity_stage, upfg_stage] + vehicle_stages[1:]
return vehicle_stages
# Connect to ksp, and set up some globals
init_from_ksp()
# Generate a basic vehicle description, with a 90 second gravity turn, followed by UPFG for each stage
vehicle = vehicle_from_vessel(vessel, 90)
# Target orbit parameters
apoapsis = 250
periapsis = 170
inclination = 28.61 # 28.61 = 23.45+5.16
lan = 0 # Set this to None if you don't care
print('Launching into orbit with apoapsis of %fkm, periapsis %fkm, inclination %f and LAN %s degrees' %
(apoapsis, periapsis, inclination, lan))
launch_site = LaunchSite(vessel.flight().latitude, vessel.flight().longitude, 0)
if inclination < launch_site.latitude:
print('Target inclination is below site latitude. Setting inclination to match latitude.')
inclination = launch_site.latitude
# Determine how long we need to wait before launching, to make our longitude of asce
deviantintegral/feedback | src/listener.py | Python | mit | 622 | 0.009646 | from flask import Flask, request, send_from_directory
from time import sleep
app = Flask(__name__, static_url_path='/public')
@app.route('/')
def root():
return send_from_directory('./', 'index.html')
@app.route('/feedback.js')
def script():
return send_from_directory('./', 'feedback.js')
@app.route('/feedback.css')
def style():
| return send_from_directory('./', 'feedback.css')
@app.route('/icons.png')
def icons():
return send_from_directory('./', 'icons.png')
@app.route('/listener', methods=['GET', 'POST'])
def listener():
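# Deliberate 30-second stall, presumably to exercise the feedback widget's timeout handling.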
sleep(30)
re | turn '1'
if __name__ == '__main__':
app.run()
|
deepmind/distrax | distrax/_src/distributions/mvn_from_bijector_test.py | Python | apache-2.0 | 12,417 | 0.004429 | # Copyright 2021 DeepMind Technologies Limited. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for `mvn_from_bijector.py`."""
from typing import Tuple
from absl.testing import absltest
from absl.testing import parameterized
import chex
from distrax._src.bijectors import bijector
from distrax._src.bijectors.diag_affine import DiagAffine
from distrax._src.bijectors.triangular_affine import TriangularAffine
from distrax._src.distributions.mvn_from_bijector import MultivariateNormalFromBijector
import haiku as hk
import jax
import jax.numpy as jnp
import numpy as np
from tensorflow_probability.substrates import jax as tfp
tfd = tfp.distributions
Array = chex.Array
class DummyBijector(bijector.Bijector):
"""A dummy bijector."""
def forward_and_log_det(self, x: Array) -> Tuple[Array, Array]:
"""Computes y = f(x) and log|det J(f)(x)|."""
return x, jnp.zeros_like(x)[:-1]
class MultivariateNormalFromBijectorTest(parameterized.TestCase):
@parameterized.named_parameters(
('wrong event_ndims_in', 2, 1, True, np.zeros((4,))),
('wrong event_ndims_out', 1, 2, True, np.zeros((4,))),
('not constant Jacobian', 1, 1, False, np.ze | ros((4,))),
('loc is 0d', 1, 1, True, np.zeros(shape=())),
| ('loc has more dims than batch_shape', 1, 1, True,
np.zeros(shape=(2, 4))),
)
def test_raises_on_wrong_inputs(
self, event_ndims_in, event_ndims_out, is_constant_jacobian, loc):
bij = DummyBijector(event_ndims_in, event_ndims_out, is_constant_jacobian)
with self.assertRaises(ValueError):
MultivariateNormalFromBijector(loc, bij, batch_shape=())
@parameterized.named_parameters(
('no broadcast', np.ones((4,)), np.zeros((4,)), (4,)),
('broadcasted loc', np.ones((3, 4)), np.zeros((4,)), (3, 4)),
('broadcasted diag', np.ones((4,)), np.zeros((3, 4)), (3, 4)),
)
def test_loc_scale_and_shapes(self, diag, loc, expected_shape):
scale = DiagAffine(diag=diag, bias=jnp.zeros_like(diag))
batch_shape = jnp.broadcast_shapes(diag.shape, loc.shape)[:-1]
dist = MultivariateNormalFromBijector(loc, scale, batch_shape)
np.testing.assert_allclose(dist.loc, np.zeros(expected_shape))
self.assertTrue(scale.same_as(dist.scale))
self.assertEqual(dist.event_shape, (4,))
self.assertEqual(dist.batch_shape, batch_shape)
@chex.all_variants
def test_sample(self):
prng = hk.PRNGSequence(jax.random.PRNGKey(42))
diag = 0.5 + jax.random.uniform(next(prng), (4,))
loc = jax.random.normal(next(prng), (4,))
scale = DiagAffine(diag=diag, bias=jnp.zeros_like(diag))
dist = MultivariateNormalFromBijector(loc, scale, batch_shape=())
num_samples = 100_000
sample_fn = lambda seed: dist.sample(seed=seed, sample_shape=num_samples)
samples = self.variant(sample_fn)(jax.random.PRNGKey(2000))
self.assertEqual(samples.shape, (num_samples, 4))
np.testing.assert_allclose(jnp.mean(samples, axis=0), loc, rtol=0.1)
np.testing.assert_allclose(jnp.std(samples, axis=0), diag, rtol=0.1)
@chex.all_variants
def test_log_prob(self):
prng = hk.PRNGSequence(jax.random.PRNGKey(42))
diag = 0.5 + jax.random.uniform(next(prng), (4,))
loc = jax.random.normal(next(prng), (4,))
scale = DiagAffine(diag=diag, bias=jnp.zeros_like(diag))
dist = MultivariateNormalFromBijector(loc, scale, batch_shape=())
values = jax.random.normal(next(prng), (5, 4))
tfp_dist = tfd.MultivariateNormalDiag(loc=loc, scale_diag=diag)
np.testing.assert_allclose(
self.variant(dist.log_prob)(values), tfp_dist.log_prob(values))
@chex.all_variants(with_pmap=False)
@parameterized.named_parameters(
('no broadcast', (4,), (4,)),
('broadcasted loc', (3, 4), (4,)),
('broadcasted diag', (4,), (3, 4)),
)
def test_mean_median_mode(self, diag_shape, loc_shape):
prng = hk.PRNGSequence(jax.random.PRNGKey(42))
diag = jax.random.normal(next(prng), diag_shape)
loc = jax.random.normal(next(prng), loc_shape)
scale = DiagAffine(diag=diag, bias=jnp.zeros_like(diag))
batch_shape = jnp.broadcast_shapes(diag_shape, loc_shape)[:-1]
dist = MultivariateNormalFromBijector(loc, scale, batch_shape)
for method in ['mean', 'median', 'mode']:
with self.subTest(method=method):
fn = self.variant(getattr(dist, method))
np.testing.assert_allclose(
fn(), jnp.broadcast_to(loc, batch_shape + loc.shape[-1:]))
@chex.all_variants(with_pmap=False)
@parameterized.named_parameters(
('no broadcast', (4,), (4,)),
('broadcasted loc', (3, 4), (4,)),
('broadcasted diag', (4,), (3, 4)),
)
def test_variance_stddev_covariance_diag(self, scale_shape, loc_shape):
prng = hk.PRNGSequence(jax.random.PRNGKey(42))
scale_diag = jax.random.normal(next(prng), scale_shape)
loc = jax.random.normal(next(prng), loc_shape)
scale = DiagAffine(diag=scale_diag, bias=jnp.zeros_like(scale_diag))
batch_shape = jnp.broadcast_shapes(scale_shape[:-1], loc_shape[:-1])
dist = MultivariateNormalFromBijector(loc, scale, batch_shape)
for method in ['variance', 'stddev', 'covariance']:
with self.subTest(method=method):
fn = self.variant(getattr(dist, method))
if method == 'variance':
expected_result = jnp.broadcast_to(
jnp.square(scale_diag), batch_shape + loc.shape[-1:])
elif method == 'stddev':
expected_result = jnp.broadcast_to(
jnp.abs(scale_diag), batch_shape + loc.shape[-1:])
elif method == 'covariance':
expected_result = jnp.broadcast_to(
jnp.vectorize(jnp.diag, signature='(k)->(k,k)')(
jnp.square(scale_diag)),
batch_shape + loc.shape[-1:] + loc.shape[-1:])
np.testing.assert_allclose(fn(), expected_result, rtol=5e-3)
@chex.all_variants(with_pmap=False)
@parameterized.named_parameters(
('no broadcast', (4, 4), (4,)),
('broadcasted loc', (3, 4, 4), (4,)),
('broadcasted diag', (4, 4), (3, 4)),
)
def test_variance_stddev_covariance_no_diag(self, scale_shape, loc_shape):
prng = hk.PRNGSequence(jax.random.PRNGKey(42))
scale_tril = jnp.tril(jax.random.normal(next(prng), scale_shape))
loc = jax.random.normal(next(prng), loc_shape)
scale = TriangularAffine(
matrix=scale_tril,
bias=jnp.zeros_like(scale_tril[..., 0]),
is_lower=True)
batch_shape = jnp.broadcast_shapes(scale_shape[:-2], loc_shape[:-1])
dist = MultivariateNormalFromBijector(loc, scale, batch_shape)
for method in ['variance', 'stddev', 'covariance']:
with self.subTest(method=method):
fn = self.variant(getattr(dist, method))
scale_tril_t = jnp.vectorize(
jnp.transpose, signature='(k,k)->(k,k)')(scale_tril)
scale_times_scale_t = jnp.matmul(scale_tril, scale_tril_t)
if method == 'variance':
expected_result = jnp.vectorize(jnp.diag, signature='(k,k)->(k)')(
scale_times_scale_t)
expected_result = jnp.broadcast_to(
expected_result, batch_shape + loc.shape[-1:])
elif method == 'stddev':
expected_result = jnp.vectorize(jnp.diag, signature='(k,k)->(k)')(
jnp.sqrt(scale_times_scale_t))
expected_result = jnp.broadcast_to(
expected_result, batch_shape + loc.shape[-1:])
elif method == 'covariance':
expected_result = jnp.broadcast_to(
scale_times_scale_t, batch_shape + scale_tril.shape[-2:])
np.testing.assert_allclose( |
morpheby/levelup-by | cms/envs/aws.py | Python | agpl-3.0 | 6,436 | 0.002175 | """
This is the default template for our main set of AWS servers.
"""
# We intentionally define lots of variables that aren't used, and
# want to import all variables from base settings files
# pylint: disable=W0401, W0614
import json
from .common import *
from logsettings import get_logger_config
import os
# The service variant is specified as an environment variable. Typically this is set
# in the service's upstart script and corresponds exactly to the service name.
# Service variants apply config differences via env and auth JSON files,
# the names of which correspond to the variant.
SERVICE_VARIANT = os.environ.get('SERVICE_VARIANT', None)
# When no variant is specified we attempt to load an unvaried
# config set.
CONFIG_PREFIX = ""
if SERVICE_VARIANT:
CONFIG_PREFIX = SERVICE_VARIANT + "."
############### ALWAYS THE SAME ################################
DEBUG = False
TEMPLATE_DEBUG = False
EMAIL_BACKEND = 'django_ses.SESBackend'
SESSION_ENGINE = 'django.contrib.sessions.backends.cache'
DEFAULT_FILE_STORAGE = 'storages.backends.s3boto.S3BotoStorage'
###################################### CELERY ################################
# Don't use a connection pool, since connections are dropped by ELB.
BROKER_POOL_LIMIT = 0
BROKER_CONNECTION_TIMEOUT = 1
# For the Result Store, use the django cache named 'celery'
CELERY_RESULT_BACKEND = 'cache'
CELERY_CACHE_BACKEND = 'celery'
# When the broker is behind an ELB, use a heartbeat to refresh the
# connection and to detect if it has been dropped.
BROKER_HEARTBEAT = 10.0
BROKER_HEARTBEAT_CHECKRATE = 2
# Each worker should only fetch one message at a time
CELERYD_PREFETCH_MULTIPLIER = 1
# Skip djcelery migrations, since we don't use the database as the broker
SOUTH_MIGRATION_MODULES = {
'djcelery': 'ignore',
}
# Rename the exchange and queues for each variant
QUEUE_VARIANT = CONFIG_PREFIX.lower()
CELERY_DEFAULT_EXCHANGE = 'edx.{0}core'.format(QUEUE_VARIANT)
HIGH_PRIORITY_QUEUE = 'edx.{0}core.high'.format(QUEUE_VARIANT)
DEFAULT_PRIORITY_QUEUE = 'edx.{0}core.default'.format(QUEUE_VARIANT)
LOW_PRIORITY_QUEUE = 'edx.{0}core.low'.format(QUEUE_VARIANT)
CELERY_DEFAULT_QUEUE = DEFAULT_PRIORITY_QUEUE
CELERY_DEFAULT_ROUTING_KEY = DEFAULT_PRIORITY_QUEUE
CELERY_QUEUES = {
HIGH_PRIORITY_QUEUE: {},
LOW_PRIORITY_QUEUE: {},
DEFAULT_PRIORITY_QUEUE: {}
}
############# NON-SECURE ENV CONFIG ##############################
# Things like server locations, ports, etc.
with open(ENV_ROOT / CONFIG_PREFIX + "env.json") as env_file:
ENV_TOKENS = json.load(env_file)
EMAIL_BACKEND = ENV_TOKENS.get('EMAIL_BACKEND', EMAIL_BACKEND)
EMAIL_FILE_PATH = ENV_TOKENS.get('EMAIL_FILE_PATH', None)
LMS_BASE = ENV_TOKENS.get('LMS_BASE')
# Note that MITX_FEATURES['PREVIEW_LMS_BASE'] gets read in from the environment file.
SITE_NAME = ENV_TOKENS['SITE_NAME']
LOG_DIR = ENV_TOKENS['LOG_DIR']
CACHES = ENV_TOKENS['CACHES']
SESSION_COOKIE_DOMAIN = ENV_TOKENS.get('SESSION_COOKIE_DOMAIN')
SESSION_ENGINE = ENV_TOKENS.get('SESSION_ENGINE', SESSION_ENGINE)
# allow for environments to specify what cookie name our login subsystem should use
# this is to fix a bug regarding simultaneous logins between edx.org and edge.edx.org which can
# happen with some browsers (e.g. Firefox)
if ENV_TOKENS.get('SESSION_COOKIE_NAME', None):
# NOTE, there's a bug in Django (http://bugs.python.org/issue18012) which necessitates this being a str()
SESSION_COOKIE_NAME = str(ENV_TOKENS.get('SESSION_COOKIE_NAME'))
#Email overrides
DEFAULT_FROM_EMAIL = ENV_TOKENS.get('DEFAULT_FROM_EMAIL', DEFAULT_FROM_EMAIL)
DEFAULT_FEEDBACK_EMAIL = ENV_TOKENS.get('DEFAULT_FEEDBACK_EMAIL', DEFAULT_FEEDBACK_EMAIL)
ADMINS = ENV_TOKENS.get('ADMINS', ADMINS)
SERVER_EMAIL = ENV_TOKENS.get('SERVER_EMAIL', SERVER_EMAIL)
MKTG_URLS = ENV_TOKENS.get('MKTG_URLS', MKTG_URLS)
TECH_SUPPORT_EMAIL = ENV_TOKENS.get('TECH_SUPPORT_EMAIL', TECH_SUPPORT_EMAIL)
COURSES_WITH_UNSAFE_CODE | = ENV_TOKENS.get("COURSES_WITH_UNSAFE_CODE", [])
#Timezone overrides
TIME_ZON | E = ENV_TOKENS.get('TIME_ZONE', TIME_ZONE)
for feature, value in ENV_TOKENS.get('MITX_FEATURES', {}).items():
MITX_FEATURES[feature] = value
LOGGING = get_logger_config(LOG_DIR,
logging_env=ENV_TOKENS['LOGGING_ENV'],
syslog_addr=(ENV_TOKENS['SYSLOG_SERVER'], 514),
debug=False,
service_variant=SERVICE_VARIANT)
#theming start:
PLATFORM_NAME = ENV_TOKENS.get('PLATFORM_NAME', 'edX')
# Event Tracking
if "TRACKING_IGNORE_URL_PATTERNS" in ENV_TOKENS:
TRACKING_IGNORE_URL_PATTERNS = ENV_TOKENS.get("TRACKING_IGNORE_URL_PATTERNS")
################ SECURE AUTH ITEMS ###############################
# Secret things: passwords, access keys, etc.
with open(ENV_ROOT / CONFIG_PREFIX + "auth.json") as auth_file:
AUTH_TOKENS = json.load(auth_file)
# If Segment.io key specified, load it and turn on Segment.io if the feature flag is set
# Note that this is the Studio key. There is a separate key for the LMS.
SEGMENT_IO_KEY = AUTH_TOKENS.get('SEGMENT_IO_KEY')
if SEGMENT_IO_KEY:
MITX_FEATURES['SEGMENT_IO'] = ENV_TOKENS.get('SEGMENT_IO', False)
AWS_ACCESS_KEY_ID = AUTH_TOKENS["AWS_ACCESS_KEY_ID"]
AWS_SECRET_ACCESS_KEY = AUTH_TOKENS["AWS_SECRET_ACCESS_KEY"]
DATABASES = AUTH_TOKENS['DATABASES']
MODULESTORE = AUTH_TOKENS['MODULESTORE']
CONTENTSTORE = AUTH_TOKENS['CONTENTSTORE']
# Datadog for events!
DATADOG = AUTH_TOKENS.get("DATADOG", {})
DATADOG.update(ENV_TOKENS.get("DATADOG", {}))
# TODO: deprecated (compatibility with previous settings)
if 'DATADOG_API' in AUTH_TOKENS:
DATADOG['api_key'] = AUTH_TOKENS['DATADOG_API']
# Celery Broker
CELERY_BROKER_TRANSPORT = ENV_TOKENS.get("CELERY_BROKER_TRANSPORT", "")
CELERY_BROKER_HOSTNAME = ENV_TOKENS.get("CELERY_BROKER_HOSTNAME", "")
CELERY_BROKER_VHOST = ENV_TOKENS.get("CELERY_BROKER_VHOST", "")
CELERY_BROKER_USER = AUTH_TOKENS.get("CELERY_BROKER_USER", "")
CELERY_BROKER_PASSWORD = AUTH_TOKENS.get("CELERY_BROKER_PASSWORD", "")
BROKER_URL = "{0}://{1}:{2}@{3}/{4}".format(CELERY_BROKER_TRANSPORT,
CELERY_BROKER_USER,
CELERY_BROKER_PASSWORD,
CELERY_BROKER_HOSTNAME,
CELERY_BROKER_VHOST)
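# Yields e.g. BROKER_URL = "amqp://celery:secret@broker-host/cms" (illustrative values only).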
# Event tracking
TRACKING_BACKENDS.update(AUTH_TOKENS.get("TRACKING_BACKENDS", {}))
|
Rbeaty88/ginga | ginga/qtw/ipg.py | Python | bsd-3-clause | 12,780 | 0.002817 | #! /usr/bin/env python
#
# ipg.py -- Example of Ginga widget interaction with IPython.
#
# Eric Jeschke (eric@naoj.org)
#
# Copyright (c) Eric R. Jeschke. All rights reserved.
# This is open-source software licensed under a BSD license.
# Please see the file LICENSE.txt for details.
#
# CREDIT:
# Contains code from IPython tutorial programs "qtapp_ip.py" and "kapp.py".
# Author not listed.
#
import sys, os
import logging
from ginga import AstroImage
from ginga.qtw.QtHelp import QtGui, QtCore
from ginga.qtw.ImageViewCanvasQt import ImageViewCanvas
from ginga.misc import log, Settings
from IPython.lib.kernel import connect_qtconsole
try:
# older IPython releases
from IPython.zmq.ipkernel import IPKernelApp
except ImportError:
# newer releases
from IPython.kernel.zmq.kernelapp import IPKernelApp
import matplotlib
# Hack to force matplotlib to not use any Xwindows backend.
matplotli | b.use('Agg')
import matplotlib.pyplot as plt
from IPython.display import Image
import StringIO
STD_FORMAT = '%(asctime)s | | %(levelname)1.1s | %(filename)s:%(lineno)d (%(funcName)s) | %(message)s'
# global ref to keep an object from being collected
app_ref = None
# workaround for suppressing logging to stdout in ipython notebook
# on Macs
use_null_logger = True
class IPyNbImageView(ImageViewCanvas):
def show(self):
return Image(data=bytes(self.get_rgb_image_as_bytes(format='png')),
format='png', embed=True)
def load(self, filepath):
image = AstroImage.AstroImage(logger=self.logger)
image.load_file(filepath)
self.set_image(image)
class SimpleKernelApp(object):
"""A minimal object that uses an IPython kernel and has a few methods
to manipulate a namespace and open Qt consoles tied to the kernel.
Code is modified from IPython tutorial programs 'qtapp_ip.py' and
'kapp.py'.
"""
def __init__(self, gui, shell):
self.shell = shell
self.logger = None
if shell == None:
# Start IPython kernel with GUI event loop support
self.ipkernel = IPKernelApp.instance()
self.ipkernel.initialize(['python', '--gui=%s' % gui,
#'--log-level=10' # for debugging
])
# This application will also act on the shell user namespace
self.namespace = self.ipkernel.shell.user_ns
else:
self.ipkernel = shell.config.IPKernelApp
self.namespace = shell.user_ns
# To create and track active qt consoles
self.consoles = []
def new_qt_console(self, evt=None):
"""start a new qtconsole connected to our kernel"""
try:
if hasattr(self.ipkernel, 'profile'):
return connect_qtconsole(self.ipkernel.connection_file,
profile=self.ipkernel.profile)
else:
return connect_qtconsole(self.ipkernel.connection_file)
except Exception as e:
if self.logger:
self.logger.error("Couldn't start QT Console: %s" % (
str(e)))
def cleanup_consoles(self, evt=None):
for c in self.consoles:
c.kill()
def start(self):
if self.shell == None:
self.ipkernel.start()
class StartMenu(QtGui.QMainWindow):
def __init__(self, logger, app, kapp, prefs):
super(StartMenu, self).__init__()
self.logger = logger
self.preferences = prefs
self.count = 0
self.viewers = {}
self.app = app
self.kapp = kapp
self.app.aboutToQuit.connect(self.quit)
vbox = QtGui.QVBoxLayout()
vbox.setContentsMargins(QtCore.QMargins(2, 2, 2, 2))
vbox.setSpacing(1)
hbox = QtGui.QHBoxLayout()
hbox.setContentsMargins(QtCore.QMargins(4, 2, 4, 2))
console = QtGui.QPushButton('Qt Console')
if self.kapp.ipkernel == None:
console.setEnabled(False)
console.clicked.connect(self.kapp.new_qt_console)
newfits = QtGui.QPushButton('New Viewer')
newfits.clicked.connect(self.new_viewer)
wquit = QtGui.QPushButton("Quit")
wquit.clicked.connect(self.quit)
hbox.addStretch(1)
for w in (console, newfits, wquit):
hbox.addWidget(w, stretch=0)
hw = QtGui.QWidget()
hw.setLayout(hbox)
vbox.addWidget(hw, stretch=0)
vw = QtGui.QWidget()
self.setCentralWidget(vw)
vw.setLayout(vbox)
self.setWindowTitle("Ginga IPython Console")
self.showNormal()
self.raise_()
self.activateWindow()
def new_viewer(self, name=None, settings=None):
if not name:
self.count += 1
name = 'v%d' % self.count
if settings == None:
settings = self.preferences.createCategory('ipg_viewer')
settings.load(onError='silent')
settings.addDefaults(autocut_method='zscale')
# create a ginga basic object for user interaction
fi = IPyNbImageView(self.logger, settings=settings,
render='widget')
fi.enable_draw(True)
fi.set_drawtype('point')
fi.set_drawcolor('blue')
fi.set_callback('drag-drop', self.drop_file, name)
fi.set_bg(0.2, 0.2, 0.2)
fi.ui_setActive(True)
# expose the fits viewer to the shared namespace
self.kapp.namespace[name] = fi
# enable various interactive operations
bd = fi.get_bindings()
bd.enable_pan(True)
bd.enable_zoom(True)
bd.enable_cuts(True)
bd.enable_flip(True)
bd.enable_rotate(True)
bd.enable_cmap(True)
# get the ginga Qt widget
w = fi.get_widget()
w.resize(512, 512)
# pack it into a qt window with a couple other buttons
vw = QtGui.QWidget()
vbox = QtGui.QVBoxLayout()
vbox.setContentsMargins(QtCore.QMargins(2, 2, 2, 2))
vbox.setSpacing(1)
vbox.addWidget(w, stretch=1)
# for simple WCS readout
readout = QtGui.QLabel("")
fi.set_callback('none-move', self.motion, readout)
vbox.addWidget(readout, stretch=0,
alignment=QtCore.Qt.AlignCenter)
hbox = QtGui.QHBoxLayout()
hbox.setContentsMargins(QtCore.QMargins(4, 2, 4, 2))
wopen = QtGui.QPushButton("Open File")
def _open_file(name):
return lambda: self.open_file(name)
wopen.clicked.connect(_open_file(name))
wquit = QtGui.QPushButton("Close")
def _close(name):
return lambda: self.close_viewer(name)
wquit.clicked.connect(_close(name))
hbox.addStretch(1)
for w in (wopen, wquit):
hbox.addWidget(w, stretch=0)
hw = QtGui.QWidget()
hw.setLayout(hbox)
vbox.addWidget(hw, stretch=0)
vw.setLayout(vbox)
vw.resize(524, 540)
vw.setWindowTitle("Ginga: %s" % name)
self.viewers[name] = vw
vw.showNormal()
vw.raise_()
vw.activateWindow()
def close_viewer(self, name):
w = self.viewers[name]
del self.viewers[name]
# remove variable from namespace
del self.kapp.namespace[name]
w.setParent(None)
w.deleteLater()
def load_file(self, filepath, name):
image = AstroImage.AstroImage(logger=self.logger)
image.load_file(filepath)
fi = self.kapp.namespace[name]
fi.set_image(image)
def open_file(self, name):
res = QtGui.QFileDialog.getOpenFileName(self, "Open FITS file",
".", "FITS files (*.fits)")
if isinstance(res, tuple):
fileName = res[0].encode('ascii')
else:
fileName = str(res)
self.load_file(fileName, name)
def drop_file(self, fitsimage, paths, name):
fileName = paths[0]
self.load_file(fileName, name)
|
isc-projects/forge | tests/config.py | Python | isc | 3,628 | 0.001378 | # Copyright (C) 2013-2020 Internet Systems Consortium.
#
# Permission to use, copy, modify, and distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND INTERNET SYSTEMS CONSORTIUM
# DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL
# INTERNET SYSTEMS CONSORTIUM BE LIABLE FOR ANY SPECIAL, DIRECT,
# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING
# FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION
# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
#
# This file contains a number of common steps that are general and may be used
# By a lot of feature files.
#
import os
import sys
from forge_cfg import world, step
@step(r'stop process (\w+)')
def stop_a_named_process(process_name):
"""
Stop the process with the given name.
Parameters:
process_name ('process <name>'): Name of the process to stop.
"""
world.processes.stop_process(process_name)
@step(r'wait for (new )?(\w+) stderr message (\w+)(?: not (\w+))?')
def wait_for_err_message(new, process_name, message, not_message):
"""
Block until the given message is printed to the given process's stderr
output.
Parameter:
new: (' new', optional): Only check the output printed since last time
this step was used for this process.
process_name ('<name> stderr'): Name of the process to check the output of.
message ('message <message>'): Output (part) to wait for.
not_message ('not <message>'): Output (part) t | o wait for, and fail
Fails if the message is not found after 10 seconds.
| """
strings = [message]
if not_message is not None:
strings.append(not_message)
(found, line) = world.processes.wait_for_stderr_str(process_name, strings, new)
if not_message is not None:
assert found != not_message, line
@step(r'wait for (new )?(\w+) stdout message (\w+)(?: not (\w+))?')
def wait_for_out_message(new, process_name, message, not_message):
"""
Block until the given message is printed to the given process's stdout
output.
Parameter:
new: (' new', optional): Only check the output printed since last time
this step was used for this process.
process_name ('<name> stderr'): Name of the process to check the output of.
message ('message <message>'): Output (part) to wait for, and succeed.
not_message ('not <message>'): Output (part) to wait for, and fail
Fails if the message is not found after 10 seconds.
"""
strings = [message]
if not_message is not None:
strings.append(not_message)
(found, line) = world.processes.wait_for_stdout_str(process_name, strings, new)
if not_message is not None:
assert found != not_message, line
@step(r'the file (\S+) should (not )?exist')
def check_existence(file_name, should_not_exist):
"""
Check the existence of the given file.
Parameters:
file_name ('file <name>'): File to check existence of.
should_not_exist ('not', optional): Whether it should or should not exist.
Fails if the file should exist and does not, or vice versa.
"""
if should_not_exist is None:
assert os.path.exists(file_name), file_name + " does not exist"
else:
assert not os.path.exists(file_name), file_name + " exists"
|
lem9/weblate | weblate/trans/tests/test_angularjs_checks.py | Python | gpl-3.0 | 3,835 | 0 | # -*- coding: utf-8 -*-
#
# Copyright © 2012 - 2017 Michal Čihař <michal@cihar.com>
# Copyright © 2015 Philipp Wolfer <ph.wolfer@gmail.com>
#
# This file is part of Weblate <https://weblate.org/>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
"""
Tests for AngularJS checks.
"""
from unittest import TestCase
from weblate.trans.checks.angularjs import AngularJSInterpolationCheck
from weblate.trans.tests.test_checks import MockUnit
class AngularJSInterpolationCheckTest(TestCase):
def setUp(self):
self.check = AngularJSInterpolationCheck()
def test_no_format(self):
self.assertFalse(self.check.check_single(
'strins',
'string',
MockUnit('angularjs_no_format', flags='angularjs-format')
))
def test_format(self):
self.assertFalse(self.check.check_single(
u'{{name}} string {{other}}',
u'{{name}} {{other}} string',
MockUnit('angularjs_format', flags='angularjs-format')
))
def test_format_ignore_position(self):
self.assertFalse(self.check.check_single(
u'{{name}} string {{other}}',
u'{{other}} string {{name}}',
MockUnit('angularjs_format_ignore_position',
flags='angularjs-format')
))
def test_different_whitespace(self):
self.assertFalse(self.check.check_single(
u'{{ name }} string',
u'{{name}} string',
MockUnit('angularjs_different_whitespace',
flags='angularjs-format')
))
def test_missing_format(self):
self.assertTrue(self.check.check_single(
u'{{name}} string',
u'string',
MockUnit('angularjs_missing_format', flags='angularjs-format')
))
def test_wrong_value(self):
self.assertTrue(self.check.check_single(
u'{{name}} string',
u'{{nameerror}} string',
MockUnit('angularjs_wrong_value', flags='angularjs-format')
))
def test_extended_formatting(self):
self.assertFalse(self.check.check_single(
u'Value: {{ something.value | currency }}',
u'Wert: {{ something.value | currency }}',
MockUnit('angularjs_format', flags='angularjs-format')
))
self.assertTrue(self.check.check_single(
u'Value: {{ something.valu | e | currency }}',
u'Value: {{ something.value }}',
MockUnit('angularjs_format', flags='angularjs-format')
))
def test_check_highlight(self):
highlights = self.check.check_highlight(
u'{{name}} {{ something.value | currency }} string',
MockUnit('angularjs_format', flags='angularjs-format') | )
self.assertEqual(2, len(highlights))
self.assertEqual(0, highlights[0][0])
self.assertEqual(8, highlights[0][1])
self.assertEqual(9, highlights[1][0])
self.assertEqual(41, highlights[1][1])
def test_check_highlight_ignored(self):
highlights = self.check.check_highlight(
u'{{name}} {{other}} string',
MockUnit('angularjs_format', flags='ignore-angularjs-format'))
self.assertEqual([], highlights)
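if __name__ == '__main__':
    # Convenience entry point (an assumption, not part of upstream Weblate,
    # which normally runs these tests through its Django test runner):
    from unittest import main
    main()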
|
DuGuille/urban-data | client/urbandata.py | Python | apache-2.0 | 1,412 | 0.021955 | import requests
import json
import datetime
import logging
Config = {
'url' : 'http://198.199.98.147:5000/data_point',
'agent' : 'Default',
'user': 'urbanuser',
'password' : 'urbankey'
}
# send (latitud, longitud, extra)
# Sends a bundle of data to the server as a geojson point
# usage :
#send( 14.123, -90.1234, {
# 'ambient_temperature': 27.2,
# 'pressure': 123.21,
# 'humidity': 123.4567
#})
def send(latitud, longitud, extra):
headers = {'content-type': 'application/json'}
properties = {
"agent": Config['agent'],
"time": datetime.datetime.now().strftime("%a, %d %b %Y %H:%M:%S GMT"),
}
properties = dict(properties.items() + extra.items())
payload = {
"type": "Feature",
"geometry": {
"type": "Point",
"coordinates": [latitud, longitud]
},
"properties": properties
}
r = requ | ests.post(Config['url'], data=json.dumps(payload), headers=headers, auth=(Config['user'], Config['password']))
logger = logging.getLogger('urban-data')
logger.setLevel(logging.DEBUG)
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
logger.addHandler(ch)
|
#TODO: add SQL or TXT file logging policies
#fh = logging.FileHandler('spam.log')
# https://docs.python.org/2/howto/logging-cookbook.html
if r.ok:
logger.debug("Succesful request")
logger.info("Sent data with values: {0}".format(json.dumps(payload)))
|
SavinaRoja/npyscreen2 | npyscreen2/widgets/textfield.py | Python | gpl-3.0 | 9,852 | 0.003959 | # -*- coding: utf-8 -*-
import curses
from . import Widget
import logging
log = logging.getLogger('npyscreen2.widgets.textfield')
class TextField(Widget):
"""
The TextField class will most likely serve as the general basis for most
widgets that display text to the screen. The value attribute should always
be a string.
A TextField intended to be static (display text only and not interact with
the user), should be instantiated with `editable=False`.
"""
def __init__(self,
form,
parent,
value='',
show_cursor=True,
cursor_bold=False,
cursor_color='CURSOR',
cursor_highlight_color='CURSOR_HIGHLIGHT',
cursor_underline=False,
cursor_empty_character=' ',
runoff_left=':',
runoff_right=':',
#wrap_lines=False, # Line wrapping is a nightmare for later
start_cursor_at_end=True,
highlight_whole_widget=False, # not used yet
*args,
**kwargs):
super(TextField, self).__init__(form,
parent,
value=value,
*args,
**kwargs)
self.runoff_left = runoff_left
self.runoff_right = runoff_right
self.highlight_whole_widget = highlight_whole_widget # not used yet
self.start_cursor_at_end = start_cursor_at_end
self.show_cursor = show_cursor
self.cursor_bold = cursor_bold
self.cursor_color = cursor_color
self.cursor_highlight_color = cursor_highlight_color
self.cursor_underline = cursor_underline
self.cursor_empty_character = cursor_empty_character
self._begin_at = 0
self.update()
def set_up_handlers(self):
super(TextField, self).set_up_handlers()
#For OS X
#del_key = curses.ascii.alt('~')
self.handlers.update({curses.KEY_LEFT: self.h_cursor_left,
curses.KEY_RIGHT: self.h_cursor_right,
curses.KEY_DC: self.h_delete_right,
curses.ascii.DEL: self.h_delete_left,
curses.ascii.BS: self.h_delete_left,
curses.KEY_BACKSPACE: self.h_delete_left,
curses.KEY_HOME: self.h_home,
curses.KEY_END: self.h_end,
curses.ascii.NL: self.h_exit_down,
curses.ascii.CR: self.h_exit_down,
curses.ascii.TAB: self.h_exit_down,
#mac os x curses reports DEL as escape oddly
#no solution yet
"^K": self.h_erase_right,
"^U": self.h_erase_left,
})
self.complex_handlers.extend((
(self.t_input_ispr | int, self.h_addch),
# (self.t_is_ck, self.h_erase_right),
# (self.t_is_cu, self.h_erase_left),
))
def _ | pre_edit(self):
super(TextField, self)._pre_edit()
#self.bold = True
#Explicitly setting the behavior for an unset cursor_position
if self.cursor_position is None and self.start_cursor_at_end:
self.cursor_position = len(self.value)
else:
self.cursor_position = 0
def _post_edit(self):
super(TextField, self)._post_edit()
#self.bold = False
#Cause the widget to forget where the cursor was, and reset the begin
self.cursor_position = None
self.begin_at = 0
def printable_value(self):
#This was based on the assumption that the cursor was determined by edit
#if self.editable:
#max_string_length = self.width - 1
if self.show_cursor:
max_string_length = self.width - 1
else:
max_string_length = self.width
val = self.value[self.begin_at:]
if len(val) > max_string_length:
val = val[:max_string_length] + self.runoff_right
return val
def resize(self):
self.height = 1
def update(self):
if self.cursor_position is not None:
if self.cursor_position < self.begin_at:
self.begin_at = self.cursor_position
while self.cursor_position > self.begin_at + self.max_width - 1:
self.begin_at += 1
self.addstr(self.rely, self.relx, self.printable_value())
if self.editing and self.show_cursor:
self.print_cursor()
if self.begin_at > 0:
self.addch(self.rely, self.relx, self.runoff_left)
def print_cursor(self):
# This needs fixing for Unicode multi-width chars.
# Cursors do not seem to work on pads.
#self.parent.curses_pad.move(self.rely, self.cursor_position - self.begin_at)
# let's have a fake cursor
#_cur_loc_x = self.cursor_position - self.begin_at + self.relx
# The following two lines work fine for ascii, but not for unicode
#char_under_cur = self.parent.curses_pad.inch(self.rely, _cur_loc_x)
#self.parent.curses_pad.addch(self.rely, self.cursor_position - self.begin_at + self.relx, char_under_cur, curses.A_STANDOUT)
#The following appears to work for unicode as well.
try:
char_under_cur = self.value[self.cursor_position]
except IndexError:
char_under_cur = self.cursor_empty_character
attr = 0
if self.cursor_bold:
attr |= curses.A_BOLD
if self.cursor_underline:
attr |= curses.A_UNDERLINE
if self.do_colors():
if self.highlight:
attr |= self.form.theme_manager.find_pair(self, self.cursor_highlight_color)
else:
attr |= self.form.theme_manager.find_pair(self,
self.cursor_color)
else:
attr |= curses.A_REVERSE
self.addstr(self.rely,
self.relx + self.cursor_position - self.begin_at,
char_under_cur,
attr)
def h_addch(self, inpt):
if self.editable:
#self.value = self.value[:self.cursor_position] + curses.keyname(input) \
# + self.value[self.cursor_position:]
#self.cursor_position += len(curses.keyname(input))
#workaround for the metamode bug:
if self._last_get_ch_was_unicode == True and isinstance(self.value, bytes):
#probably dealing with python2.
#note: I am pretty much assuming npyscreen2 will be python3 only
ch_adding = inpt
self.value = self.value.decode()
elif self._last_get_ch_was_unicode == True:
ch_adding = inpt
else:
try:
ch_adding = chr(inpt)
except TypeError:
                    ch_adding = inpt
self.value = self.value[:self.cursor_position] + ch_adding \
+ self.value[self.cursor_position:]
self.cursor_position += len(ch_adding)
# or avoid it entirely:
#self.value = self.value[:self.cursor_position] + curses.ascii.unctrl(input) \
# + self.value[self.cursor_position:]
#self.cursor_position += len(curses.ascii.unctrl(input))
def h_cursor_left(self, inpt):
self.cursor_position -= 1
def h_cursor_right(self, inpt):
self.cursor_position += 1
def h_delete_left(self, inpt):
if self.editable and self.cursor_position > 0:
self.value = self.value[:self.cursor_position - 1] + \
self.value[self.cursor_position:]
self.cursor_position -= 1
self.begin_at -= 1
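# Minimal usage sketch (hypothetical: assumes an npyscreen2 Form instance
# `form` acting as both form and parent, which this module does not define):
#
#   field = TextField(form, form, value='hello', editable=True)
#   field.update()  # draws the value, plus a fake cursor while editing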
|
lagopus/lagopus | test/datastore/long_run/lib/async_datastore_cmd.py | Python | apache-2.0 | 3,080 | 0.000649 | #!/usr/bin/env python
import sys
import socket
import ssl
import os
import select
import json
import logging
import asyncore
import threading
import six
from six.moves import _thread
from six.moves import queue
from contextlib import contextmanager
from const import *
class AsyncDataStoreCmd(asyncore.dispatcher):
def __init__(self, host="127.0.0.1", port=12345, is_tls=False,
certfile=None, keyfile=None, ca_certs=None):
asyncore.dispatcher.__init__(self)
self.is_tls = is_tls
if self.is_tls:
self.certfile = certfile
self.keyfile = keyfile
self.ca_certs = ca_certs
self.host = host
self.port = port
self.wbuf = b""
self.queue = queue.Queue()
self.th = None
def create_sock(self):
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
if self.is_tls:
ssl_sock = ssl.wrap_socket(
sock,
certfile=self.certfile,
keyfile=self.keyfile,
ca_certs=self.ca_certs,
cert_reqs=ssl.CERT_REQUIRED)
sock = ssl_sock
sock.setblocking(0)
self.set_socket(sock)
def connect(self):
asyncore.dispatcher.connect(self, (self.host, self.port))
proto = "TLS" if self.is_tls else "TCP"
logging.info(
"connected: " + self.host + ":" + str(self.port) + "(" + proto + ")")
def handle_close(self):
| self.close()
def writable(self):
return ((len(self.wbuf) > 0) or (not self.queue.empty()))
def handle_write(self):
try:
if len(self.wbuf) == 0:
self.wbuf = self.queue.get_nowait()
if self.wbuf is None:
_thread.exit()
w = self.wbuf
if six.PY3:
w = self.wbuf.encode()
| sentlen = self.send(w)
self.wbuf = self.wbuf[sentlen:]
except queue.Empty:
pass
def readable(self):
return True
def handle_read(self):
# ignore
data = self.recv(BUFSIZE)
if not data:
raise RuntimeError("connection broken!")
logging.debug("rcve: %s" % data)
def send_cmd(self, cmd):
if cmd is not None:
cmd += "\n"
self.queue.put(cmd)
def loop(self):
asyncore.loop(timeout=0.1)
def run(self):
self.th = threading.Thread(target=self.loop)
self.th.start()
def join(self):
self.th.join()
def is_alive(self):
        return self.th.is_alive()
@contextmanager
def open_async_ds_cmd(**kwds):
try:
adsc = AsyncDataStoreCmd(**kwds)
adsc.create_sock()
adsc.connect()
adsc.run()
yield adsc
finally:
adsc.send_cmd(None)
adsc.join()
adsc.close()
if __name__ == "__main__":
# tests
# precondition: start lagopus.
with open_async_ds_cmd() as adsc:
adsc.send_cmd("channel cahnnel01 create")
adsc.send_cmd("channel")
|
ksmit799/Toontown-Source | toontown/coghq/DistributedMintBattleAI.py | Python | mit | 2,940 | 0.006803 | from toontown.toonbase import ToontownGlobals
from toontown.coghq import DistributedLevelBattleAI
from direct.directnotify import DirectNotifyGlobal
from direct.fsm import ClassicFSM, State
from toontown.battle.BattleBase import *
import CogDisguiseGlobals
from toontown.toonbase.ToontownBattleGlobals import getMintCreditMultiplier
from direct.showbase.PythonUtil import addListsByValue
class DistributedMintBattleAI(DistributedLevelBattleAI.DistributedLevelBattleAI):
notify = DirectNotifyGlobal.directNotify.newCategory('DistributedMintBattleAI')
def __init__(self, air, battleMgr, pos, suit, toonId, zoneId, level, battleCellId, roundCallback = None, finishCallback = None, maxSuits = 4):
DistributedLevelBattleAI.DistributedLevelBattleAI.__init__(self, air, battleMgr, pos, suit, toonId, zoneId, level, battleCellId, 'MintReward', roundCallback, finishCallback, maxSuits)
self.battleCalc.setSkillCreditMultiplier(1)
if self.bossBattle:
self.level.d_setBossConfronted(toonId)
self.fsm.addState(State.State('MintReward', self.enterMintReward, self.exitMintReward, ['Resume']))
playMovieState = self.fsm.getStateNamed('PlayMovie')
playMovieState.addTransition('MintReward')
def getTaskZoneId(self):
return self.level.mintId
def handleToonsWon(self, toons):
extraMerits = [0,
| 0,
0,
| 0]
amount = ToontownGlobals.MintCogBuckRewards[self.level.mintId]
index = ToontownGlobals.cogHQZoneId2deptIndex(self.level.mintId)
extraMerits[index] = amount
for toon in toons:
recovered, notRecovered = self.air.questManager.recoverItems(toon, self.suitsKilled, self.getTaskZoneId())
self.toonItems[toon.doId][0].extend(recovered)
self.toonItems[toon.doId][1].extend(notRecovered)
meritArray = self.air.promotionMgr.recoverMerits(toon, self.suitsKilled, self.getTaskZoneId(), getMintCreditMultiplier(self.getTaskZoneId()), extraMerits=extraMerits)
if toon.doId in self.helpfulToons:
self.toonMerits[toon.doId] = addListsByValue(self.toonMerits[toon.doId], meritArray)
else:
self.notify.debug('toon %d not helpful list, skipping merits' % toon.doId)
def enterMintReward(self):
self.joinableFsm.request('Unjoinable')
self.runableFsm.request('Unrunable')
self.resetResponses()
self.assignRewards()
self.bossDefeated = 1
self.level.setVictors(self.activeToons[:])
self.timer.startCallback(BUILDING_REWARD_TIMEOUT, self.serverRewardDone)
return None
def exitMintReward(self):
return None
def enterResume(self):
DistributedLevelBattleAI.DistributedLevelBattleAI.enterResume(self)
if self.bossBattle and self.bossDefeated:
self.battleMgr.level.b_setDefeated()
|
MAPC/myschoolcommute | survey/views.py | Python | gpl-3.0 | 12,688 | 0.001813 | from django.conf import settings
from django.template import RequestContext
from django.shortcuts import render_to_response, get_object_or_404
from django.http import HttpResponse, HttpResponseRedirect, Http404
from django.utils import simplejson, dateparse
from django.db.models import Count, Q
from django.forms.models import (
inlineformset_factory, modelformset_factory, BaseModelFormSet
)
from django.contrib.auth.decorators import login_required
from django.core.mail import send_mail
from django.contrib.auth.decorators import permission_required
from datetime import datetime, timedelta
from survey.models import (
    School, Survey, SurveySet, Child, District, Street
)
from survey.forms import SurveyForm, BatchForm, SurveySetForm, ChildForm
from maps import ForkRunR
import os
def index(request):
# get all districts with active school surveys
active_schools = School.objects.filter(
Q(surveyset__begin__lte=datetime.now()) &
Q(surveyset__end__gte=datetime.now())
)
districts = District.objects.filter(school__in=active_schools).distinct()
return render_to_response('survey/index.html', locals(), context_instance=RequestContext(request))
def district(request, district_slug):
district = District.objects.get(slug__iexact=district_slug)
return render_to_response('survey/district.html', {
'district': district,
'MEDIA_URL': settings.MEDIA_URL,
}, context_instance=RequestContext(request))
@permission_required('survey.change_district')
def district_list(request):
districts = District.objects.all()
districts = districts.annotate(school_count=Count('school', distinct=True))
districts = districts.annotate(survey_count=Count('school__survey', distinct=True))
return render_to_response('survey/district_list.html', locals(), context_instance=RequestContext(request))
@login_required
def school_edit(request, district_slug, school_slug, **kwargs):
# check if district exists
district = get_object_or_404(District.objects, slug__iexact=district_slug)
# get school in district
school = get_object_or_404(School.objects, districtid=district, slug__iexact=school_slug)
# translate to lat/lon
school.geometry.transform(4326)
class BaseSurveySetFormSet(BaseModelFormSet):
def __init__(self, *args, **kwargs):
super(BaseSurveySetFormSet, self).__init__(*args, **kwargs)
self.queryset = SurveySet.objects.filter(school=school).order_by('-begin')
surveysets = SurveySet.objects.filter(school=school)
SurveySetFormSet = modelformset_factory(
SurveySet, formset=BaseSurveySetFormSet, form=SurveySetForm,
can_delete=True, extra=1
)
if request.method == 'POST':
formset = SurveySetFormSet(request.POST)
if formset.is_valid():
sets = formset.save(commit=False)
for surveyset in sets:
surveyset.school = school
surveyset.save()
formset = SurveySetFormSet()
else:
#formset = SurveySetFormSet()
formset = SurveySetFormSet()
surveys = Survey.objects.filter(school=school)
count_day = surveys.filter(created__gte=datetime.today() - timedelta(hours=24)).count()
count_week = surveys.filter(created__gte=datetime.today() - timedelta(days=7)).count()
return render_to_response('survey/school_edit.html', {
'school': school,
'district': district,
'surveys': surveys,
'count_day': count_day,
'count_week': count_week,
'formset': formset,
'surveysets': surveysets,
'now': datetime.now()
},
context_instance=RequestContext(request)
)
@login_required
def school_report(request, school_id, start, end):
school = School.objects.get(pk=school_id)
start_d = dateparse.parse_date(start)
end_d = dateparse.parse_date(end)
report_path = "reports/%s/%s_%s_report.pdf" % (
school.slug, start, end
)
full_path = settings.MEDIA_ROOT + '/' + report_path
full_url = settings.MEDIA_URL + '/' + report_path
path = ForkRunR(
school.pk,
start_d,
end_d
)
dir_name = os.path.dirname(full_path)
if not os.path.exists(dir_name):
os.makedirs(dir_name)
os.rename(path, full_path)
send_mail(
"Your report for school %s, date range %s - %s" % (
school, start, end
),
"You may download it at http://%s/%s" % (
request.META['HTTP_HOST'], full_url
),
settings.SERVER_EMAIL,
[request.user.email]
)
return HttpResponseRedirect(full_url)
def get_schools(request, districtid):
"""
Returns all schools for given district as JSON
"""
# check if district exists
district = get_object_or_404(District.objects, districtid=districtid)
schools = School.objects.filter(districtid=district).filter(
Q(surveyset__begin__lte=datetime.now()) &
Q(surveyset__end__gte=datetime.now())
)
response = {}
for school in schools:
response[school.id] = dict(name=school.name, url=school.get_absolute_url())
return HttpResponse(simplejson.dumps(response), mimetype='application/json')
def get_streets(request, districtid):
"""
Returns all streets for given district
"""
# check if district exists
district = get_object_or_404(District.objects, districtid=districtid)
streets = Street.objects.filter(districtid=districtid)
street_list = []
for street in streets:
street_list.append(street.name)
return HttpResponse(simplejson.dumps(street_list), mimetype='application/json')
def school_streets(request, school_id, query=None):
"""
Returns initial list of unique streets within 5000 meters of school
"""
school = School.objects.get(pk=school_id)
intersections = school.get_intersections()
streets = intersections.values('st_name_1').distinct()
if query is not None and query.strip() != '':
streets = streets.filter(st_name_1__icontains=query)
data = [row['st_name_1'].title() for row in list(streets)]
return HttpResponse(simplejson.dumps(sorted(data)), mimetype='application/json')
def school_crossing(request, school_id, street, query=None):
"""
Returns list of unique streets within 5000 meters of school crossing
another street, by name
"""
school = School.objects.get(pk=school_id)
intersections = school.get_intersections()
streets = intersections.filter(st_name_1__iexact=street).values('st_name_2').distinct()
if query is not None and query.strip() != '':
streets = streets.filter(st_name_2__icontains=query)
data = [row['st_name_2'].title() for row in list(streets)]
return HttpResponse(simplejson.dumps(data), mimetype='application/json')
def intersection(request, school_id, street1, street2=None):
"""
Returns intersection GeoJSON based on a | t least one crossing street
"""
school = School.objects.get(pk | =school_id)
intersections = school.get_intersections()
intersections = intersections.filter(st_name_1__iexact=street1)
if street2 is not None and street2.strip() != "":
intersections = intersections.filter(st_name_2__iexact=street2)
features = []
for f in list(intersections.distinct()):
f.geometry.transform(4326)
features.append(""" {
"type": "Feature",
"geometry": %s,
"properties": {"id": %d, "street1": "%s", "street2": "%s"}
}""" % (f.geometry.geojson, f.pk, f.st_name_1.title(), f.st_name_2.title()))
json_text = """{
"type": "FeatureCollection",
"features": [%s]
}""" % ((",\n").join(features))
return HttpResponse(json_text)
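# A successful response from `intersection` is a GeoJSON FeatureCollection
# shaped like the following (values illustrative):
#
# {"type": "FeatureCollection",
#  "features": [{"type": "Feature",
#                "geometry": {"type": "Point", "coordinates": [-71.06, 42.36]},
#                "properties": {"id": 7, "street1": "Main St",
#                               "street2": "Elm St"}}]}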
def form(request, district_slug, school_slug, **kwargs):
# check if district exists
district = get_object_or_404(District.objects, slug__iexact=district_slug)
# get school in district
school = get_object_or_404(School.objects, districtid=district, slug__iexact=school_slug)
    # translate to lat/lon |
openatv/enigma2 | lib/python/Screens/Recording.py | Python | gpl-2.0 | 4,584 | 0.022469 | from os import stat
from os.path import isdir, join as pathjoin
from Components.config import config
from Components.UsageConfig import preferredPath
from Screens.LocationBox import defaultInhibitDirs, MovieLocationBox
from Screens.MessageBox import MessageBox
from Screens.Setup import Setup
from Tools.Directories import fileAccess
class RecordingSettings(Setup):
def __init__(self, session):
self.styles = [("<default>", _("<Default movie location>")), ("<current>", _("<Current movielist location>")), ("<timer>", _("<Last timer location>"))]
self.styleKeys = [x[0] for x in self.styles]
self.inhibitDevs = []
for dir in defaultInhibitDirs + ["/", "/media"]:
if isdir(dir):
device = stat(dir).st_dev
if device not in self.inhibitDevs:
self.inhibitDevs.append(device)
self.buildChoices("DefaultPath", config.usage.default_path, None)
self.buildChoices("TimerPath", config.usage.timer_path, None)
self.buildChoices("InstantPath", config.usage.instantrec_path, None)
Setup.__init__(self, session=session, setup="Recording")
self.greenText = self["key_green"].text
self.errorItem = -1
if self.getCurrentItem() in (config.usage.default_path, config.usage.timer_path, config.usage.instantrec_path):
self.pathStatus(self.getCurrentValue())
def selectionChanged(self):
if self.errorItem == -1:
Setup.selectionChanged(self)
else:
self["config"].setCurrentIndex(self.errorItem)
def changedEntry(self):
if self.getCurrentItem() in (config.usage.default_path, config.usage.timer_path, config.usage.instantrec_path):
self.pathStatus(self.getCurrentValue())
Setup.changedEntry(self)
def keySelect(self):
item = self.getCurrentItem()
if item in (config.usage.default_path, config.usage.timer_path, config.usage.instantrec_path):
# print("[Recordings] DEBUG: '%s', '%s', '%s'." % (self.getCurrentEntry(), item.value, preferredPath(item.value)))
self.session.openWithCallback(se | lf.pathSelect, MovieLocationBox, self.getCurrentEntry(), preferredPath(item.value))
else:
Setup.keySelect(self)
def keySave(self):
if self.errorItem == -1:
Setup.keySave( | self)
else:
self.session.open(MessageBox, "%s\n\n%s" % (self.getFootnote(), _("Please select an acceptable directory.")), type=MessageBox.TYPE_ERROR)
def buildChoices(self, item, configEntry, path):
configList = config.movielist.videodirs.value[:]
styleList = [] if item == "DefaultPath" else self.styleKeys
if configEntry.saved_value and configEntry.saved_value not in styleList + configList:
configList.append(configEntry.saved_value)
configEntry.value = configEntry.saved_value
if path is None:
path = configEntry.value
if path and path not in styleList + configList:
configList.append(path)
pathList = [(x, x) for x in configList] if item == "DefaultPath" else self.styles + [(x, x) for x in configList]
configEntry.value = path
configEntry.setChoices(pathList, default=configEntry.default)
# print("[Recordings] DEBUG %s: Current='%s', Default='%s', Choices='%s'." % (item, configEntry.value, configEntry.default, styleList + configList))
def pathSelect(self, path):
if path is not None:
path = pathjoin(path, "")
item = self.getCurrentItem()
if item is config.usage.default_path:
self.buildChoices("DefaultPath", config.usage.default_path, path)
else:
self.buildChoices("DefaultPath", config.usage.default_path, None)
if item is config.usage.timer_path:
self.buildChoices("TimerPath", config.usage.timer_path, path)
else:
self.buildChoices("TimerPath", config.usage.timer_path, None)
if item is config.usage.instantrec_path:
self.buildChoices("InstantPath", config.usage.instantrec_path, path)
else:
self.buildChoices("InstantPath", config.usage.instantrec_path, None)
self["config"].invalidateCurrent()
self.changedEntry()
def pathStatus(self, path):
if path.startswith("<"):
self.errorItem = -1
footnote = ""
green = self.greenText
elif not isdir(path):
self.errorItem = self["config"].getCurrentIndex()
footnote = _("Directory '%s' does not exist!") % path
green = ""
elif stat(path).st_dev in self.inhibitDevs:
self.errorItem = self["config"].getCurrentIndex()
footnote = _("Flash directory '%s' not allowed!") % path
green = ""
elif not fileAccess(path, "w"):
self.errorItem = self["config"].getCurrentIndex()
footnote = _("Directory '%s' not writable!") % path
green = ""
else:
self.errorItem = -1
footnote = ""
green = self.greenText
self.setFootnote(footnote)
self["key_green"].text = green
|
MrAlone/mzbench | lib/bdl_utils_test.py | Python | bsd-3-clause | 3,059 | 0.006211 | #!/usr/bin/env nosetests
# These are nose-based tests for the benc | hDL translator; run "pip install nose" if you don't have "nosetests"
import bdl_utils
from nose.tools import eq_
def test_indents():
eq_(bdl_utils.add_indents('#!benchDL\nmake_instal | l(git = "git@github.com:foo/bar", branch = "b")'),
'#!benchDL\nmake_install(git = "git@github.com:foo/bar", branch = "b")')
def test_indents_2():
eq_(bdl_utils.add_indents('#!benchDL\npool(size = 1):\n do_stuff(1,2)'),
'#!benchDL\npool(size = 1):\n_INDENT_ do_stuff(1,2)\n_DEDENT_ ')
def test_indents_3():
eq_(bdl_utils.add_indents('#!benchDL\npool(size = 1):\n do_stuff(2,3)\n #comment\n do_stuff(1,2)'),
'#!benchDL\npool(size = 1):\n_INDENT_ do_stuff(2,3)\n #comment\n do_stuff(1,2)\n_DEDENT_ ')
def test_indents_4():
eq_(bdl_utils.add_indents("""#!benchDL
pool(size = 1,
worker_type = dummy_worker):
do_stuff(1,2)"""),
"""#!benchDL
pool(size = 1,
worker_type = dummy_worker):
_INDENT_ do_stuff(1,2)
_DEDENT_ """)
def test_indents_5():
eq_(bdl_utils.add_indents("""#!benchDL
# (
pool(size = 1,
worker_type = dummy_worker):
do_stuff(1,2)"""),
"""#!benchDL
# (
pool(size = 1,
worker_type = dummy_worker):
_INDENT_ do_stuff(1,2)
_DEDENT_ """)
def test_indents_6():
eq_(bdl_utils.add_indents('#!benchDL\n'
'pool(size = 1, attrib = "\\\"#("):\n'
' do_stuff(3)\n'
' #comment\n do_stuff(1,2)'),
'#!benchDL\n'
'pool(size = 1, attrib = "\\\"#("):\n'
'_INDENT_ do_stuff(3)\n'
' #comment\n'
' do_stuff(1,2)\n'
'_DEDENT_ ')
def test_includes():
eq_(bdl_utils.get_includes(bdl_utils.convert("""#!benchDL
include_resource(test_json, "file.json", json)
pool(size = 17,
worker_type = dummy_worker):
do_stuff(1,2)""", {})), [["test_json", "file.json"]])
def test_includes_2():
eq_(bdl_utils.get_includes(bdl_utils.convert("""#!benchDL
include_resourse()
pool(size = 17,
worker_type = dummy_worker):
do_stuff(1,2)""", {})), [])
def test_includes_3():
eq_(bdl_utils.get_includes(bdl_utils.convert("""#!benchDL
# second comment
include_resource(test_json, "file.json", json)
pool(size = 17,
worker_type = dummy_worker):
do_stuff(1,2)""", {})), [["test_json", "file.json"]])
def test_num_of_workers():
eq_(bdl_utils.get_num_of_workers(bdl_utils.convert("""#!benchDL
pool(size = 17,
worker_type = dummy_worker):
do_stuff(1,2)
pool(size = 13,
worker_type = dummy_worker):
do_stuff(1,2)""", {})), 30)
def test_num_of_workers_2():
eq_(bdl_utils.get_num_of_workers(bdl_utils.convert("""#!benchDL
pool(size = var("num2", 3),
worker_type = dummy_worker):
do_stuff(1,2)
pool(size = var("num", 3),
worker_type = dummy_worker):
do_stuff(1,2)""", {"num": 7})), 10)
def test_num_of_workers_3():
eq_(bdl_utils.get_num_of_workers(bdl_utils.convert("""#!benchDL
pool(size = 17K,
worker_type = dummy_worker):
do_stuff(1,2)
pool(size = 13M,
worker_type = dummy_worker):
do_stuff(1,2)""", {})), 13017000)
|
linuxdeepin/dde-daemon | network/examples/python/dbus_gen/com_deepin_daemon_Network.py | Python | gpl-3.0 | 20,639 | 0.005281 | '''
Created with dbus2any
https://github.com/hugosenari/dbus2any
This code require python-dbus
Parameters:
* pydbusclient.tpl
* ./dbus_gen/dbus_dde_daemon_network.xml
See also:
http://dbus.freedesktop.org/doc/dbus-specification.html
http://dbus.freedesktop.org/doc/dbus-python/doc/tutorial.html
'''
import dbus
class Network(object):
'''
com.deepin.daemon.Network
Usage:
------
Instantiate this class and access the instance members and methods
>>> obj = Network(BUS_NAME, OBJECT_PATH)
'''
def __init__(self, bus_name, object_path, interface=None, bus=None):
'''Constructor'''
self._dbus_interface_name = interface or "com.deepin.daemon.Network"
self._dbus_object_path = object_path
self._dbus_name = bus_name
bus = bus or dbus.SessionBus()
self._dbus_object = bus.get_object(self._dbus_name, self._dbus_object_path)
self._dbus_interface = dbus.Interface(self._dbus_object,
dbus_interface=self._dbus_interface_name)
self._dbus_properties = obj = dbus.Interface(self._dbus_object,
"org.freedesktop.DBus.Properties")
def _get_property(self, name):
return self._dbus_properties.Get(self._dbus_interface_name, name)
def _set_property(self, name, val):
return self._dbus_properties.Set(self._dbus_interface_name, name, val)
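    # Usage sketch (assumption: the daemon exposes this interface at the
    # conventional path; both strings below are illustrative):
    #
    #   net = Network('com.deepin.daemon.Network',
    #                 '/com/deepin/daemon/Network')
    #   print(net.GetActiveConnectionInfo())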
def ActivateAccessPoint(self, *arg, **kw):
'''
Method (call me)
params:
: STRING
: OBJECT_PATH
: OBJECT_PATH
return:
: OBJECT_PATH
See also:
http://dbus.freedesktop.org/doc/dbus-specification.html#idp94392448
'''
return self._dbus_interface.ActivateAccessPoint(*arg, **kw)
def ActivateConnection(self, *arg, **kw):
'''
Method (call me)
params:
: STRING
: OBJECT_PATH
return:
: OBJECT_PATH
See also:
http://dbus.freedesktop.org/doc/dbus-specification.html#idp94392448
'''
return self._dbus_interface.ActivateConnection(*arg, **kw)
def CancelSecret(self, *arg, **kw):
'''
Method (call me)
params:
: STRING
: STRING
See also:
http://dbus.freedesktop.org/doc/dbus-specification.html#idp94392448
'''
return self._dbus_interface.CancelSecret(*arg, **kw)
def CreateConnection(self, *arg, **kw):
'''
Method (call me)
params:
: STRING
: OBJECT_PATH
return:
: OBJECT_PATH
See also:
http://dbus.freedesktop.org/doc/dbus-specification.html#idp94392448
'''
return self._dbus_interface.CreateConnection(*arg, **kw)
def CreateConnectionForAccessPoint(self, *arg, **kw):
'''
Method (call me)
params:
: OBJECT_PATH
: OBJECT_PATH
return:
: OBJECT_PATH
See also:
http://dbus.freedesktop.org/doc/dbus-specification.html#idp94392448
'''
return self._dbus_interface.CreateConnectionForAccessPoint(*arg, **kw)
def DeactivateConnection(self, *arg, **kw):
'''
Method (call me)
params:
: STRING
See also:
http://dbus.freedesktop.org/doc/dbus-specification.html#idp94392448
'''
return self._dbus_interface.DeactivateConnection(*arg, **kw)
def DeleteConnection(self, *arg, **kw):
'''
Method (call me)
params:
: STRING
See also:
http://dbus.freedesktop.org/doc/dbus-specification.html#idp94392448
'''
return self._dbus_interface.DeleteConnection(*arg, **kw)
def DisconnectDevice(self, *arg, **kw):
'''
Method (call me)
params:
: OBJECT_PATH
See also:
http://dbus.freedesktop.org/doc/dbus-specification.html#idp94392448
'''
return self._dbus_interface.DisconnectDevice(*arg, **kw)
def EditConnection(self, *arg, **kw):
'''
Method (call me)
params:
: STRING
: OBJECT_PATH
return:
: OBJECT_PATH
See also:
http://dbus.freedesktop.org/doc/dbus-specification.html#idp94392448
'''
return self._dbus_interface.EditConnection(*arg, **kw)
def EnableDevice(self, *arg, **kw):
'''
Method (call me)
params:
: OBJECT_PATH
: BOOLEAN
See also:
http://dbus.freedesktop.org/doc/dbus-specification.html#idp94392448
'''
return self._dbus_interface.EnableDevice(*arg, **kw)
def FeedSecret(self, *arg, **kw):
'''
Method (call me)
params:
: STRING
: STRING
: STRING
: BOOLEAN
See also:
http://dbus.freedesktop.org/doc/dbus-specification.html#idp94392448
'''
return self._dbus_interface.FeedSecret(*arg, **kw)
def GetAccessPoints(self, *arg, **kw):
'''
Method (call me)
params:
: OBJECT_PATH
return:
: STRING
See also:
http://dbus.freedesktop.org/doc/dbus-specification.html#idp94392448
'''
return self._dbus_interface.GetAccessPoints(*arg, **kw)
def GetActiveConnectionInfo(self, *arg, **kw):
'''
Method (call me)
return:
: STRING
See also:
http://dbus.freedesktop.org/doc/dbus-specification.html#idp94392448
'''
return self._dbus_interface.GetActiveConnectionInfo(*arg, **kw)
def GetAutoProxy(self, *arg, **kw):
'''
Method (call me)
return:
: STRING
See also:
http://dbus.freedesktop.org/doc/dbus-specification.html#idp94392448
'''
return self._dbus_interface.GetAutoProxy(*arg, **kw)
def GetProxy(self, *arg, **kw):
'''
Method (call me)
params:
: STRING
return:
: STRING
: STRING
See also:
http://dbus.freedesktop.org/doc/dbus-specification.html#idp94392448
'''
return self._dbus_interface.GetProxy(*arg, **kw)
def GetProxyIgnoreHosts(self, *arg, **kw):
'''
Method (call me)
return:
: STRING
See also:
http://dbus.freedesktop.org/doc/dbus-specification.html#idp94392448
'''
return self._dbus_interface.GetProxyIgnoreHosts(*arg, **kw)
def GetProxyMethod(self, *arg, **kw):
'''
Method (call me)
return:
: STRING
See also:
http: | //dbus.freedesktop.org/doc/dbus-specification.html#idp94392448
'''
return self._dbus_interf | ace.GetProxyMethod(*arg, **kw)
def GetSupportedConnectionTypes(self, *arg, **kw):
'''
Method (call me)
return:
: as
See also:
http://dbus.freedesktop.org/doc/dbus-specification.html#idp94392448
'''
return self._dbus_interface.GetSupportedConnectionTypes(*arg, **kw)
def GetWiredConnectionUuid(self, *arg, **kw):
'''
Method (call me)
params:
: OBJECT_PATH
return:
: STRING
See also:
http://dbus.freedesktop.org/doc/dbus-specification.html#idp94392448
'''
return self._dbus_interface.GetWiredConnectionUuid(*arg, **kw)
def IsDeviceEnabled(self, *arg, **kw):
'''
Method (call me)
params:
: OBJECT_PATH
|
xiaohan2012/temporal-topic-mining | test_util.py | Python | mit | 748 | 0.00134 | # coding: utf8
from util import load_line_corpus
from nose.tools import assert_equal
def test_load_line_corpus():
corpus = load_line_corpus("test_data/line_corpus.dat")
corpus = list(corpus)
assert_equal(len(corpus), 3)
assert_equal(corpus[0], u"We introduce the Randomized Dependence Coefficient (RDC), a measure of non-linear de | pendence between random variables of arbitrary dimension based on the Hirschfeld-Gebelein-Rényi Maximum Correla | tion Coefficient. RDC is defined in terms of correlation of random non-linear copula projections; it is invariant with respect to marginal distribution transformations, has low computational cost and is easy to implement: just five lines of R code, included at the end of the paper.")
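if __name__ == '__main__':
    # Convenience entry point (an assumption; the header suggests running
    # this module through the nosetests command instead):
    import nose
    nose.runmodule()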
|
foobarbazblarg/stayclean | stayclean-2016-august/update-google-chart.py | Python | mit | 5,485 | 0.003282 | #!/usr/bin/python
import json
import gspread
from oauth2client.client import SignedJwtAssertionCredentials
import datetime
from participantCollection import ParticipantCollection
# Edit Me!
par | ticipantFileNames = ['../stayclean-2014-november/participants.txt',
'../stayclean-2014-december/participants.t | xt',
'../stayclean-2015-january/participants.txt',
'../stayclean-2015-february/participants.txt',
'../stayclean-2015-march/participants.txt',
'../stayclean-2015-april/participants.txt',
'../stayclean-2015-may/participants.txt',
'../stayclean-2015-june/participants.txt',
'../stayclean-2015-july/participants.txt',
'../stayclean-2015-august/participants.txt',
'../stayclean-2015-september/participants.txt',
'../stayclean-2015-october/participants.txt',
'../stayclean-2015-november/participants.txt',
'../stayclean-2015-december/participants.txt',
'../stayclean-2016-january/participants.txt',
'../stayclean-2016-february/participants.txt',
'../stayclean-2016-march/participants.txt',
'../stayclean-2016-april/participants.txt',
'../stayclean-2016-may/participants.txt',
'../stayclean-2016-june/participants.txt',
'../stayclean-2016-july/participants.txt',
'./participants.txt']
sortedRelapseDates = []
for participantFileName in participantFileNames:
participants = ParticipantCollection(fileNameString=participantFileName)
sortedRelapseDates = sortedRelapseDates + participants.allRelapseDates()
sortedRelapseDates.sort()
earliestReportDate = sortedRelapseDates[0]
latestReportDate = sortedRelapseDates[-1]
reportDates = []
numberOfRelapsesPerDate = []
reportDatesAndNumberOfRelapses = {}
dayOfWeekIndexesAndNumberOfInstances = {0: 0, 1: 0, 2: 0, 3: 0, 4: 0, 5: 0, 6: 0}
reportDate = earliestReportDate
while reportDate <= latestReportDate:
reportDatesAndNumberOfRelapses[reportDate] = 0
# dayOfWeekIndexesAndNumberOfInstances[reportDate.weekday()] = dayOfWeekIndexesAndNumberOfInstances[reportDate.weekday()] + 1
dayOfWeekIndexesAndNumberOfInstances[reportDate.weekday()] += 1
reportDate += datetime.timedelta(days=1)
for relapseDate in sortedRelapseDates:
# reportDatesAndNumberOfRelapses[relapseDate] = reportDatesAndNumberOfRelapses[relapseDate] + 1
reportDatesAndNumberOfRelapses[relapseDate] += 1
dayOfWeekIndexesAndTotalNumberOfRelapses = {0: 0, 1: 0, 2: 0, 3: 0, 4: 0, 5: 0, 6: 0}
for participantFileName in participantFileNames:
participants = ParticipantCollection(fileNameString=participantFileName)
# print participants.relapseDayOfWeekIndexesAndParticipants()
for index, parts in participants.relapseDayOfWeekIndexesAndParticipants().iteritems():
# dayOfWeekIndexesAndTotalNumberOfRelapses[index] = dayOfWeekIndexesAndTotalNumberOfRelapses[index] + len(parts)
dayOfWeekIndexesAndTotalNumberOfRelapses[index] += len(parts)
dayOfWeekIndexesAndAverageNumberOfRelapses = {0: 0, 1: 0, 2: 0, 3: 0, 4: 0, 5: 0, 6: 0}
for index, instances in dayOfWeekIndexesAndNumberOfInstances.iteritems():
# dayOfWeekIndexesAndAverageNumberOfRelapses[index] = int(round(float(dayOfWeekIndexesAndTotalNumberOfRelapses[index]) / float(instances)))
dayOfWeekIndexesAndAverageNumberOfRelapses[index] = float(dayOfWeekIndexesAndTotalNumberOfRelapses[index]) / float(instances)
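# For example (numbers illustrative): if Mondays occurred 87 times across
# the reporting window and saw 261 relapses in total, the Monday average
# computed above is 261 / 87 = 3.0.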
spreadsheetTitle = "StayClean monthly challenge relapse data"
# spreadsheetTitle = "Test spreadsheet"
json_key = json.load(open('../google-oauth-credentials.json'))
scope = ['https://spreadsheets.google.com/feeds']
credentials = SignedJwtAssertionCredentials(json_key['client_email'], json_key['private_key'].encode(), scope)
gc = gspread.authorize(credentials)
spreadSheet = None
try:
spreadSheet = gc.open(spreadsheetTitle)
except gspread.exceptions.SpreadsheetNotFound:
print "No spreadsheet with title " + spreadsheetTitle
exit(1)
workSheet = spreadSheet.get_worksheet(0)
columnACells = workSheet.range("A2:A" + str(len(reportDatesAndNumberOfRelapses) + 1))
columnBCells = workSheet.range("B2:B" + str(len(reportDatesAndNumberOfRelapses) + 1))
columnCCells = workSheet.range("C2:C8")
columnDCells = workSheet.range("D2:D8")
reportDate = earliestReportDate
rowIndex = 0
while reportDate <= latestReportDate:
columnACells[rowIndex].value = str(reportDate)
columnBCells[rowIndex].value = str(reportDatesAndNumberOfRelapses[reportDate])
rowIndex += 1
reportDate += datetime.timedelta(days=1)
for weekdayIndex in range(0, 7):
weekdayName = ['Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday', 'Sunday'][weekdayIndex]
# spreadsheetClient.UpdateCell(weekdayIndex + 2,3,weekdayName,spreadsheetId)
# spreadsheetClient.UpdateCell(weekdayIndex + 2,4,str(dayOfWeekIndexesAndAverageNumberOfRelapses[weekdayIndex]),spreadsheetId)
columnCCells[weekdayIndex].value = weekdayName
columnDCells[weekdayIndex].value = str(dayOfWeekIndexesAndAverageNumberOfRelapses[weekdayIndex])
allCells = columnACells + columnBCells + columnCCells + columnDCells
workSheet.update_cells(allCells)
exit(0)
|
stephrdev/loetwerk | journeyman/projects/forms.py | Python | mit | 1,763 | 0.010777 | from django import forms
from journeyman.projects.models import Project
class Reposi | toryForm(forms.Form):
name = forms.CharField(
help_text='What\'s the name of your awesome project?')
repository = forms.CharField(
help_text='Please enter a valid repository url \
(e.g. git+git://github.com/stephrdev/loetwerk.git)')
class BuildProcessForm(forms.Form):
build_steps = forms.CharField(initial='python setup.py install',
widget=forms.Textarea(attrs={'rows':3, 'cols':40}),
help_text='Let\'s start off with the easy stuff, please type in all \
| the commands needed to install your package')
test_steps = forms.CharField(initial='python setup.py test',
widget=forms.Textarea(attrs={'rows':3, 'cols':40}),
help_text='Now tell us how to run your tests. If you should have \
many different test suites, just add another line.')
dependencies = forms.CharField(required=False, initial='dependencies.txt',
widget=forms.Textarea(attrs={'rows':3, 'cols':40}),
help_text='Please enter a list of pip requirement files that you \
have used to specify your dependencies')
test_xmls = forms.CharField(required=False,
widget=forms.Textarea(attrs={'rows':3, 'cols':40}),
help_text='Please enter a newline separated list of paths of \
unit test result xmls.')
class JourneyConfigOutputForm(forms.Form):
pass
class JourneyConfigFileForm(forms.Form):
config_file = forms.CharField(initial="journey.conf/config",
help_text="If you leave this field blank, we will store the config \
locally.", required=False)
class ProjectForm(forms.ModelForm):
class Meta:
model = Project
exclude = ['active',]
|
infant-cognition-tampere/drop | drop/Experiment.py | Python | mit | 3,168 | 0 | """Experiment-class."""
import glib
from Section import Section
class Experiment:
"""
Experiment class.
    Takes care of the experiment presentation
    (window, ... control); works on one experiment file and controls the
    flow of sections. "Experiment"-level stuff.
"""
def __init__(self, views, ctrl, experiment_data, filename, mediadir,
on_experiment_done):
"""Constructor."""
self.experiment_id = filename
self.data = experiment_data
self.ctrl = ctrl
self.on_experiment_done = on_experiment_done
self.section_num = -1
self.section = None
self.views = views
self.mediadir = mediadir
self.section_prepare(0)
def stop(self):
"""Stop experiment."""
if self.section is not None:
self.section.stop()
def next_phase(self):
"""Jump to next phase."""
if self.section is not None:
self.section.next_phase()
def section_prepare(self, nextsection):
"""Perform pre-section opearations | ."""
# end experiment?
if nextsection >= len(self.data):
glib.idle_add(self.on_experiment_done)
return False
self.section_num = nextsection
sectioninfo = self.data[self.section_num]
if "collect_data" in sectioninfo["options"]:
# check if the u | ser wanted to start data collection on all devices
# or just one?
self.ctrl.start_collecting_data(sectioninfo["name"])
glib.idle_add(self.section_start)
def section_start(self):
"""Create new section instance and start it."""
sectioninfo = self.data[self.section_num]
# generate the object for the next section
self.section = Section(self.mediadir, sectioninfo.copy(),
self.on_section_end, self.ctrl.timestamp)
self.ctrl.add_model(self.section)
for view in self.views:
view.add_model(self.section)
self.section.run()
def on_section_end(self):
"""Callback for section_end."""
for view in self.views:
view.remove_model(self.section)
self.section = None
sectioninfo = self.data[self.section_num]
if "collect_data" in sectioninfo["options"]:
glib.idle_add(self.ctrl.stop_collecting_data,
self.on_saving_data_completed)
else:
glib.idle_add(self.on_saving_data_completed)
def on_saving_data_completed(self):
"""Callback for saving_data_completed."""
sectioninfo = self.data[self.section_num]
# check if next section to begin automatically
if "autocontinue" in sectioninfo["options"] and \
self.section_num != len(self.data):
self.section_prepare(self.section_num+1)
else:
self.ctrl.on_section_completed(self.section_num, len(self.data))
def __del__(self):
"""Destructor for the experiment class."""
for view in self.views:
view.stop()
self.views = None
print "Experiment finished."
|
openstack/networking-odl | networking_odl/ml2/port_status_update.py | Python | apache-2.0 | 5,546 | 0 | # Copyright (c) 2017 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import threading
from neutron_lib.callbacks import resources
from neutron_lib import constants as n_const
from neutron_lib import context
from neutron_lib.plugins import directory
from neutron_lib import worker
from oslo_log import log
from neutron.db import provisioning_blocks
from networking_odl.common import client as odl_client
from networking_odl.common import odl_features
from networking_odl.common import utils
from networking_odl.common import websocket_client as odl_ws_client
LOG = log.getLogger(__name__)
class OdlPortStatusUpdate(worker.BaseWorker):
"""Class to register and handle port status update"""
PORT_PATH = "restconf/operational/neutron:neutron/ports/port"
def __init__(self):
super(OdlPortStatusUpdate, self).__init__()
self.odl_websocket_client = None
def start(self):
super(OdlPortStatusUpdate, self).start()
LOG.debug('OdlPortStatusUpdate worker running')
if odl_features.has(odl_features.OPERATIONAL_PORT_STATUS):
self.run_websocket()
def stop(self):
if self.odl_websocket_client:
self.odl_websocket_client.set_exit_flag()
def wait(self):
"""Wait for service to complete."""
@staticmethod
def reset():
pass
def run_websocket(self):
        # OpenDaylight path to receive websocket notifications on
neutron_ports_path = "/neutron:neutron/neutron:ports"
self.path_uri = utils.get_odl_url()
self.odl_websocket_client = (
odl_ws_client.OpenDaylightWebsocketClient.odl_create_websocket(
self.path_uri, neutron_ports_path,
odl_ws_client.ODL_OPERATIONAL_DATASTORE,
odl_ws_client.ODL_NOTIFICATION_SCOPE_SUBTREE,
self._process_websocket_recv,
self._process_websocket_reconnect,
True
))
def _process_websocket_recv(self, payload, reconnect):
# Callback for websocket notification
LOG.debug("Websocket notification for port status update")
for event in odl_ws_client.EventDataParser.get_item(payload):
operation, path, data = event.get_fields()
if ((operation in [event.OPERATION_UPDATE,
event.OPERATION_CREATE])):
port_id = event.extract_field(path, "neutron:uuid")
port_id = str(port_id).strip("'")
status_field = data.get('status')
if status_field is not None:
status = status_field.get('content')
LOG.debug("Update port for port id %s %s", port_id, status)
# for now we only support transition from DOWN->ACTIVE
# https://bugs.launchpad.net/networking-odl/+bug/1686023
if status == n_const.PORT_STATUS_ACTIVE:
provisioning_blocks.provisioning_complete(
context.get_admin_context(),
port_id, resources.PORT,
provisioning_blocks.L2_AGENT_ENTITY)
if operation == event.OPERATION_DELETE:
LOG.debug("PortStatus: Ignoring delete operation")
def _process_websocket_reconnect(self, status):
if status == odl_ws_client.ODL_WEBSOCKET_CONNECTED:
# Get port data using restconf
LOG.debug("Websocket notification on reconnection")
reconn_thread = threading.Thread(
name='websocket', target=self._pull_missed_statuses)
reconn_thread.start()
def _pull_missed_statuses(self):
LOG.debug("starting to pull pending statuses...")
plugin = directory.get_plugin()
filter = {"status": [n_const.PORT_STATUS_DOWN],
"vif_type": ["unbound"]}
ports = plugin.get_ports(context.get_admin_context(), filter)
if not ports:
LOG.debug("no down ports found, done")
return
port_fetch_url = utils.get_odl_url(self.PORT_PATH)
client = odl_client.OpenDaylightRestClient.create_client(
url=port_fetch_url)
for port in | ports:
port_id = port["id"]
response = client.get(port_id)
if response.status_code != 200:
LOG.warning("Non-200 response code %s", str(response))
continue
odl_status = response.json()['port'][0]['status']
if odl_status == n_const.PORT_STATUS_ACTIVE:
# for now we only | support transition from DOWN->ACTIVE
# See https://bugs.launchpad.net/networking-odl/+bug/1686023
provisioning_blocks.provisioning_complete(
context.get_admin_context(),
port_id, resources.PORT,
provisioning_blocks.L2_AGENT_ENTITY)
LOG.debug("done pulling pending statuses")
|
trondhindenes/Flauthority | flauthority/api_task_status.py | Python | mit | 1,886 | 0.008484 | from flask_restful import Resource, Api
from flask_restful_swagger import swagger
from flauthority import app
from flauthority import api, app, celery, auth
from ModelClasses import AnsibleCommandModel, AnsiblePlaybookModel, AnsibleExt | raArgsModel
import celery_runner
class TaskStatus(Re | source):
@swagger.operation(
notes='Get the status of an certificate generation task/job',
nickname='taskstatus',
parameters=[
{
"name": "task_id",
"description": "The ID of the task/job to get status for",
"required": True,
"allowMultiple": False,
"dataType": 'string',
"paramType": "path"
}
])
@auth.login_required
def get(self, task_id):
task = celery_runner.generate_certificate.AsyncResult(task_id)
if task.state == 'PENDING':
result = "Task not found"
resp = app.make_response((result, 404))
return resp
elif task.state == 'PROGRESS':
result_obj = {'Status': "PROGRESS",
'description': "Task is currently running",
'returncode': None}
else:
try:
return_code = task.info['returncode']
description = task.info['description']
                if return_code == 0:
result_obj = {'Status': "SUCCESS",
'description': description}
else:
result_obj = {'Status': "FLAUTHORITY_TASK_FAILURE",
'description': description,
'returncode': return_code}
except:
result_obj = {'Status': "CELERY_FAILURE"}
return result_obj
api.add_resource(TaskStatus, '/api/taskstatus/<string:task_id>')
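# Example exchange (sketch; the task id is illustrative):
#
#   GET /api/taskstatus/3f1b2c
#   -> 404 "Task not found"                        when Celery reports PENDING
#   -> {"Status": "PROGRESS", ...}                 while the task is running
#   -> {"Status": "SUCCESS", "description": ...}   on returncode 0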
|
plotly/python-api | packages/python/plotly/plotly/validators/densitymapbox/colorbar/tickformatstop/_enabled.py | Python | mit | 515 | 0 | import _plotly_utils.basevalidators
class EnabledValidator(_plotly_utils.basevalidators.BooleanValidator):
def __init__(
self,
plotly_name="enabled",
pare | nt_name="densitymapbox.colorbar.tickformatstop",
| **kwargs
):
super(EnabledValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "colorbars"),
role=kwargs.pop("role", "info"),
**kwargs
)
|
rbitia/aci-demos | vk-burst-demo/aci-worker/app/dbAzureBlob.py | Python | mit | 2,832 | 0.006356 | #By Sam Kreter
#For use by Microsoft and other parties to demo
#Azure Container Service, Azure Container Instances
#and the experimental ACI-connector
import os
from azure.storage.blob import BlockBlobService
import sqlite3
COPY_PICS_NUM = 1
class DbAzureBlob:
def __init__(self):
AZURE_BLOB_ACCOUNT = os.environ.get('AZURE_BLOB_ACCOUNT')
if not AZURE_BLOB_ACCOUNT:
raise EnvironmentError("Must have env variables AZURE_BLOB_ACCOUNT set for this to work.")
self.block_blob_service = BlockBlobService(account_name= AZURE_BLOB_ACCOUNT)
def getImageFromAzureBlob(self,filename_src, filename_dest):
try:
self.block_blob_service.get_blob_to_path('pictures', filename_src, filename_dest)
return True
except Exception as ex:
print("getImageFromAzureBlob: ", ex)
return False
def getAllImagesFromAzureBlob(self,container,dest_folder):
generator = self.block_blob_service.list_blobs('pictures')
success = []
for blob in generator:
try:
self.block_blob_service.get_blob_to_path(container, blob.name, dest_folder + blob.name)
success.append(True)
except Exception as ex:
print("getAllImagesFromAzureBlob: ", ex)
success.append(False)
return all(success)
def doubleDatabase(self):
conn = sqlite3.connect('jobs.db')
cursor = conn.execute("SELECT * FROM jobs;")
for row in cursor:
conn.execute("INSERT INTO jobs (filename) \
VALUES (\"" + row[1] + "\");")
conn.commit()
def setupDatabase(self):
conn = sqlite3.connect('jobs.db')
conn.execute('''DROP TABLE IF EXISTS jobs;''')
conn.execute('''
CREATE TABLE jobs (
id INTEGER PRIMARY KEY AUTOINCREMENT,
filename NOT NULL,
processed INTEGER DEFAULT 0 NOT NULL,
detected INTEGER DEFAULT NULL
);
''')
conn.execute('DROP TABLE IF EXISTS time;')
conn.execute('''
CREATE TABLE time (
id INTEGER PRIMARY KEY,
start_time TEXT,
end_time TEXT,
finished INTEGER,
started INTEGER |
);
''')
| conn.execute('INSERT INTO time values(1,"2017-09-23 18:28:24","2017-09-23 18:28:24",0,0);')
generator = self.block_blob_service.list_blobs('pictures')
for blob in generator:
if(blob.name[:2] == "._"):
blob.name = blob.name[2:]
for i in range(COPY_PICS_NUM):
conn.execute("INSERT INTO jobs (filename) \
VALUES (\"" + blob.name + "\");")
conn.commit()
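# Usage sketch (requires the AZURE_BLOB_ACCOUNT environment variable and a
# 'pictures' container on that account; 'jobs.db' is created in the
# working directory):
#
#   db = DbAzureBlob()
#   db.setupDatabase()
#   db.getAllImagesFromAzureBlob('pictures', '/tmp/pics/')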
|
w1r0x/ansible-modules-core | cloud/docker/docker.py | Python | gpl-3.0 | 71,287 | 0.004335 | #!/usr/bin/python
# (c) 2013, Cove Schneider
# (c) 2014, Joshua Conner <joshua.conner@gmail.com>
# (c) 2014, Pavel Antonov <antonov@adwz.ru>
#
# This file is part of Ansible,
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
######################################################################
DOCUMENTATION = '''
---
module: docker
version_added: "1.4"
short_description: manage docker containers
description:
- Manage the life cycle of docker containers.
options:
count:
description:
- Number of matching containers that should be in the desired state.
default: 1
image:
description:
- Container image used to match and launch containers.
required: true
pull:
description:
- Control when container images are updated from the C(docker_url) registry.
If "missing," images will be pulled only when missing from the host;
if '"always," the registry will be checked for a newer version of the
image' each time the task executes.
default: missing
choices: [ "missing", "always" ]
version_added: "1.9"
entrypoint:
description:
- Corresponds to ``--entrypoint`` option of ``docker run`` command and
``ENTRYPOINT`` directive of Dockerfile.
Used to match and launch containers.
default: null
required: false
version_added: "2.1"
command:
description:
- Command used to match and launch containers.
default: null
name:
description:
- Name used to match and uniquely name launched containers. Explicit names
are used to uniquely identify a single container or to link among
containers. Mutually exclusive with a "count" other than "1".
default: null
version_added: "1.5"
ports:
description:
- "List containing private to public port mapping specification.
Use docker 'CLI-style syntax: C(8000), C(9000:8000), or C(0.0.0.0:9000:8000)'
        where 8000 is a container port, 9000 is a host port, and 0.0.0.0 is a host interface.
The container ports need to be exposed either in the Dockerfile or via the C(expose) option."
default: null
version_added: "1.5"
expose:
description:
- List of additional container ports to expose for port mappings or links.
If the port is already exposed using EXPOSE in a Dockerfile, you don't
need to expose it again.
default: null
version_added: "1.5"
publish_all_ports:
description:
| - Publish all exposed ports to the host interfaces.
default: false
version_added: "1.5"
volumes:
description:
- List of volumes to mount within the container
- 'Use docker CLI-style syntax: C(/host:/container[:mode])'
- You can specify a read mode for the mount with either C(ro) or C(rw).
Starting at version 2.1, SELinux hosts can additionally use C(z) or C(Z)
mount options to use a shared or private label for | the volume.
default: null
volumes_from:
description:
- List of names of containers to mount volumes from.
default: null
links:
description:
- List of other containers to link within this container with an optional
- 'alias. Use docker CLI-style syntax: C(redis:myredis).'
default: null
version_added: "1.5"
devices:
description:
- List of host devices to expose to container
default: null
required: false
version_added: "2.1"
log_driver:
description:
- You can specify a different logging driver for the container than for the daemon.
"json-file" Default logging driver for Docker. Writes JSON messages to file.
docker logs command is available only for this logging driver.
"none" disables any logging for the container.
"syslog" Syslog logging driver for Docker. Writes log messages to syslog.
docker logs command is not available for this logging driver.
"journald" Journald logging driver for Docker. Writes log messages to "journald".
"gelf" Graylog Extended Log Format (GELF) logging driver for Docker. Writes log messages to a GELF endpoint likeGraylog or Logstash.
"fluentd" Fluentd logging driver for Docker. Writes log messages to "fluentd" (forward input).
"awslogs" (added in 2.1) Awslogs logging driver for Docker. Writes log messages to AWS Cloudwatch Logs.
If not defined explicitly, the Docker daemon's default ("json-file") will apply.
Requires docker >= 1.6.0.
required: false
default: json-file
choices:
- json-file
- none
- syslog
- journald
- gelf
- fluentd
- awslogs
version_added: "2.0"
log_opt:
description:
- Additional options to pass to the logging driver selected above. See Docker `log-driver
<https://docs.docker.com/reference/logging/overview/>` documentation for more information.
Requires docker >=1.7.0.
required: false
default: null
version_added: "2.0"
memory_limit:
description:
- RAM allocated to the container as a number of bytes or as a human-readable
string like "512MB". Leave as "0" to specify no limit.
default: 0
docker_url:
description:
- URL of the host running the docker daemon. This will default to the env
var DOCKER_HOST if unspecified.
default: ${DOCKER_HOST} or unix://var/run/docker.sock
use_tls:
description:
- Whether to use tls to connect to the docker server. "no" means not to
use tls (and ignore any other tls related parameters). "encrypt" means
to use tls to encrypt the connection to the server. "verify" means to
also verify that the server's certificate is valid for the server
(this both verifies the certificate against the CA and that the
        certificate was issued for that host). If this is unspecified, tls will
only be used if one of the other tls options require it.
choices: [ "no", "encrypt", "verify" ]
version_added: "1.9"
tls_client_cert:
description:
- Path to the PEM-encoded certificate used to authenticate docker client.
        If specified, tls_client_key must be valid
default: ${DOCKER_CERT_PATH}/cert.pem
version_added: "1.9"
tls_client_key:
description:
- Path to the PEM-encoded key used to authenticate docker client. If
        specified, tls_client_cert must be valid
default: ${DOCKER_CERT_PATH}/key.pem
version_added: "1.9"
tls_ca_cert:
description:
- Path to a PEM-encoded certificate authority to secure the Docker connection.
This has no effect if use_tls is encrypt.
default: ${DOCKER_CERT_PATH}/ca.pem
version_added: "1.9"
tls_hostname:
description:
- A hostname to check matches what's supplied in the docker server's
certificate. If unspecified, the hostname is taken from the docker_url.
default: Taken from docker_url
version_added: "1.9"
docker_api_version:
description:
- Remote API version to use. This defaults to the current default as
specified by docker-py.
default: docker-py default remote API version
version_added: "1.8"
docker_user:
description:
- Username or UID to use within the container
required: false
default: null
version_added: "2.0"
username:
description:
- Remote API username.
default: null
password:
description:
- Remote API password.
default: null
email:
description:
- Remote API email.
default: null
hostname:
description:
- Container hostname.
def |
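# Editor's note (added sketch, not part of the module): the ports option above
# documents the CLI-style "container", "host:container" and "ip:host:container"
# forms. A minimal illustration of splitting such a spec into its parts; the
# helper name is hypothetical.
def _split_port_spec(spec):
    parts = spec.split(':')
    if len(parts) == 1:    # e.g. "8000" -> container port only
        return None, None, parts[0]
    if len(parts) == 2:    # e.g. "9000:8000" -> host:container
        return None, parts[0], parts[1]
    if len(parts) == 3:    # e.g. "0.0.0.0:9000:8000" -> ip:host:container
        return parts[0], parts[1], parts[2]
    raise ValueError('invalid port specification: %s' % spec)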
scith/htpc-manager_ynh | sources/libs/ndg/__init__.py | Python | gpl-3.0 | 653 | 0.003063 | """ndg_httpsclient - PyOpenSSL utility to make a httplib-like interface suitable
for use with urllib2
This is a setuptools namespace_packa | ge. DO NOT place any other
code in this file! There is no guarantee that it will be i | nstalled
with easy_install. See:
http://peak.telecommunity.com/DevCenter/setuptools#namespace-packages
... for details.
"""
__author__ = "P J Kershaw"
__date__ = "06/01/12"
__copyright__ = "(C) 2012 Science and Technology Facilities Council"
__license__ = "BSD - see LICENSE file in top-level directory"
__contact__ = "Philip.Kershaw@stfc.ac.uk"
__revision__ = '$Id$'
__import__('pkg_resources').declare_namespace(__name__) |
pixlra/HARP-fork | PythonLib/Regex.py | Python | gpl-3.0 | 961 | 0.012487 | #!/usr/bin/env python
# coding: utf8
# (c) 2014 Dominic Springer
# File licensed under GNU GPL (see HARP_License.txt)
import re
import numpy as np
#look for: import re, re.search
# HOW TO ==================================
# 1) Paste line to https://pythex.org/
# 2) Create function to wrap
#==================== | ======================
#==========================================
def get_DimX_DimY_from_Filename(FN):
#==========================================
res = re.search(r"(\d*)x(\d*)", FN)
return (np.int32(res.group(1)), np.int32(res.group(2)))
#==========================================
def fromStartToBracket(Str):
#==========================================
res = re.search(r"::(.*)\sat", Str)
return (res.group(1))
#get DimX and DimY from header in file
# lin | e = FH.readline()
# DimX = int( re.search(r"DimX=([-|\d]*)", line).group(1))
# DimY = int( re.search(r"DimY=([-|\d]*)", line).group(1))
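# Editor's note (added example): a hypothetical wrapper written to the same
# "HOW TO" recipe as the functions above; the QP= pattern is illustrative only
# and not taken from the original file.
#==========================================
def get_QP_from_Line(Line):
#==========================================
    res = re.search(r"QP=(\d+)", Line)
    return np.int32(res.group(1))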
|
antoinecarme/pyaf | tests/artificial/transf_BoxCox/trend_PolyTrend/cycle_0/ar_12/test_artificial_128_BoxCox_PolyTrend_0_12_100.py | Python | bsd-3-clause | 263 | 0.087452 | import pyaf.Bench.TS_d | atasets as tsds
import tests.artificial.process_artificial_dataset as art
art.process_dataset(N = 128 , FREQ = 'D', seed = 0, trendtype = "PolyTrend", cycle_length = 0, transform = "BoxCox", | sigma = 0.0, exog_count = 100, ar_order = 12); |
miyataken999/weblate | weblate/requirements.py | Python | gpl-3.0 | 5,638 | 0 | # -*- coding: utf-8 -*-
#
# Copyright © 2012 - 2015 Michal Čihař <michal@cihar.com>
#
# This file is part of Weblate <https://weblate.org/>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# For some reason, this fails in PyLint sometimes...
# pylint: disable=E0611,F0401
from distutils.version import LooseVersion
from weblate.trans.vcs import GitRepository, HgRepository
import importlib
import sys
def get_version_module(module, name, url, optional=False):
'''
Returns module object, on error raises verbose
exception with name and URL.
'''
try:
mod = importlib.import_module(module)
except ImportError:
if optional:
return None
raise Exception(
'Failed to import %s, please install %s from %s' % (
module.replace('.__version__', ''),
name,
url,
)
)
return mod
def get_optional_versions():
'''
Returns versions of optional modules.
'''
result = []
name = 'pyuca'
url = 'https://github.com/jtauber/pyuca'
mod = get_version_module('pyuca', name, url, True)
if mod is not None:
result.append((
name,
url,
'N/A',
None,
))
name = 'pyLibravatar'
url = 'https://pypi.python.org/pypi/pyLibravatar'
mod = get_version_module('libravatar', name, url, True)
if mod is not None:
result.append((
name,
url,
'N/A',
None,
))
if HgRepository.is_supported():
result.append((
'Mercurial',
'http://mercurial.selenic.com/',
HgRepository.get_version(),
'2.8',
))
| return result
def get_versions():
'''
Returns list of used versions.
'''
result = []
result.append((
'Python',
'http://www.python.org/',
sys.version.split()[0],
'2.7',
))
name = 'Django'
url = 'https://www.djangoproject.com/'
mod = get_version_module('django', name, url)
result.append((
name,
url,
mod.get_version(),
'1.7',
))
na | me = 'python-social-auth'
url = 'http://psa.matiasaguirre.net/'
mod = get_version_module('social', name, url)
result.append((
name,
url,
mod.__version__,
'0.2.0',
))
name = 'Translate Toolkit'
url = 'http://toolkit.translatehouse.org/'
mod = get_version_module('translate.__version__', name, url)
result.append((
name,
url,
mod.sver,
'1.10.0',
))
name = 'Whoosh'
url = 'http://bitbucket.org/mchaput/whoosh/'
mod = get_version_module('whoosh', name, url)
result.append((
name,
url,
mod.versionstring(),
'2.5',
))
try:
result.append((
'Git',
'http://git-scm.com/',
GitRepository.get_version(),
'1.6',
))
except OSError:
raise Exception('Failed to run git, please install it.')
name = 'Pillow (PIL)'
url = 'http://python-imaging.github.io/'
mod = get_version_module('PIL.Image', name, url)
result.append((
name,
url,
mod.VERSION,
'1.1.6',
))
name = 'dateutil'
url = 'http://labix.org/python-dateutil'
mod = get_version_module('dateutil', name, url)
result.append((
name,
url,
mod.__version__,
'1.0'
))
name = 'lxml'
url = 'http://lxml.de/'
mod = get_version_module('lxml.etree', name, url)
result.append((
name,
url,
mod.__version__,
'3.1.0',
))
name = 'django-crispy-forms'
url = 'http://django-crispy-forms.readthedocs.org/'
mod = get_version_module('crispy_forms', name, url)
result.append((
name,
url,
mod.__version__,
'1.4.0',
))
name = 'compressor'
url = 'https://github.com/django-compressor/django-compressor'
mod = get_version_module('compressor', name, url)
result.append((
name,
url,
mod.__version__,
'1.5',
))
return result
def check_version(name, url, version, expected):
'''
Check for single module version.
'''
if expected is None:
return False
looseversion = LooseVersion(version)
if looseversion < expected:
print '*** %s <%s> is too old! ***' % (name, url)
print 'Installed version %s, required %s' % (version, expected)
return True
return False
def check_requirements():
'''
Performs check on requirements and raises an exception on error.
'''
versions = get_versions() + get_optional_versions()
failure = False
for version in versions:
failure |= check_version(*version)
if failure:
raise Exception(
'Some of required modules are missing or too old! '
'Check above output for details.'
)
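# Editor's note (added illustration): check_version relies on LooseVersion,
# which compares release components numerically rather than lexically, so
# '1.10.0' correctly sorts after '1.9':
#
# >>> from distutils.version import LooseVersion
# >>> LooseVersion('1.10.0') < LooseVersion('1.9')
# False
# >>> LooseVersion('1.6.1') < '1.7'  # strings are coerced before comparing
# True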
|
dashng/netseen | manage.py | Python | apache-2.0 | 1,064 | 0 | #!/usr/bin/env python
from __future__ import print_function
import os
import sys
import unittest
from flask_script import | Manager
from netseen import create_app
from netseen.database import DataBase
sys.path.insert(0, os.getcwd())
manager = Manager(create_app)
@manager.command
def createdb(drop_first=False):
"""Creates the datab | ase."""
try:
if drop_first:
DataBase().drop_all()
DataBase().create_all()
except Exception as e:
print(e)
@manager.command
def test():
"""Runs the unit tests without coverage."""
tests = unittest.TestLoader().discover('netseen.tests', pattern='test*.py')
result = unittest.TextTestRunner(verbosity=2).run(tests)
if result.wasSuccessful():
return 0
else:
return 1
# @manager.command
# def create_admin():
# """Creates the admin user."""
# username = 'admin'
# password = 'admin'
# db.session.add(User(username=username, password=password, admin=True))
# db.session.commit()
if __name__ == '__main__':
manager.run()
|
axiome-oss/dive-into-django-i18n | your_project/your_package/models.py | Python | mit | 199 | 0.005025 | from django.db import | models
from django.contrib.auth.mode | ls import User
class Profile(models.Model):
user = models.OneToOneField(User)
description = models.TextField(blank=True, null=True)
|
Spirotot/py3status | py3status/modules/gpmdp.py | Python | bsd-3-clause | 2,411 | 0.001662 | # -*- coding: utf-8 -*-
"""
Display currently playing song from Google Play Music Desktop Player.
Configuration parameters:
cache_timeout: how often we refresh this module in seconds (default 5)
format: specify the items and ordering of the data in the status bar.
        These are a 1:1 match to gpmdp-remote's options (default is '♫ {info}').
Format of status string placeholders:
    See `gpmdp-remote help`. Simply surround the items you want displayed (e.g. `album`)
    with curly braces (e.g. `{album}`) and place as desired in the format string.
{info} Print info about now playing song
{title} Print current song title
{artist} Print current song artist
{album} Print current song album
{album_art} Print current song album art URL
{time_current} Print current song time in milliseconds
{time_total} Print total song time in milliseconds
{status} Print whether GPMDP is paused or playing
{current} Print now playing song in "artist - song" format
{help} Print this help message
Requires:
gpmdp: http://www.googleplaymusicdesktopplayer.com/
gpmdp-remote: https://github.com/iandrewt/gpmdp-remote
@author Aaron Fields https://twitter.com/spirotot
@license BSD
"""
from time import time
from subprocess import check_output
class Py3status:
"""
"""
# available configuration parameters
cache_timeout = 5
format = u'♫ {info}'
@ | staticmethod
def _run_cmd(cmd):
return check_output(['gpmdp-remote', cmd]).decode('utf-8').strip()
def gpmdp(self, i3s_output_list, i3s_config):
if self._run_cmd('status') == 'Paused':
result = ''
else:
cmds = ['info', 'title', 'artist', 'album', 'status', 'current',
'time_total', 'time_current', 'album_art']
data = {}
for c | md in cmds:
if '{%s}' % cmd in self.format:
data[cmd] = self._run_cmd(cmd)
result = self.format.format(**data)
response = {
'cached_until': time() + self.cache_timeout,
'full_text': result
}
return response
if __name__ == "__main__":
"""
Run module in test mode.
"""
from py3status.module_test import module_test
module_test(Py3status)
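# Editor's note (added example): a sketch of how this module might be enabled
# in a py3status/i3status configuration; the stanza below follows the
# placeholders documented above, but should be checked against the py3status
# docs for your version.
#
# order += "gpmdp"
#
# gpmdp {
#     cache_timeout = 10
#     format = "♫ {artist} - {title}"
# }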
|
Harmon758/discord.py | examples/basic_bot.py | Python | mit | 1,925 | 0.004675 | # This example requires the 'members' privileged intents
import discord
from discord.ext import commands
import random
description = '''An example bot to showcase the discord.ext.commands extension
module.
There are a number of utility commands being showcased here.'''
intents = discord.Intents.default()
intents.members = True
bot = commands.Bot(command_prefix='?', description=description, intents=intents)
@bot.event
async def on_ready():
print(f'Logged in as {bot.user} (ID: {bot.user.id})')
print('------')
@bot.command()
async def add(ctx, left: int, right: int):
"""Adds two numbers together."""
await ctx.send(left + right)
@bot.command()
async def roll(ctx, dice: str):
"""Rolls a dice in NdN format."""
try:
rolls, limit = map(int, dice.split('d'))
except Exception:
await ctx.send('Format has to be in NdN!')
return
result = ', '.join(str(random.randint(1, limit)) for r in range(rolls))
await ctx.send(result)
@bot.command(description='For when you wanna settle the score some other way')
async def choose(ctx, *choices: str):
"""Chooses between multiple choices."""
await ctx.send(random.choice(choices))
@bot.command()
async def repeat(ctx, times: int, content='repeating...') | :
"""Repeats a message multiple times."""
for i in range(times):
await ctx.send(content) |
@bot.command()
async def joined(ctx, member: discord.Member):
"""Says when a member joined."""
await ctx.send(f'{member.name} joined in {member.joined_at}')
@bot.group()
async def cool(ctx):
"""Says if a user is cool.
In reality this just checks if a subcommand is being invoked.
"""
if ctx.invoked_subcommand is None:
await ctx.send(f'No, {ctx.subcommand_passed} is not cool')
@cool.command(name='bot')
async def _bot(ctx):
"""Is the bot cool?"""
await ctx.send('Yes, the bot is cool.')
bot.run('token')
|
bloomberg/phabricator-tools | py/phl/phlsys_signal__t.py | Python | apache-2.0 | 7,979 | 0 | """Test suite for phlsys_signal."""
# =============================================================================
# TEST PLAN
# -----------------------------------------------------------------------------
# Here we detail the things we are concerned to test and specify which tests
# cover those concerns.
#
# Concerns:
# [ A] can run phlsys_signal.set_exit_on_sigterm
# [ A] exit_level is 0 before exit contexts are active
# [ A] exit_level is 1 while single exit context is active
# [ A] exit_level is 0 after single exit contexts finishes
# [ B] exit_level is 0 before recursive exit contexts are active
# [ B] exit_level is 1 while first exit context is active
# [ B] exit_level is 2 while second exit context is active
# [ B] exit_level is 0 after recursive exit contexts are active
# [ C] exceptions can be raised through an exit context
# [ C] exit_level is 0 after raising through an exit context
# [ D] exceptions cannot be raised through a triggered exit context
# [ D] exit_level is 0 after raising through triggered exit context
# [ E] cannot raise through nested triggered exit contexts
# [ E] exit_level=0 after raising through nested triggered contexts
# [ F] After SIGTERM is received, exit contexts raise SystemExit
# [ F] exit_level is 0 after a triggered exit context
# [ G] When nesting exit contexts, exit after SIGTERM is received
# [ G] When nesting exit contexts, finish after SIGTERM is received
# [ G] exit_level is 0 after triggered nested exit contexts finish
# -----------------------------------------------------------------------------
# Tests:
# [ A] test_A_Breathing
# [ B] test_B_can_recurse_exit_context
# [ C] test_C_can_raise_through_exit_context
# [ D] test_D_cant_raise_through_triggered_exit_context
# [ E] test_E_cant_raise_through_nested_triggered_exit_context
# [ F] test_F_after_sigterm_exit_contexts_do_exit
# [ G] test_G_after_sigterm_inner_exit_contexts_do_not_exit
# =============================================================================
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import signal
import unittest
import phlsys_signal
class Test(unittest.TestCase):
def __init__(self, data):
super(Test, self).__init__(data)
def test_A_Breathing(self):
# We must resto | re the original signal handler when we're done testing
# or nose will hang indefinitely when we run the tests.
handler = signal.getsig | nal(signal.SIGTERM)
# CONCERN: can run phlsys_signal.set_exit_on_sigterm
phlsys_signal.set_exit_on_sigterm()
# CONCERN: exit_level is 0 before exit contexts are active
self.assertEqual(
phlsys_signal._SIGNAL_FLAGS.delay_sigterm_exit_level,
0)
with phlsys_signal.no_exit_context():
# CONCERN: exit_level is 1 while single exit context is active
self.assertEqual(
phlsys_signal._SIGNAL_FLAGS.delay_sigterm_exit_level,
1)
# CONCERN: exit_level is 0 after single exit contexts finishes
self.assertEqual(
phlsys_signal._SIGNAL_FLAGS.delay_sigterm_exit_level,
0)
signal.signal(signal.SIGTERM, handler)
def test_B_can_recurse_exit_context(self):
# CONCERN: exit_level is 0 before recursive exit contexts are active
self.assertEqual(
phlsys_signal._SIGNAL_FLAGS.delay_sigterm_exit_level,
0)
with phlsys_signal.no_exit_context():
# CONCERN: exit_level is 1 while first exit context is active
self.assertEqual(
phlsys_signal._SIGNAL_FLAGS.delay_sigterm_exit_level,
1)
with phlsys_signal.no_exit_context():
# CONCERN: exit_level is 2 while second exit context is active
self.assertEqual(
phlsys_signal._SIGNAL_FLAGS.delay_sigterm_exit_level,
2)
# CONCERN: exit_level is 0 after recursive exit contexts are active
self.assertEqual(
phlsys_signal._SIGNAL_FLAGS.delay_sigterm_exit_level,
0)
def test_C_can_raise_through_exit_context(self):
class RaiseThroughExitError(Exception):
pass
# CONCERN: exceptions can be raised through an exit context
with self.assertRaises(RaiseThroughExitError):
with phlsys_signal.no_exit_context():
raise RaiseThroughExitError()
# CONCERN: exit_level is 0 after raising through an exit context
self.assertEqual(
phlsys_signal._SIGNAL_FLAGS.delay_sigterm_exit_level,
0)
def test_D_cant_raise_through_triggered_exit_context(self):
class RaiseThroughExitError(Exception):
pass
# CONCERN: exceptions cannot be raised through a triggered exit context
with self.assertRaises(SystemExit):
with phlsys_signal.no_exit_context():
phlsys_signal._SIGNAL_FLAGS.got_sigterm = True
raise RaiseThroughExitError()
# CONCERN: exit_level is 0 after raising through triggered exit context
self.assertEqual(
phlsys_signal._SIGNAL_FLAGS.delay_sigterm_exit_level,
0)
def test_E_cant_raise_through_nested_triggered_exit_context(self):
class RaiseThroughExitError(Exception):
pass
# CONCERN: cannot raise through nested triggered exit contexts
with self.assertRaises(SystemExit):
with phlsys_signal.no_exit_context():
with phlsys_signal.no_exit_context():
phlsys_signal._SIGNAL_FLAGS.got_sigterm = True
raise RaiseThroughExitError()
# CONCERN: exit_level=0 after raising through nested triggered contexts
self.assertEqual(
phlsys_signal._SIGNAL_FLAGS.delay_sigterm_exit_level,
0)
def test_F_after_sigterm_exit_contexts_do_exit(self):
# CONCERN: After SIGTERM is received, exit contexts raise SystemExit
with self.assertRaises(SystemExit):
with phlsys_signal.no_exit_context():
phlsys_signal._SIGNAL_FLAGS.got_sigterm = True
# CONCERN: exit_level is 0 after a triggered exit context
self.assertEqual(
phlsys_signal._SIGNAL_FLAGS.delay_sigterm_exit_level,
0)
def test_G_after_sigterm_inner_exit_contexts_do_not_exit(self):
did_finish = False
# CONCERN: When nesting exit contexts, exit after SIGTERM is received
with self.assertRaises(SystemExit):
with phlsys_signal.no_exit_context():
with phlsys_signal.no_exit_context():
with phlsys_signal.no_exit_context():
phlsys_signal._SIGNAL_FLAGS.got_sigterm = True
did_finish = True
# CONCERN: When nesting exit contexts, finish after SIGTERM is received
self.assertTrue(did_finish)
# CONCERN: exit_level is 0 after triggered nested exit contexts finish
self.assertEqual(
phlsys_signal._SIGNAL_FLAGS.delay_sigterm_exit_level,
0)
# -----------------------------------------------------------------------------
# Copyright (C) 2016 Bloomberg Finance L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ------------------------------ END-OF-FILE ----------------------------------
|
wolfe-pack/moro | public/javascripts/brat/testserver.py | Python | bsd-2-clause | 2,109 | 0.002371 | #!/usr/bin/env python
'''
Run brat using the built-in Python CGI server for testing purposes.
Author: Pontus Stenetorp <pontus stenetorp se>
Version: 2012-07-01
'''
from BaseHTTPServer import HTTPServer, test as simple_http_server_test
from CGIHTTPServer import CGIHTTPRequestHandler
# Note: It is a terrible idea to import the function below, but we don't have
# a choice if we want to emulate the super-class is_cgi method.
from CGIHTTPServer import _url_collapse_path_split
from sys import stderr
from urlparse import urlparse
# Note: The only reason that we sub-class is to override the stupid is_cgi
# method, which assumes the usage of specific CGI directories; I simply
# refuse to play along with this kind of non-sense.
class BRATCGIHTTPRequestHandler(CGIHTTPRequestHandler):
def is_cgi(self):
# Having a CGI suffix is really a big hint of being a CGI script.
if urlparse(self.path).path.endswith('.cgi'):
self.cgi_info = _url_collapse_path_split(self.path)
return True
else:
return CGIHTTPRequestHandler.is_cgi(self)
def main(args):
# BaseHTTPServer will look for the port in argv[1] or default to 8000
try:
try:
port = int(args[1])
except ValueError:
raise TypeError
except TypeError:
print >> stderr, '%s is not a valid port number' % args[1]
| return -1
except IndexError:
port = 8000
print >> stderr, 'WARNING: This server is for testing purposes only!'
print >> stderr, (' You can also use it for trying out brat before '
'deploying on | a "real" web server such as Apache.')
print >> stderr, (' Using this web server to run brat on an open '
'network is a security risk!')
print >> stderr
print >> stderr, 'You can access the test server on:'
print >> stderr
print >> stderr, ' http://localhost:%s/' % port
print >> stderr
simple_http_server_test(BRATCGIHTTPRequestHandler, HTTPServer)
if __name__ == '__main__':
from sys import argv
exit(main(argv))
|
nkantar/Parsenvy | tests/test_list.py | Python | bsd-3-clause | 648 | 0 | import parsenvy
def test_list_several(monkeypatch):
monkeypatch.setenv("foo", "bar,baz,barf")
assert parsenvy.list("foo") == ["bar", "baz", "barf"]
def test_list_one(monkeypatch):
monkeypatch.setenv("foo", "bar")
assert parsenvy.list("foo") == ["bar"]
def test_list_one_comma(monkeypatch):
monkeypatch.setenv("foo", ",")
assert parsenvy.list( | "foo") == ["", ""]
def test_list_multiple_commas(monkeypatch):
monkeypatch.setenv("foo", ",,,")
assert parsenvy.list("foo") == ["", "", "", ""]
def test_list_empty(monkey | patch):
monkeypatch.setenv("foo", "")
assert parsenvy.list("foo", ["bar"]) == ["bar"]
|
xobb1t/django-loginza-auth | test_project/urls.py | Python | isc | 353 | 0 | from django.conf.urls.defaults import include, patterns, url
from django.views.ge | neric import TemplateView
urlpatterns = patterns(
'',
url(r'^$', TemplateView.as_view(template_name='index.html'),
name='login'),
url(r'^logout/$', 'django. | contrib.auth.views.logout', name='logout'),
url(r'^loginza/', include('loginza.urls')),
)
|
mingkaic/rocnnet | app/pydemo/gym_demo.py | Python | mit | 1,420 | 0.016901 | #!/usr/bin/env python
import _init_paths
import gym
from tf_rl.controller import DiscreteDeepQ, NL
specname = 'CartPole-v0'
serializedname = 'dqntest_'+specname+'.pbx'
spec = gym.spec(specname)
env = spec.make()
episode_count = 250
max_steps = 10000
action_space = env.ac | tion_space
maxaction = action_space.n
observation_space = env.observation_space
maxobservation = observation_space.shape[0]
batchsize = 32 # store at least 32 experiences before training; each minibatch samples 32 action-observation pairs
controller = DiscreteDeepQ(maxobservation, [128, 128, maxaction],
[NL.TANH, NL.TANH, NL.IDENTITY], learning_rate=0.001,
decay=0.9, minibatch_size=batchsize | , discount_rate=0.99,
exploration_period=5000, max_experience=10000, )
controller.initialize(serializedname)
# training step
for ep in xrange(episode_count):
observation = env.reset()
reward = done = None
total_reward = 0
nsteps = 0
for step_it in range(max_steps):
action = controller.action(observation)
new_observation, reward, done, _ = env.step(action)
controller.store(observation, action, reward, new_observation)
controller.training_step()
observation = new_observation
total_reward = total_reward + reward
# env.render()
nsteps = step_it # record step iteration since episode can end early
if done:
break
print 'episode {}: total reward of {} in {} steps'.format(ep, total_reward, nsteps+1)
# controller.save(serializedname)
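# Editor's note (added sketch): a greedy evaluation rollout, assuming
# DiscreteDeepQ.action() can be reused outside training exactly as above; it
# skips store()/training_step() so no learning happens while measuring reward.
def evaluate(env, controller, episodes=5, eval_max_steps=10000):
    for _ in xrange(episodes):
        observation = env.reset()
        total_reward = 0
        for _ in xrange(eval_max_steps):
            action = controller.action(observation)
            observation, reward, done, _ = env.step(action)
            total_reward += reward
            if done:
                break
        print 'evaluation reward: {}'.format(total_reward)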
|