repo_name stringlengths 5 100 | path stringlengths 4 231 | language stringclasses 1 value | license stringclasses 15 values | size int64 6 947k | score float64 0 0.34 | prefix stringlengths 0 8.16k | middle stringlengths 3 512 | suffix stringlengths 0 8.17k |
|---|---|---|---|---|---|---|---|---|
upTee/upTee | uptee/wsgi.py | Python | bsd-3-clause | 1,132 | 0.000883 | """
WSGI config for uptee project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should ex | pose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or | combine a Django application with an application of another
framework.
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "uptee.settings")
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
|
koroshiya/JPyReader | rarfile/setup.py | Python | mit | 961 | 0.023933 | #! /usr/bin/env python
from distutils.core import setup
import rarfile
ver = rarfile.__version__
ldesc = open("README.rst").read().strip()
sdesc = ldesc.split('\n')[0].split(' - ')[1].strip()
setup(
name = "rarfile",
version = ver,
description = sdesc,
long_description = ldesc,
author = "Marko Kreen",
license = "ISC",
author_email = "markokr@gmail.com",
url = "https://github.com/markokr/rarfile",
py_modules = ['rarfile'],
keywords = ['rar', 'unrar', 'archive'],
classifiers = [
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"License :: OSI Ap | proved :: ISC License (ISCL)",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 3",
"Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: System :: Archiving :: Com | pression",
]
)
|
jordic/django_tiny_shop | demo/ipy_user_conf.py | Python | bsd-3-clause | 632 | 0.001582 | """
Makes IPython import all of your projects models when shell is started.
1. Save as ipy_user_conf.py in project root
2. | ./manage.py shell
3. profit
"""
import IPython.ipapi
ip = IPython.ipapi.get()
def main():
imported = "\nImported models:\n\n"
try:
from django.db.models.loading import get_models
for m in get_models():
try:
ip.ex("from %s import %s" % (m.__module__, m.__name__))
imported += "%s.%s\n" % (m.__module__, m.__name__)
| except ImportError:
pass
print imported
except ImportError:
pass
main() |
trizt/osquery | tools/formatting/git-clang-format.py | Python | bsd-3-clause | 18,615 | 0.000967 | #!/usr/bin/env python
#
#===- git-clang-format - ClangFormat Git Integration ---------*- python -*--===#
#
# The LLVM Compiler Infrastructure
#
# This file is distributed under the University of Illinois Open Source
# License. See LLVM-LICENSE.TXT for details.
#
#===------------------------------------------------------------------------===#
r"""
clang-format git integration
============================
This file provides a clang-format integration for git. Put it somewhere in your
path and ensure that it is executable. Then, "git clang-format" will invoke
clang-format on the changes in current files or a specific commit.
For further details, run:
git clang-format -h
Requires Python 2.7
"""
import argparse
import collections
import contextlib
import errno
import os
import re
import subprocess
import sys
usage = 'git clang-format [OPTIONS] [<commit>] [--] [<file>...]'
desc = '''
Run clang-format on all lines that differ between the working directory
and <commit>, which defaults to HEAD. Changes are only applied to the working
directory.
The following git-config settings set the default of the corresponding option:
clangFormat.binary
clangFormat.commit
clangFormat.extension
clangFormat.style
'''
# Name of the temporary index file in which save the output of clang-format.
# This file is created within the .git directory.
temp_index_basename = 'clang-format-index'
Range = collections.namedtuple('Range', 'start, count')
def main():
config = load_git_config()
# In order to keep '--' yet allow options after positionals, we need to
# check for '--' ourselves. (Setting nargs='*' throws away the '--', while
# nargs=argparse.REMAINDER disallows options after positionals.)
argv = sys.argv[1:]
try:
idx = argv.index('--')
except ValueError:
dash_dash = []
else:
dash_dash = argv[idx:]
argv = argv[:idx]
default_extensions = ','.join([
# From clang/lib/Frontend/FrontendOptions.cpp, all lower case
'c', 'h', # C
'm', # ObjC
'mm', # ObjC++
'cc', 'cp', 'cpp', 'c++', 'cxx', 'hpp', # C++
# Other languages that clang-format supports
'proto', 'protodevel', # Protocol Buffers
'js', # JavaScript
])
p = argparse.ArgumentParser(
usage=usage, formatter_class=argparse.RawDescriptionHelpFormatter,
description=desc)
p.add_argument('--binary',
default=config.get('clangformat.binary', 'clang-format'),
help='path to clang-format'),
p.add_argument('--commit',
default=config.get('clangformat.commit', 'HEAD'),
help='default commit to use if none is specified'),
p.add_argument('--diff', action='store_true',
help='print a diff instead of applying the changes')
p.add_argument('--extensions',
default=config.get('clangformat.extensions',
default_extensions),
help=('comma-separated list of file extensions to format, '
'excluding the period and case-insensitive')),
p.add_argument('-f', '--force', action='store_true',
help='allow changes to unstaged files')
p.add_argument('-p', '--patch', action='store_true',
help='select hunks interactively')
p.add_argument('-q', '--quiet', action='count', default=0,
help='print less information')
p.add_argument('--style',
default=config.get('clangformat.style', None),
help='passed to clang-format'),
p.add_argument('-v', '--verbose', action='count', default=0,
help='print extra information')
# We gather all the remaining positional arguments into 'args' since we need
# to use some heuristics to determine whether or not <commit> was present.
# However, to print pretty messages, we make use of metavar and help.
p.add_argument('args', nargs='*', metavar='<commit>',
help='revision from which to compute the diff')
p.add_argument('ignored', nargs='*', metavar='<file>...',
help='if specified, only consider differences in these files')
opts = p.parse_args(argv)
opts.verbose -= opts.quiet
del opts.quiet
commit, files = interpret_args(opts.args, dash_dash, opts.commit)
changed_lines = compute_diff_and_extract_lines(commit, files)
if opts.verbose >= 1:
ignored_files = set(changed_lines)
filter_by_extension(changed_lines, opts.extensions.lower().split(','))
if opts.verbose >= 1:
ignored_files.difference_update(changed_lines)
if ignored_files:
print 'Ignoring changes in the following files (wrong extension):'
for filename in ignored_files:
print ' ', filename
if changed_lines:
print 'Running clang-format on the following files:'
for filename in changed_lines:
print ' ', filename
if not changed_lines:
print 'no modified files to format'
return
# The computed diff outputs absolute paths, so we must cd before accessing
# those files.
cd_to_toplevel()
old_tree = create_tree_from_workdir(changed_lines)
new_tree = run_clang_format_and_save_to_tree(changed_lines,
binary=opts.binary,
style=opts.style)
if opts.verbose >= 1:
print 'old tree:', old_tree
print 'new tree:', new_tree
if old_tree == new_tree:
if opts.verbose >= 0:
print 'clang-format did not modify any files'
elif opts.diff:
print_diff(old_tree, new_tree)
else:
changed_files = apply_changes(old_tree, new_tree, force=opts.force,
patch_mode=opts.patch)
if (opts.verbose >= 0 and not opts.patch) or opts.verbose >= 1:
print 'changed files:'
for filename in changed_files:
print ' ', filename
def load_git_config(non_string_options=None):
"""Return the git configuration as a dictionary.
All options are assumed to be strings unless in `non_string_options`, in which
is a dictionary mapping option name (in lower case) to either "--bool" or
"--int"."""
if non_string_options is None:
non_string_options = {}
out = {}
for entry in run('git', 'config', '--list', '--null').split('\0'):
if entry:
name, value = entry.split('\n', 1)
if name in non_string_options:
value = run('git', 'config', non_string_options[name], name)
out[name] = value
return out
def interpret_args(args, dash_dash, default_commit):
"""Interpret `args` as "[commit] [--] [files...]" and return (commit, files).
It is assumed that "--" and everything that follows has been rem | oved from
args and placed in `dash_dash`.
If "--" is present (i.e., `dash_dash` is non-empty), the argument to its
left (if present) is taken as commit. Otherwise, the first argument is
checked if it is a commit or a file. If commit is not given,
`default_commit` | is used."""
if dash_dash:
if len(args) == 0:
commit = default_commit
elif len(args) > 1:
die('at most one commit allowed; %d given' % len(args))
else:
commit = args[0]
object_type = get_object_type(commit)
if object_type not in ('commit', 'tag'):
if object_type is None:
die("'%s' is not a commit" % commit)
else:
die("'%s' is a %s, but a commit was expected" %
(commit, object_type))
files = dash_dash[1:]
elif args:
if disambiguate_revision(args[0]):
commit = args[0]
files = args[1:]
else:
commit = default_commit
files = args
else:
commit = default_commit
files = []
return commit, files
def disambig |
Samsung/ADBI | arch/arm/tests/arm_dp_imm.py | Python | apache-2.0 | 1,486 | 0.010767 | import random
from test import *
from branch import *
INSN = 'and eor sub rsb add adc sbc rsc tst teq cmp cmn orr mov bic mvn'.split()
NORN = 'mov m | vn'.split()
NORD = 'cmp cmn tst teq'.split()
def rotate(val, c):
return ((val >> c) | (val << (32 - c))) & 0xffffffff
def test(insn, s, flags, rd, rn, rnval, imm8, rot):
name = 'test_dp_imm_%s' % tn()
cleanup = asm_wrap(name, rd, {rn:rnval}, flags)
print '%s_tinsn:' % name
if 1:
if insn in NORN:
print ' %s%s %s, #%i, %i' % (insn, s, rd, imm8, rot)
elif insn in NORD:
print ' %s %s, #%i, | %i' % (insn, rn, imm8, rot)
else:
print ' %s%s %s, %s, #%i, %i' % (insn, s, rd, rn, imm8, rot)
else:
v = rotate(imm8, rot)
if insn in NORN:
print ' %s%s %s, #%i // %x ror %i ' % (insn, s, rd, v, imm8, rot)
elif insn in NORD:
print ' %s %s, #%i // %x ror %i ' % (insn, rn, v, imm8, rot)
else:
print ' %s%s %s, %s, #%i // %x ror %i ' % (insn, s, rd, rn, v, imm8, rot)
cleanup()
def iter_cases():
while True:
yield (random.choice(INSN), random.choice(['s', '']),
random.randint(0, 0x1f), random.choice(T32REGS),
random.choice(ALLREGS), random.randint(0, 0xffffffff),
random.randint(0, 0xff), random.randint(0, 0xf) * 2)
print ' .arm'
tests(test, iter_cases(), 300)
|
tst-ahernandez/earthenterprise | earth_enterprise/src/server/wsgi/serve/snippets/data/metainfo_by_fieldpath.py | Python | apache-2.0 | 24,676 | 0.001621 | #!/usr/bin/python
#
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""All sorts of properties for every field.
Generated by ./generate-onetime-js-widget-data.py
AMD-style module definition.
Note: No leading, internal path before the [], since we'd have to hardwire it
to data/whatever-fns.js which is inflexible.
"""
# TODO: Clean up the description fields that actually contain data
# extracted from the comments of dbroot_v2.proto.
# The META_INFO contains already vetted snippets.
META_INFO = r"""
{
"end_snippet.bbs_server_info.base_url:value": {
"abstract_fieldpath": "end_snippet.bbs_server_info.base_url:value",
"default_value": null,
"description": "URL of the server including protocol, domain and port. Can be translated if we use different servers for different languages.",
"empty_concrete_fieldpath": "end_snippet.bbs_server_info.base_url:value",
"enum_vals": null,
"js_validation_rule": {
"required": true
},
"name": "base_url:value",
"presence": "optional",
"short_label": "base_url",
"typ": "string"
},
"end_snippet.bbs_server_info.file_submit_path:value": {
"abstract_fieldpath": "end_snippet.bbs_server_info.file_submit_path:value",
"default_value": null,
"description": "Path on server where files can be submitted.",
"empty_concrete_fieldpath": "end_snippet.bbs_server_info.file_submit_path:value",
"enum_vals": null,
"js_validation_rule": {
"required": true
},
"name": "file_submit_path:value",
"presence": "optional",
"short_label": "file_submit_path",
"typ": "string"
},
"end_snippet.bbs_server_info.name:value": {
"abstract_fieldpath": "end_snippet.bbs_server_info.name:value",
"default_value": null,
"description": "Name that will be displayed in context menu to user. Must be translated.",
"empty_concrete_fieldpath": "end_snippet.bbs_server_info.name:value",
"enum_vals": null,
"js_validation_rule": {
"required": true
},
"name": "name:value",
"presence": "optional",
"short_label": "name",
"typ": "string"
},
"end_snippet.bbs_server_info.post_wizard_path:value": {
"abstract_fieldpath": "end_snippet.bbs_server_info.post_wizard_path:value",
"default_value": null,
"description": "Path on server where wizard can be found.",
"empty_concrete_fieldpath": "end_snippet.bbs_server_info.post_wizard_path:value",
"enum_vals": null,
"js_validation_rule": {
"required": true
},
"name": "post_wizard_path:value",
"presence": "optional",
"short_label": "post_wizard_path",
"typ": "string"
},
"end_snippet.client_options.disable_disk_cache": {
"abstract_fieldpath": "end_snippet.client_options.disable_disk_cache",
"default_value": null,
"description": "If true, no data will be cached on disk for this database. It will not be accessible offline.",
"empty_concrete_fieldpath": "end_snippet.client_options.disable_disk_cache",
"enum_vals": null,
"js_validation_rule": {
"required": false
},
"name": "disable_disk_cache",
"presence": "optional",
"short_label": "disable_disk_cache",
"typ": "bool"
},
"end_snippet.cobrand_info.logo_url": {
"abstract_fieldpath": "end_snippet.cobrand_info.logo_url",
"default_value": null,
"description": "URL of image to use as logo. Can be remote or local. However, using local URLs depends on the installation of the client and should be used carefully.",
"empty_concrete_fieldpath": "end_snippet.cobrand_info.[].logo_url",
"enum_vals": null,
"js_validation_rule": {
"required": true
},
"name": "logo_url",
"presence": "required",
"short_label": "logo_url",
"typ": "string"
},
"end_snippet.cobrand_info.screen_size": {
"abstract_fieldpath": "end_snippet.cobrand_info.screen_size",
"default_value": "0.0",
"description": "If specified and strictly positive but <= 1.0, makes logo scalable with screen by forcing its width to occupy a fixed fraction of the screeen. For instance, a value of .25 makes the given logo occupy 25% of the screen.",
"empty_concrete_fieldpath": "end_snippet.cobrand_info.[].screen_size",
"enum_vals": null,
"js_validation_rule": {
"required": true
},
"name": "screen_size",
"presence": "optional",
"short_label": "screen_size",
"typ": "double"
},
"end_snippet.cobrand_info.tie_point": {
"abstract_fieldpath": "end_snippet.cobrand_info.tie_point",
"default_value": "BOTTOM_LEFT",
"description": "Controls reference point in overlay.",
"empty_concrete_fieldpath": "end_snippet.cobrand_info.[].tie_point",
"enum_vals": {
"BOTTOM_CENTER": 7,
"BOTTOM_LEFT": 6,
"BOTTOM_RIGHT": 8,
"MID_CENTER": 4,
"MID_LEFT": 3,
"MID_RIGHT": 5,
"TOP_CENTER": 1,
"TOP_LEFT": 0,
"TOP_RIGHT": 2
},
"js_validation_rule": {
"required": true
},
"name": "tie_point",
"presence": "optional",
"short_label": "tie_point",
"typ": "TiePoint"
},
"end_snippet.cobrand_info.x_coord.is_relative": {
"abstract_fieldpath": "end_snippet.cobrand_info.x_coord.is_relative",
"default_value": "false",
"description": "If true, the coordinate is relative to the screen.",
"empty_concrete_fieldpath": "end_snippet.cobrand_info.[].x_coord.is_relative",
"enum_vals": null,
"js_validation_rule": {
"required": false
},
"name": "is_relative",
"presence": "optional",
"short_label": "is_relative",
"typ": "bool"
},
"end_snippet.cobrand_info.x_coord.value": {
"abstract_fieldpath": "end_snippet.cobrand_info.x_coord.value",
"default_value": "0.0",
"description": "Coordinate value. Interpretation depends on is_relative (absolute or",
"empty_concrete_fieldpath": "end_snippet.cobrand_info.[].x_coord.value",
"enum_vals": null,
"js_validation_rule": {
"required": true
},
"name": "value",
"presence": "required",
"short_label": "value",
"typ": "double"
},
"end_snippet.cobrand_info.y_coord.is_relative": {
"abstract_fieldpath": "end_snippet.cobrand_info.y_coord.is_relative",
"default_value": "false",
"description": "If true, the coordinate is relative to the screen.",
"empty_concrete_fieldpath": "end_snippet.cobrand_info.[].y_coord.is_relative",
"enum_vals": null,
"js_validation_rule": {
"required": false
},
"name": "is_relative",
"presence": "optional",
"short_label": "is_relative",
"typ": "bool"
},
"end_snippet.cobrand_info.y_coord.value": {
"abstract_fieldpath": "end_snippet.cobrand_info.y_coord.value",
"default_value": "0.0",
"description": "Coordinate value. Interpretation depends on is_relative (absolute or",
"empty_concrete_fieldpath": "end_snippet.cobrand_info.[].y_coord.value",
"enum_vals": null,
"js_validation_rule": {
"required": true
},
"name": "value",
"presence": "required",
"short_label": "value",
"typ": "double"
},
"end_snippet.default_web_p | age_intl_url:value": {
"abstract_fieldpath": "end_snippet.default_web_page_ | intl_url:value",
"default_value": null,
"description": "Default location of web page.",
"empty_concrete_fieldpath": "end_snippet.default_web_page_intl_url:value",
"enum_vals": null,
"js_validation_rule": {
"required": true
},
"name": "default_web_page_intl_url:value",
"presence": "optional",
"short_label": "default_web_page_intl_url",
"typ": "string"
},
"end_snippet.earth_intl_url:value": {
"abstract_fieldpath" |
hexlism/css_platform | sleepyenv/lib/python2.7/site-packages/Flask_Admin-1.2.0-py2.7.egg/flask_admin/contrib/geoa/fields.py | Python | apache-2.0 | 2,340 | 0.005983 | import json
from wtforms.fields import TextAreaField
from shapely.geometry import shape, mapping
from .widgets import LeafletWidget
from sqlalchemy import func
import geoalchemy2
#from types import NoneType
#from .. import db how do you get db.session in a Field?
class JSONField(TextAreaField):
def _value(self):
if self.raw_data:
return self.raw_data[0]
if self.data:
return self.data
return ""
def process_formdata(self, valuelist): |
if valuelist:
value = valuelist[0]
if not value:
self.data = None
return
try:
self.data = self.from_json(value)
except ValueError:
| self.data = None
raise ValueError(self.gettext('Invalid JSON'))
def to_json(self, obj):
return json.dumps(obj)
def from_json(self, data):
return json.loads(data)
class GeoJSONField(JSONField):
widget = LeafletWidget()
def __init__(self, label=None, validators=None, geometry_type="GEOMETRY", srid='-1', session=None, **kwargs):
super(GeoJSONField, self).__init__(label, validators, **kwargs)
self.web_srid = 4326
self.srid = srid
if self.srid is -1:
self.transform_srid = self.web_srid
else:
self.transform_srid = self.srid
self.geometry_type = geometry_type.upper()
self.session = session
def _value(self):
if self.raw_data:
return self.raw_data[0]
if type(self.data) is geoalchemy2.elements.WKBElement:
if self.srid is -1:
self.data = self.session.scalar(func.ST_AsGeoJson(self.data))
else:
self.data = self.session.scalar(func.ST_AsGeoJson(func.ST_Transform(self.data, self.web_srid)))
return super(GeoJSONField, self)._value()
def process_formdata(self, valuelist):
super(GeoJSONField, self).process_formdata(valuelist)
if str(self.data) is '':
self.data = None
if self.data is not None:
web_shape = self.session.scalar(func.ST_AsText(func.ST_Transform(func.ST_GeomFromText(shape(self.data).wkt, self.web_srid), self.transform_srid)))
self.data = 'SRID='+str(self.srid)+';'+str(web_shape)
|
fevxie/odoo-infrastructure | infrastructure/models/server_configuration.py | Python | agpl-3.0 | 964 | 0 | # -*- coding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in module root
# directory
##############################################################################
from openerp import mo | dels, fields
class server_configuration(models.M | odel):
""""""
_name = 'infrastructure.server_configuration'
_description = 'server_configuration'
name = fields.Char(
string='Name',
required=True
)
distrib_codename = fields.Char(
string='Distribution Codename',
required=True
)
install_command_ids = fields.One2many(
'infrastructure.server_configuration_command',
'server_configuration_id',
string='Installation Commands',
)
server_ids = fields.One2many(
'infrastructure.server',
'server_configuration_id',
string='server_ids'
)
|
reinaldoc/dbsync | source/converter/StringReplace.py | Python | gpl-2.0 | 612 | 0.03268 | """
StringReplace - a converter to replace section(s) of a string to strings passed
as parameters
"""
from util.Strings import Strings
class StringReplace(object):
def __init__(self, data, args=()):
"""
Receive the data to be converted and parameters.
"""
| self.value = data
if not data:
return
try:
s_from = args[0]
s_to = args[1]
self.value = Strings.replace_string(data, s_from | , s_to)
except IndexError:
print("Error: StringReplace takes exactly 2 arguments (%s given): %s" % (len(args), args) )
except Exception, e:
print(e)
def get_value(self):
return self.value
|
hasgeek/funnel | migrations/versions/a23e88f06478_add_commentset_fields.py | Python | agpl-3.0 | 1,439 | 0.00139 | """Add commentset fields.
Revision ID: a23e88f06478
Revises: 284c10efdbce
Create Date: 2021-03-22 02:54:30.416806
"""
from alembic import op
fr | om s | qlalchemy.sql import column, table
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'a23e88f06478'
down_revision = '284c10efdbce'
branch_labels = None
depends_on = None
commentset = table(
'commentset',
column('id', sa.Integer()),
column('last_comment_at', sa.TIMESTAMP(timezone=True)),
)
comment = table(
'comment',
column('id', sa.Integer()),
column('created_at', sa.TIMESTAMP(timezone=True)),
column('commentset_id', sa.Integer()),
)
def upgrade():
op.add_column(
'commentset',
sa.Column('last_comment_at', sa.TIMESTAMP(timezone=True), nullable=True),
)
op.add_column(
'commentset_membership',
sa.Column(
'is_muted',
sa.Boolean(),
nullable=False,
server_default=sa.sql.expression.false(),
),
)
op.alter_column('commentset_membership', 'is_muted', server_default=None)
op.execute(
commentset.update().values(
last_comment_at=sa.select([sa.func.max(comment.c.created_at)]).where(
comment.c.commentset_id == commentset.c.id
)
)
)
def downgrade():
op.drop_column('commentset_membership', 'is_muted')
op.drop_column('commentset', 'last_comment_at')
|
pombredanne/jkarn-pub-test | udfs/python/coffee.py | Python | apache-2.0 | 813 | 0.01353 | from pig_util import outputSchema
COFFEE_SNOB_PHRASES = set((\
'espresso', 'cappucino', 'macchiato', 'latte', 'cortado', 'pour over', 'barista',
'flat white', 'siphon pot', 'woodneck', 'french press', 'arabica', 'chemex',
'frothed', 'la marzocco', 'mazzer', 'la pavoni', 'nespresso', 'rancilio silvia', 'hario',
'intelligentsia', 'counter culture', 'barismo', 'sightglass', 'blue bottle', 'stumptown',
'single origin', 'coffee beans', 'coffee grinder', 'lavazza', 'coffeegeek'\
))
@outputSchema('is_coffee_tweet:int | ')
def is_coffee_tweet(text):
"""
Is the given text indicative of coffee snobbery?
"""
if not text:
retu | rn 0
lowercased = set(text.lower().split())
return 1 if any((True for phrase in COFFEE_SNOB_PHRASES if phrase in lowercased)) else 0
|
ulikoehler/entropy-analysis-tools | RandGen/scripts/lc.py | Python | gpl-3.0 | 1,933 | 0.047077 | #!/usr/bin/env python
#This is a runtime-configurable implementation of the
#linear congruentialpseudo-random number generator,
#using the following formula:
#y = (a*z+b) mod m
#where:
#z = the last generate result (or the seed at startup)
#a,b,m = parameters (defaults generated randomly)
#
#Numbers created by the LC algorithm are NOT indended
#to be used for cryptographical purposes.
#Note that the LC is a lot slower than almost all other
#algorithms, especially the MersenneTwister.
from __future__ import with_statement
from random import *
from decimal import *
from math import *
import sys
from optparse import OptionParser
#Generate random defaults for the option parser
randm = randint()
randa = randint(randm)
randb = randint(randm)
randseed
#Initlialize the option parser
parser = OptionParser()
parser.enable_interspersed_args()
parser.add_option("-c",
"--count",
type="int",
action="store",
dest="count",
help= | "How many random numbers to generate")
parser.add_o | ption("-o",
"--out",
dest="outfileName",
help="Output file")
parser.add_option("-a",
"--parama",
type="long",
dest="a",
help="Parameter a (multiplier)")
parser.add_option("-b",
"--paramb",
type="long",
dest="b",
help="Parameter b (increment)")
parser.add_option("-m",
"--paramm",
type="long",
dest="m",
help="Parameter m (modulus)")
parser.add_option("-s",
"--seed",
type="long",
dest="seed",
help="Seed (= last generator result)")
#Set defaults
parser.set_defaults(outfileName="rand.txt",
count=10000,
a=randa,
b=randb,
m=randm,
seed=randseed
)
#Parse
(options,args) = parser.parse_args()
#Global paramter aliases
a = options.a
b = options.b
m = options.m
lastres = options.seed
with open(options.outfileName,"w") as outfile:
for i in xrange(options.count):
lastres = (a * lastres + b) % m
print >> outfile,lastres |
TOC-Shard/moul-scripts | Python/xUserKIBase.py | Python | gpl-3.0 | 26,723 | 0.007409 | # -*- coding: utf-8 -*-
#==============================================================================#
# #
# Offline KI #
# #
# Copyright (C) 2004-2011 The Offline KI contributors #
# See the file AUTHORS for more info about the contributors (including #
# contact information) #
# #
# This program is free software: you can redistribute it and/or modify #
# it under the terms of the GNU General Public License as published by #
# the Free Software Foundation, either version 3 of the License, or #
# (at your option) any later version, with or (at your option) without #
# the Uru exception (see below). #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# Please see the file COPYING for the full GPLv3 license, or see #
# <http://www.gnu.org/licenses/> #
# #
# Uru exception: In addition, this file may be used in combination with #
# (non-GPL) code within the context of Uru. #
# #
#==============================================================================#
import string
import os
from Plasma import *
from PlasmaTypes import *
from PlasmaKITypes import *
from PlasmaVaultConstants import *
from PlasmaNetConstants import *
from xPsnlVaultSDL import *
import xUserKI
import xUserKIConfig
# Global info variables and configuration
gGotoPlaces = {
'city': {
'ferry': [-216, -591, 9],
'gallery': [168, -490, 163],
'library': [792, -597, 260],
'takotah': [0, -97, 221],
'rooftop': [-60, -195, 275]
},
'Ercana': {
'brokentrail': [-528, -964, -57],
'factory': [0, -146, -13],
'start': [-497, -588, -45],
'trailend': [-886, -728, -48],
'pellets': [0, 750, 80]
},
'Kadish': {
'gallerylink': [48, 181, 15],
'pyramid': [736, -121, 3],
'vault': [1182, 210, 9]
},
'Minkata': {
'cage': [-37, 1018, 14],
'cave1': [106, 1402, -13],
'cave2': [1515, 655, -13],
'cave3': [-1021, 1864, -13],
'cave4': [-885, -546, -13],
'cave5': [-1452, 1069, -13]
}
}
gColorNames = ['black', 'blue', 'brown', 'cyan', 'darkbrown', 'darkgreen', 'darkpurple', 'gray', 'green', 'magenta', 'maroon', 'navyblue', 'orange', 'pink', 'red', 'slateblue', 'steelblue', 'tan', 'white', 'yellow']
# Global helper variables
gLastSpawnPos = []
gCommandList = []
# Helper functions
def StrToColor(str):
color = None
exec('color = ptColor().%s()' % str.lower())
return color
def FilterFilename(filename):
filename = string.replace(filename, '<', '')
filename = string.replace(filename, '>', '')
filename = string.replace(filename, '/', '')
filename = string.replace(filename, '\\', '')
filename = string.replace(filename, '*', '')
filename = string.replace(filename, '?', '')
return filename
def ExportFile(ki, dirname, element):
datatype = element.getType()
# text notes
if datatype == PtVaultNodeTypes.kTextNoteNode:
element = element.upcastToTextNoteNode()
if type(element) != type(None):
filename = FilterFilename(str(element.getID()) + ' - ' + element.getTitle() + '.txt')
saveFile = open((dirname + '\\' + filename), 'w')
saveFile.write(element.getText())
saveFile.close()
ki.AddChatLine(None, 'KI Text saved as %s' % filename, 0)
return
# images
elif datatype == PtVaultNodeTypes.kImageNode:
element = element.upcastToImageNode()
if type(element) != type(None):
filename = FilterFilename(str(element.getID()) + ' - ' + element.getTitle() + '.jpg')
element.imageGetImage().saveAsJPEG((dirname + '\\' + filename), 80)
ki.AddChatLine(None, 'Image saved as %s' % filename, 0)
return
# unknown type
ki.DisplayErrorMessage('This KI element can not be exported - only marker missions, pictures and text notes are supported')
# Callback functions
def OnTimer(ki, id):
if id == xUserKI.kEnableTimer:
PtForceCursorShown()
PtSendKIMessage(kEnableKIandBB, 0)
return True
return False
def OnDefaultKey(ki, isShift, isCtrl, keycode):
# Don't use Scroll Lock here, for some reason it gets send twice as often as it should
# Scripted commands
if isCtrl and not isShift and keycode == 19: # Pause
global gCommandList
if len(gCommandList):
ki.SendMessage(gCommandList[0], silent=True)
gCommandList.remove(gCommandList[0])
# don't tell user about last command... it would show the KI
return
def OnLinkingOut(ki):
global gLastSpawnPos
# reset spawn position
gLastSpawnPos = []
def OnAvatarSpawn(ki):
global gLastSpawnPos
pos = PtGetLocalAvatar().position()
gLastSpawnPos = [pos.getX(), pos.getY(), pos.getZ()]
# Main function
def OnCommand(ki, arg, cmnd, args, playerList, silent):
# base commands
if cmnd == 'export':
element = xUserKI.KIManager.BKCurrentContent
if type(element) != type(None): element = xUserKI.KIManager.BKCurrentContent.getChild()
if type(element) == type(None):
ki.DisplayErrorMessage('You must have a KI element selected to use this command')
return True
dirname = 'export'
if not os.path.exists(dirname): os.mkdir(dirname)
ExportFile(ki, dirname, element)
return True
if cmnd == 'kilight':
(valid, time) = xUserKI.GetArg(ki, cmnd, args, 'time in seconds (default: 60)',
lambda args: len(args) == 1, lambda args: int(args[0]),
lambda args: len(args) == 0, lambda args: int(60))
if not valid: return True
xUserKI.KIManager.DoKILight(1, 1, time)
if not silent: ki.AddChatLine(None, 'You KI will light for %i seconds' % time, 0)
return True
if (cmnd == 'clearcam'):
PtClearCameraStack()
if not silent: ki.AddChatLine(None, 'Successfully cleared the camera stack', 0)
return True
if (cmnd == 'enablefp'):
cam = ptCamera()
cam.enableFir | stPersonOverride()
if not silent: ki.AddChatLine(None, '1st person switchi | ng enabled', 0)
return True
# link commands
if (cmnd == 'hood'):
gender = PtGetLocalAvatar().avatar.getAvatarClothingGroup()
if (gender == 0): hisher = 'his'
elif (gender == 1): hisher = 'her'
else: hisher = 'the'
if not silent: ki.DisplayStatusMessage('%s is linking to %s neighborhood' % (PtGetClientName(), hisher), 1)
linkMgr = ptNetLinkingMgr()
linkMgr.linkToMyNeighborhoodAge()
return True
if (cmnd == 'nexus'):
gender = PtGetLocalAvatar().avatar.getAvatarClothingGroup()
if (gender == 0): hisher = 'his'
elif (gender == 1): hisher = 'her'
else: hisher = 'the'
if not silent: ki.DisplayStatusMessage('%s is linking to %s Nexus' % (PtGetClientName(), hisher), 1)
PtLinkToAge("Nexus")
return True
# avatar movement
if (cmnd == 'jump'):
|
craws/OpenAtlas-Python | openatlas/models/gis.py | Python | gpl-2.0 | 7,414 | 0.000135 | import ast
from typing import Any, Dict, List, Optional
from flask import g, json
from flask_wtf import FlaskForm
from openatlas.database.gis import Gis as Db
from openatlas.models.entity import Entity
from openatlas.models.imports import Project
from openatlas.models.node import Node
from openatlas.util.util import sanitize
class InvalidGeomException(Exception):
    """Signals invalid geometry data (not raised in this chunk; presumably
    used by the GIS/database layer -- confirm against callers)."""
    pass
class Gis:
    """Read/write access to the GeoJSON geometries attached to locations."""

    @staticmethod
    def add_example_geom(location: Entity) -> None:
        # Used for tests until model is decoupled from forms
        Db.add_example_geom(location.id)

    @staticmethod
    def get_by_id(id_: int) -> List[Dict[str, Any]]:
        """Return all geometries stored for the location with this id."""
        return Db.get_by_id(id_)

    @staticmethod
    def get_all(
            objects: Optional[List[Entity]] = None,
            structure: Optional[Dict[str, Any]] = None) -> Dict[str, List[Any]]:
        """Collect GeoJSON features for the map frontend.

        Features are bucketed by shape (point/linestring/polygon) and by
        their relation to the given objects: selected, super, sub or
        sibling units. Returns JSON-encoded lists keyed as the map expects.
        """
        if not objects:
            objects = []
        all_: Dict[str, List[Any]] = {
            'point': [],
            'linestring': [],
            'polygon': []}
        extra: Dict[str, List[Any]] = {
            'supers': [],
            'subs': [],
            'siblings': []}
        selected: Dict[str, List[Any]] = {
            'point': [],
            'linestring': [],
            'polygon': [],
            'polygon_point': []}
        # Include GIS of subunits which would be otherwise omitted
        subunit_ids = [
            subunit.id for subunit in structure['subunits']] \
            if structure else []
        sibling_ids = [
            sibling.id for sibling in structure['siblings']] \
            if structure else []
        extra_ids = [0]
        if structure:
            extra_ids = [
                objects[0].id if objects else 0] \
                + [structure['super_id']] \
                + subunit_ids \
                + sibling_ids
        object_ids = [x.id for x in objects] if objects else []
        # Loop invariant: hoisted out of the shape loop (was fetched anew
        # for every shape).
        place_root = Node.get_hierarchy('Place')
        for shape in ['point', 'polygon', 'linestring']:
            for row in Db.get_by_shape(shape, extra_ids):
                # NOTE(review): '\"' == '"' in Python, so these replace()
                # calls are no-ops; kept to preserve behavior exactly.
                description = row['description'].replace('"', '\"') \
                    if row['description'] else ''
                object_desc = row['object_desc'].replace('"', '\"') \
                    if row['object_desc'] else ''
                item = {
                    'type': 'Feature',
                    'geometry': json.loads(row['geojson']),
                    'properties': {
                        'objectId': row['object_id'],
                        'objectName': row['object_name'].replace('"', '\"'),
                        'objectDescription': object_desc,
                        'id': row['id'],
                        'name': row['name'].replace('"', '\"')
                        if row['name'] else '',
                        'description': description,
                        'shapeType': row['type']}}
                # Attach the first node that belongs to the 'Place'
                # hierarchy as the feature's objectType.
                if 'types' in row and row['types']:
                    nodes_list = ast.literal_eval('[' + row['types'] + ']')
                    for node_id in list(set(nodes_list)):
                        node = g.nodes[node_id]
                        if node.root and node.root[-1] == place_root.id:
                            item['properties']['objectType'] = \
                                node.name.replace('"', '\"')
                            break
                if structure and row['object_id'] == structure['super_id']:
                    extra['supers'].append(item)
                elif row['object_id'] in object_ids:
                    selected[shape].append(item)
                elif row['object_id'] in subunit_ids:  # pragma no cover
                    extra['subs'].append(item)
                elif row['object_id'] in sibling_ids:  # pragma no cover
                    extra['siblings'].append(item)
                else:
                    all_[shape].append(item)
                # Polygons also carry a representative point geometry.
                if 'polygon_point' in row:
                    polygon_point_item = dict(item)  # Make a copy
                    polygon_point_item['geometry'] = json.loads(
                        row['polygon_point'])
                    if row['object_id'] in object_ids:
                        selected['polygon_point'].append(polygon_point_item)
                    elif row['object_id'] and structure and \
                            row['object_id'] == structure['super_id']:
                        extra['supers'].append(polygon_point_item)
                    elif row['object_id'] in subunit_ids:  # pragma no cover
                        extra['subs'].append(polygon_point_item)
                    elif row['object_id'] in sibling_ids:  # pragma no cover
                        extra['siblings'].append(polygon_point_item)
                    else:
                        all_['point'].append(polygon_point_item)
        return {
            'gisPointAll': json.dumps(all_['point']),
            'gisPointSelected': json.dumps(selected['point']),
            'gisPointSupers': json.dumps(extra['supers']),
            'gisPointSubs': json.dumps(extra['subs']),
            'gisPointSibling': json.dumps(extra['siblings']),
            'gisLineAll': json.dumps(all_['linestring']),
            'gisLineSelected': json.dumps(selected['linestring']),
            'gisPolygonAll': json.dumps(all_['polygon']),
            'gisPolygonSelected': json.dumps(selected['polygon']),
            'gisPolygonPointSelected': json.dumps(selected['polygon_point']),
            'gisAllSelected': json.dumps(
                selected['polygon']
                + selected['linestring']
                + selected['point'])}

    @staticmethod
    def insert(entity: Entity, form: FlaskForm) -> None:
        """Store the geometries drawn in *form* for *entity*'s location."""
        for shape in ['point', 'line', 'polygon']:
            data = getattr(form, 'gis_' + shape + 's').data
            if not data:
                continue  # pragma: no cover
            for item in json.loads(data):
                if not item['geometry']['coordinates'] \
                        or item['geometry']['coordinates'] == [[]]:
                    continue  # pragma: no cover
                # Centerpoints skip geometry validation on purpose.
                if item['properties']['shapeType'] != 'centerpoint':
                    Db.test_geom(json.dumps(item['geometry']))
                Db.insert(
                    shape='linestring' if shape == 'line' else shape,
                    data={
                        'entity_id': entity.id,
                        'name': sanitize(item['properties']['name'], 'text'),
                        'description': sanitize(
                            item['properties']['description'],
                            'text'),
                        'type': item['properties']['shapeType'],
                        'geojson': json.dumps(item['geometry'])})

    @staticmethod
    def insert_import(
            entity: Entity,
            location: Entity,
            project: Project,
            easting: float,
            northing: float) -> None:
        """Store an imported centerpoint for *location*."""
        Db.insert_import({
            'entity_id': location.id,
            'description':
                f"Imported centerpoint of {sanitize(entity.name, 'text')} "
                f"from the {sanitize(project.name, 'text')} project",
            'geojson':
                f'{{"type":"Point", "coordinates": [{easting},{northing}]}}'})

    @staticmethod
    def delete_by_entity(entity: Entity) -> None:
        """Remove all geometries stored for *entity*."""
        Db.delete_by_entity_id(entity.id)
|
kryptxy/torrench | torrench/modules/limetorrents.py | Python | gpl-3.0 | 7,173 | 0.001673 | """The Pirate Bay Module."""
import logging
import sys
from torrench.utilities.Config import Config
class LimeTorrents(Config):
    """
    LimeTorrents class.

    This class fetches torrents from LimeTorrents proxy,
    and diplays results in tabular form.
    All activities are logged and stored in a log file.
    In case of errors/unexpected output, refer logs.
    """

    def __init__(self, title, page_limit):
        """Initialisations."""
        Config.__init__(self)
        self.proxies = self.get_proxies('limetorrents')
        self.proxy = None
        self.title = title
        self.pages = page_limit
        self.logger = logging.getLogger('log1')
        self.class_name = self.__class__.__name__.lower()
        self.index = 0
        self.page = 0
        self.total_fetch_time = 0
        self.mylist = []
        self.masterlist = []
        self.mylist_crossite = []
        self.masterlist_crossite = []
        self.mapper = []
        self.soup_dict = {}
        self.soup = None
        self.headers = ['NAME', 'INDEX', 'SIZE', 'SE/LE', 'UPLOADED']

    def check_proxy(self):
        """
        To check proxy availability.

        Proxy is checked in two steps:
        1. To see if proxy 'website' is available.
        2. A test is carried out with a sample string 'hello'.
        If results are found, test is passed, else test failed!

        This class inherits Config class. Config class inherits
        Common class. The Config class provides proxies list fetched
        from config file. The Common class consists of commonly used
        methods.

        In case of failiur, next proxy is tested with same procedure.
        This continues until working proxy is found.
        If no proxy is found, program exits.
        """
        count = 0
        for proxy in self.proxies:
            print("Trying %s" % (self.colorify("yellow", proxy)))
            self.logger.debug("Trying proxy: %s" % (proxy))
            self.soup = self.http_request(proxy)
            try:
                # http_request() returns -1 on failure; otherwise verify the
                # page really is a LimeTorrents mirror via its logo title.
                if self.soup == -1 or 'limetorrents' not in self.soup.find('div', id='logo').a['title'].lower():
                    print("Bad proxy!\n")
                    count += 1
                    if count == len(self.proxies):
                        print("No more proxies found! Terminating")
                        sys.exit(2)
                    else:
                        continue
                else:
                    print("Proxy available. Performing test...")
                    url = proxy+"/search/all/hello/seeds/1/"
                    self.logger.debug("Carrying out test for string 'hello'")
                    self.soup = self.http_request(url)
                    test = self.soup.find('table', class_='table2')
                    if test is not None:
                        self.proxy = proxy
                        print("Pass!")
                        self.logger.debug("Test passed!")
                        break
                    else:
                        print("Test failed!\nPossibly site not reachable. See logs.")
                        self.logger.debug("Test failed!")
            except (AttributeError, Exception) as e:
                self.logger.exception(e)
                pass

    def get_html(self):
        """
        To get HTML page.

        Once proxy is found, the HTML page for
        corresponding search string is fetched.
        Also, the time taken to fetch that page is returned.
        Uses http_request_time() from Common.py module.
        """
        try:
            for self.page in range(self.pages):
                print("\nFetching from page: %d" % (self.page+1))
                search = "/search/all/{}/seeds/{}/".format(self.title, self.page+1)
                self.soup, time = self.http_request_time(self.proxy + search)
                self.logger.debug("fetching page %d/%d" % (self.page+1, self.pages))
                print("[in %.2f sec]" % (time))
                self.logger.debug("page fetched in %.2f sec!" % (time))
                self.total_fetch_time += time
                self.soup_dict[self.page] = self.soup
        except Exception as e:
            self.logger.exception(e)
            print("Error message: %s" %(e))
            print("Something went wrong! See logs for details. Exiting!")
            sys.exit(2)

    def parse_html(self):
        """
        Parse HTML to get required results.

        Results are fetched in masterlist list.
        Also, a mapper[] is used to map 'index'
        with torrent name, link and magnetic link
        """
        try:
            for page in self.soup_dict:
                self.soup = self.soup_dict[page]
                content = self.soup.find('table', class_='table2')
                if content is None:
                    return
                results = content.findAll('tr')
                for result in results[1:]:
                    data = result.findAll('td')
                    # try block is limetorrents-specific. Means only limetorrents requires this.
                    try:
                        name = data[0].findAll('a')[1].string
                        link = data[0].findAll('a')[1]['href']
                        link = self.proxy+link
                        date = data[1].string
                        date = date.split('-')[0]
                        size = data[2].string
                        seeds = data[3].string.replace(',', '')
                        leeches = data[4].string.replace(',', '')
                        seeds_color = self.colorify("green", seeds)
                        leeches_color = self.colorify("red", leeches)
                        self.index += 1
                        self.mapper.insert(self.index, (name, link, self.class_name))
                        self.mylist = [name, "--" +
                                       str(self.index) + "--", size, seeds_color+'/'+
                                       leeches_color, date]
                        self.masterlist.append(self.mylist)
                        self.mylist_crossite = [name, self.index, size, seeds+'/'+leeches, date]
                        self.masterlist_crossite.append(self.mylist_crossite)
                    except Exception as e:
                        # Malformed rows (ads, spacers) are skipped, not fatal.
                        self.logger.exception(e)
                        pass
        except Exception as e:
            self.logger.exception(e)
            print("Error message: %s" % (e))
            print("Something went wrong! See logs for details. Exiting!")
            sys.exit(2)
def main(title, page_limit):
    """Execution begins here."""
    print("\n[LimeTorrents]\n")
    print("Obtaining proxies...")
    # Construct before entering the try block: previously a KeyboardInterrupt
    # raised before the assignment made the handler crash with a NameError
    # on `lmt`.
    lmt = LimeTorrents(title, page_limit)
    try:
        lmt.check_proxy()
        lmt.get_html()
        lmt.parse_html()
        lmt.post_fetch()  # defined in Common.py
        print("\nBye!")
    except KeyboardInterrupt:
        lmt.logger.debug("Keyboard interupt! Exiting!")
        print("\n\nAborted!")
def cross_site(title, page_limit):
    """Factory used by the cross-site search driver; returns an unstarted scraper."""
    return LimeTorrents(title, page_limit)
# This module is driven through main()/cross_site(); it does nothing as a script.
if __name__ == "__main__":
    print("It's a module!")
|
KrzysztofStachanczyk/Sensors-WWW-website | www/env/lib/python2.7/site-packages/django/urls/base.py | Python | gpl-3.0 | 5,668 | 0.001059 | from __future__ import unicode_literals
from threading import local
from django.utils import six
from django.utils.encoding import force_text, iri_to_uri
from django.utils.functional import lazy
from django.utils.six.moves.urllib.parse import urlsplit, urlunsplit
from django.utils.translation import override
from .exceptions import NoReverseMatch, Resolver404
from .resolvers import get_ns_resolver, get_resolver
from .utils import get_callable
# SCRIPT_NAME prefixes for each thread are stored here. If there's no entry for
# the current thread (which is the only one we ever access), it is assumed to
# be empty.
_prefixes = local()
# Overridden URLconfs for each thread are stored here.
_urlconfs = local()
def resolve(path, urlconf=None):
    """Match *path* against *urlconf*, defaulting to the active URLconf."""
    active_urlconf = urlconf if urlconf is not None else get_urlconf()
    return get_resolver(active_urlconf).resolve(path)
def reverse(viewname, urlconf=None, args=None, kwargs=None, current_app=None):
    """Return the URL path for *viewname*.

    *viewname* is either a callable view or a (possibly namespaced)
    "ns1:ns2:name" string; the loop below walks the namespace parts,
    honoring app namespaces and *current_app*.
    """
    if urlconf is None:
        urlconf = get_urlconf()
    resolver = get_resolver(urlconf)
    args = args or []
    kwargs = kwargs or {}
    prefix = get_script_prefix()
    if not isinstance(viewname, six.string_types):
        view = viewname
    else:
        parts = viewname.split(':')
        parts.reverse()
        view = parts[0]
        path = parts[1:]
        if current_app:
            current_path = current_app.split(':')
            current_path.reverse()
        else:
            current_path = None
        resolved_path = []
        ns_pattern = ''
        while path:
            ns = path.pop()
            current_ns = current_path.pop() if current_path else None
            # Lookup the name to see if it could be an app identifier.
            try:
                app_list = resolver.app_dict[ns]
                # Yes! Path part matches an app in the current Resolver.
                if current_ns and current_ns in app_list:
                    # If we are reversing for a particular app, use that
                    # namespace.
                    ns = current_ns
                elif ns not in app_list:
                    # The name isn't shared by one of the instances (i.e.,
                    # the default) so pick the first instance as the default.
                    ns = app_list[0]
            except KeyError:
                pass
            if ns != current_ns:
                current_path = None
            try:
                extra, resolver = resolver.namespace_dict[ns]
                resolved_path.append(ns)
                ns_pattern = ns_pattern + extra
            except KeyError as key:
                if resolved_path:
                    raise NoReverseMatch(
                        "%s is not a registered namespace inside '%s'" %
                        (key, ':'.join(resolved_path))
                    )
                else:
                    raise NoReverseMatch("%s is not a registered namespace" % key)
        if ns_pattern:
            resolver = get_ns_resolver(ns_pattern, resolver)
    return force_text(iri_to_uri(resolver._reverse_with_prefix(view, prefix, *args, **kwargs)))
reverse_lazy = lazy(reverse, six.text_type)
def clear_url_caches():
    """Drop every memoized URL lookup (call after changing the URLconf)."""
    for cached_lookup in (get_callable, get_resolver, get_ns_resolver):
        cached_lookup.cache_clear()
def set_script_prefix(prefix):
    """Set the script prefix for the current thread, always '/'-terminated."""
    _prefixes.value = prefix if prefix.endswith('/') else prefix + '/'
def get_script_prefix():
    """
    Return the currently active script prefix ('/' when none was set).

    Useful for client code building URLs manually instead of going
    through the request instance.
    """
    try:
        return _prefixes.value
    except AttributeError:
        return '/'
def clear_script_prefix():
    """Unset the script prefix for the current thread (no-op when unset)."""
    if hasattr(_prefixes, "value"):
        del _prefixes.value
def set_urlconf(urlconf_name):
    """
    Install *urlconf_name* as this thread's URLconf override.

    Passing a falsy value reverts the thread to the default URLconf.
    """
    if urlconf_name:
        _urlconfs.value = urlconf_name
    elif hasattr(_urlconfs, "value"):
        del _urlconfs.value
def get_urlconf(default=None):
    """Return this thread's overriding URLconf, or *default* when unset."""
    try:
        return _urlconfs.value
    except AttributeError:
        return default
def is_valid_path(path, urlconf=None):
    """
    Return True when *path* resolves against the given (or default) URLconf.

    Convenience wrapper turning the resolve/Resolver404 dance into a
    simple boolean test.
    """
    try:
        resolve(path, urlconf)
    except Resolver404:
        return False
    return True
def translate_url(url, lang_code):
    """
    Given a URL (absolute or relative), try to get its translated version in
    the `lang_code` language (either by i18n_patterns or by translated regex).
    Return the original URL if no translated version is found.
    """
    parsed = urlsplit(url)
    try:
        match = resolve(parsed.path)
    except Resolver404:
        # Path does not resolve at all: nothing to translate.
        pass
    else:
        to_be_reversed = "%s:%s" % (match.namespace, match.url_name) if match.namespace else match.url_name
        # Reverse the matched pattern while the target language is active.
        with override(lang_code):
            try:
                url = reverse(to_be_reversed, args=match.args, kwargs=match.kwargs)
            except NoReverseMatch:
                # No translated pattern exists; keep the original URL.
                pass
            else:
                # Re-attach scheme/host/query/fragment around the new path.
                url = urlunsplit((parsed.scheme, parsed.netloc, url, parsed.query, parsed.fragment))
    return url
|
paris-ci/CloudBot | plugins/password.py | Python | gpl-3.0 | 2,551 | 0.00196 | # import string
import random as std_random
from cloudbot import hook
# Prefer PyCrypto's CSPRNG when installed; otherwise fall back to the stdlib.
try:
    # noinspection PyUnresolvedReferences
    from Crypto.Random import random
    gen = random.StrongRandom()
except ImportError:
    # Just use the regular random module, not the strong one -- SystemRandom
    # is still cryptographically sound (it draws from os.urandom).
    gen = std_random.SystemRandom()
# Candidate words for passphrases, one per line.
with open("data/password_words.txt") as f:
    common_words = [line.strip() for line in f.readlines()]
# @hook.command(autohelp=False)
# def password(text, notice):
# """[length [types]] - generates a password of <length> (default 10). [types] can include 'alpha', 'no caps',
# 'numeric', 'symbols' or any combination: eg. 'numbers symbols'"""
# okay = []
#
# # find the length needed for the password
# numb = text.split(" ")
#
# try:
# length = int(numb[0])
# except ValueError:
# length = 12
#
# if length > 50:
# notice("Maximum length is 50 characters.")
# return
#
# # add alpha characters
# if "alpha" in text or "letter" in text:
# okay += list(string.ascii_lowercase)
# # adds capital characters if not told not to
# if "no caps" not in text:
# okay += list(string.ascii_uppercase)
#
# # add numbers
# if "numeric" in text or "number" in text:
# okay += list(string.digits)
#
# # add symbols
# if "symbol" in text or "special" in text:
# sym = ['!', '@', '#', '$', '%', '^', '&', '*', '(', ')', '-', '=', '_', '+', '[', ']', '{', '}', '\\', '|', ';',
# ':', "'", '.', '>', ',', '<', '/', '?', '`', '~', '"']
# okay += sym
#
# # defaults to lowercase alpha + numbers password if the okay list is empty
# if not okay:
# okay = list(string.ascii_lowercase) + list(string.digits)
#
# # extra random lel
# std_random.shuffle(okay)
# chars = []
#
# for i in range(length):
# chars.append(random.choice(okay))
#
# notice("".join(chars))
@hook.command("wpass", "wordpass", "wordpassword", autohelp=False)
def word_password(text, notice):
    """[length] - generates an easy to remember password with [length] (default 3) commonly used words"""
    # Interpret the argument as a word count; anything non-numeric means 3.
    try:
        length = int(text)
    except ValueError:
        length = 3
    if length > 10:
        # Fixed: the old message said "Maximum length is 50 characters.",
        # copy-pasted from the character-based password command above.
        notice("Maximum length is 10 words.")
        return
    # Draw each word with the CSPRNG chosen at import time.
    words = [gen.choice(common_words) for _ in range(length)]
    notice("Your password is '{}'. Feel free to remove the spaces when using it.".format(" ".join(words)))
|
3dfxmadscientist/CBSS | openerp/tools/mail.py | Python | agpl-3.0 | 14,926 | 0.00335 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Business Applications
# Copyright (C) 2012-2013 OpenERP S.A. (<http://openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from lxml import etree
import cgi
import logging
import lxml.html
import lxml.html.clean as clean
import openerp.pooler as pooler
import random
import re
import socket
import threading
import time
from email.utils import getaddresses
from openerp.loglevels import ustr
_logger = logging.getLogger(__name__)
#----------------------------------------------------------
# HTML Sanitizer
#----------------------------------------------------------
tags_to_kill = ["script", "head", "meta", "title", "link", "style", "frame", "iframe", "base", "object", "embed"]
tags_to_remove = ['html', 'body', 'font']
def html_sanitize(src):
    """Return *src* with dangerous HTML stripped via lxml's Cleaner.

    Email-like tags are HTML-escaped first so the parser keeps them, and
    some percent-escapes are undone afterwards for MAKO compatibility.
    """
    if not src:
        return src
    src = ustr(src, errors='replace')
    # html encode email tags
    part = re.compile(r"(<(([^a<>]|a[^<>\s])[^<>]*)@[^<>]+>)", re.IGNORECASE | re.DOTALL)
    src = part.sub(lambda m: cgi.escape(m.group(1)), src)
    # some corner cases make the parser crash (such as <SCRIPT/XSS SRC=\"http://ha.ckers.org/xss.js\"></SCRIPT> in test_mail)
    try:
        cleaner = clean.Cleaner(page_structure=True, style=False, safe_attrs_only=False, forms=False, kill_tags=tags_to_kill, remove_tags=tags_to_remove)
        cleaned = cleaner.clean_html(src)
    except TypeError:
        # lxml.clean version < 2.3.1 does not have a kill_tags attribute
        # to remove in 2014
        cleaner = clean.Cleaner(page_structure=True, style=False, safe_attrs_only=False, forms=False, remove_tags=tags_to_kill + tags_to_remove)
        cleaned = cleaner.clean_html(src)
    except Exception, e:
        # An "empty" ParserError means there was nothing to clean.
        if isinstance(e, etree.ParserError) and 'empty' in str(e):
            return ""
        _logger.warning('html_sanitize failed to parse %s' % (src))
        cleaned = '<p>Impossible to parse</p>'
    # MAKO compatibility: $, { and } inside quotes are escaped, preventing correct mako execution
    cleaned = cleaned.replace('%24', '$')
    cleaned = cleaned.replace('%7B', '{')
    cleaned = cleaned.replace('%7D', '}')
    cleaned = cleaned.replace('%20', ' ')
    cleaned = cleaned.replace('%5B', '[')
    cleaned = cleaned.replace('%5D', ']')
    return cleaned
#----------------------------------------------------------
# HTML Cleaner
#----------------------------------------------------------
def html_email_clean(html):
    """ html_email_clean: clean the html to display in the web client.
        - strip email quotes (remove blockquote nodes)
        - strip signatures (remove --\n{\n)Blahblah), by replacing <br> by
          \n to avoid ignoring signatures converted into html

        :param string html: sanitized html; tags like html or head should not
        be present in the html string. This method therefore takes as input
        html code coming from a sanitized source, like fields.html.
    """
    def _replace_matching_regex(regex, source, replace=''):
        # Rebuild *source* with every match of *regex* substituted.
        dest = ''
        idx = 0
        for item in re.finditer(regex, source):
            dest += source[idx:item.start()] + replace
            idx = item.end()
        dest += source[idx:]
        return dest

    if not html or not isinstance(html, basestring):
        return html
    html = ustr(html)

    # 0. remove encoding attribute inside tags
    doctype = re.compile(r'(<[^>]*\s)(encoding=(["\'][^"\']*?["\']|[^\s\n\r>]+)(\s[^>]*|/)?>)', re.IGNORECASE | re.DOTALL)
    html = doctype.sub(r"", html)

    # 1. <br[ /]> -> \n, because otherwise the tree is obfuscated
    br_tags = re.compile(r'([<]\s*[bB][rR]\s*\/?[>])')
    html = _replace_matching_regex(br_tags, html, '__BR_TAG__')

    # 2. form a tree, handle (currently ?) pure-text by enclosing them in a pre
    root = lxml.html.fromstring(html)
    if not len(root) and root.text is None and root.tail is None:
        html = '<div>%s</div>' % html
        root = lxml.html.fromstring(html)

    # 2.5 remove quoted text in nodes
    quote_tags = re.compile(r'(\n(>)+[^\n\r]*)')
    for node in root.getiterator():
        if not node.text:
            continue
        node.text = _replace_matching_regex(quote_tags, node.text)

    # 3. remove blockquotes
    quotes = [el for el in root.getiterator(tag='blockquote')]
    for node in quotes:
        # copy the node tail into parent text
        if node.tail:
            parent = node.getparent()
            # Fixed precedence bug: was `parent.text or '' + node.tail`,
            # which dropped node.tail whenever parent.text was set.
            parent.text = (parent.text or '') + node.tail
        # remove the node
        node.getparent().remove(node)

    # 4. strip signatures
    signature = re.compile(r'([-]{2}[\s]?[\r\n]{1,2}[^\z]+)')
    for elem in root.getiterator():
        if elem.text:
            match = re.search(signature, elem.text)
            if match:
                elem.text = elem.text[:match.start()] + elem.text[match.end():]
        if elem.tail:
            match = re.search(signature, elem.tail)
            if match:
                elem.tail = elem.tail[:match.start()] + elem.tail[match.end():]

    # 5. \n back to <br/>
    html = etree.tostring(root, pretty_print=True)
    html = html.replace('__BR_TAG__', '<br />')

    # 6. Misc cleaning :
    # - ClEditor seems to love using <div><br /><div> -> replace with <br />
    br_div_tags = re.compile(r'(<div>\s*<br\s*\/>\s*<\/div>)')
    html = _replace_matching_regex(br_div_tags, html, '<br />')
    return html
#----------------------------------------------------------
# HTML/Text management
#----------------------------------------------------------
def html2plaintext(html, body_id=None, encoding='utf-8'):
""" From an HTML text, convert the HTML to plain text.
If @param body_id is provided then this is the tag where the
body (not necessarily <body>) starts.
"""
## (c) Fry-IT, www.fry-it.com, 2007
## <peter@fry-it.com>
## download here: http://www.peterbe.com/plog/html2plaintext
html = ustr(html)
tree = etree.fromstring(html, parser=etree.HTMLParser())
if body_id is not None:
source = tree.xpath('//*[@id=%s]' % (body_id,))
else:
source = tree.xpath('//body')
if len(source):
tree = source[0]
url_index = []
i = 0
for link in tree.findall('.//a'):
url = link.get('href')
if url:
i += 1
link.tag = 'span'
link.text = '%s [%s]' % (link.text, i)
url_index.append(url)
html = ustr(etree.tostring(tree, encoding=encoding))
# \r char is converted into , must remove it
html = html.replace(' ', '')
html = html.replace('<strong>', '*').replace('</strong>', '*')
html = html.replace('<b>', '*').replace('</b>', '*')
html = html.replace('<h3>', '*').replace('</h3>', '*')
html = html.replace('<h2>', '**').replace('</h2>', '**')
html = html.replace('<h1>', '**').replace('</h1>', '**')
html = html.replace('<em>', '/').replace('</em>', '/')
html = html.replace('<tr>', '\n')
html = html.replace('</p>', '\n')
html = re.sub('<br\s*/?>', '\n', html)
html = re.sub('<.*?>', ' ', html)
html = html.replace(' ' * 2, ' ')
# strip all lines
html = '\n'.join([x.strip() for x in html.splitlines()])
html = html.replace('\n' * 2, '\n')
for i, url in enumerate(url_index):
if i == 0:
html += '\n\n'
|
levilucio/SyVOLT | GM2AUTOSAR_MM/overlap_rules/models/MapVirtualDeviceFAULTY_overlap_MDL.py | Python | mit | 7,044 | 0.006814 | """
__MapVirtualDeviceFAULTY_overlap_MDL.py_____________________________________________________
Automatically generated AToM3 Model File (Do not modify directly)
Author: gehan
Modified: Tue Nov 5 10:14:40 2013
____________________________________________________________________________________________
"""
from stickylink import *
from widthXfillXdecoration import *
from MT_pre__VirtualDevice import *
from LHS import *
from graph_MT_pre__VirtualDevice import *
from graph_LHS import *
from ATOM3Enum import *
from ATOM3String import *
from ATOM3BottomType import *
from ATOM3Constraint import *
from ATOM3Attribute import *
from ATOM3Float import *
from ATOM3List import *
from ATOM3Link import *
from ATOM3Connection import *
from ATOM3Boolean import *
from ATOM3Appearance import *
from ATOM3Text import *
from ATOM3Action import *
from ATOM3Integer import *
from ATOM3Port import *
from ATOM3MSEnum import *
def MapVirtualDeviceFAULTY_overlap_MDL(self, rootNode, MT_pre__GM2AUTOSAR_MMRootNode=None, MoTifRuleRootNode=None):
    # NOTE(review): auto-generated AToM3 model code. This copy only repairs
    # corrupted tokens (a broken attribute name and a split string literal);
    # the generated statements themselves are intentionally left untouched.
    # --- Generating attributes code for ASG MT_pre__GM2AUTOSAR_MM ---
    if( MT_pre__GM2AUTOSAR_MMRootNode ):
        # author
        MT_pre__GM2AUTOSAR_MMRootNode.author.setValue('Annonymous')
        # description
        MT_pre__GM2AUTOSAR_MMRootNode.description.setValue('\n')
        MT_pre__GM2AUTOSAR_MMRootNode.description.setHeight(15)
        # name
        MT_pre__GM2AUTOSAR_MMRootNode.name.setValue('')
        MT_pre__GM2AUTOSAR_MMRootNode.name.setNone()
    # --- ASG attributes over ---

    # --- Generating attributes code for ASG MoTifRule ---
    if( MoTifRuleRootNode ):
        # author
        MoTifRuleRootNode.author.setValue('Annonymous')
        # description
        MoTifRuleRootNode.description.setValue('\n')
        MoTifRuleRootNode.description.setHeight(15)
        # name
        MoTifRuleRootNode.name.setValue('MapVirtualDeviceFAULTY_overlap')
    # --- ASG attributes over ---

    self.obj1581=MT_pre__VirtualDevice(self)
    self.obj1581.isGraphObjectVisual = True

    if(hasattr(self.obj1581, '_setHierarchicalLink')):
        self.obj1581._setHierarchicalLink(False)

    # MT_pivotOut__
    self.obj1581.MT_pivotOut__.setValue('')
    self.obj1581.MT_pivotOut__.setNone()

    # MT_subtypeMatching__
    self.obj1581.MT_subtypeMatching__.setValue(('True', 0))
    self.obj1581.MT_subtypeMatching__.config = 0

    # MT_pre__classtype
    self.obj1581.MT_pre__classtype.setValue('\n#===============================================================================\n# This code is executed when evaluating if a node shall be matched by this rule.\n# You can access the value of the current node\'s attribute value by: attr_value.\n# You can access any attribute x of this node by: this[\'x\'].\n# If the constraint relies on attribute values from other nodes,\n# use the LHS/NAC constraint instead.\n# The given constraint must evaluate to a boolean expression.\n#===============================================================================\n\nreturn True\n')
    self.obj1581.MT_pre__classtype.setHeight(15)

    # MT_pivotIn__
    self.obj1581.MT_pivotIn__.setValue('')
    self.obj1581.MT_pivotIn__.setNone()

    # MT_label__
    self.obj1581.MT_label__.setValue('1')

    # MT_pre__cardinality
    self.obj1581.MT_pre__cardinality.setValue('\n#===============================================================================\n# This code is executed when evaluating if a node shall be matched by this rule.\n# You can access the value of the current node\'s attribute value by: attr_value.\n# You can access any attribute x of this node by: this[\'x\'].\n# If the constraint relies on attribute values from other nodes,\n# use the LHS/NAC constraint instead.\n# The given constraint must evaluate to a boolean expression.\n#===============================================================================\n\nreturn True\n')
    self.obj1581.MT_pre__cardinality.setHeight(15)

    # MT_pre__name
    self.obj1581.MT_pre__name.setValue('\n#===============================================================================\n# This code is executed when evaluating if a node shall be matched by this rule.\n# You can access the value of the current node\'s attribute value by: attr_value.\n# You can access any attribute x of this node by: this[\'x\'].\n# If the constraint relies on attribute values from other nodes,\n# use the LHS/NAC constraint instead.\n# The given constraint must evaluate to a boolean expression.\n#===============================================================================\n\nreturn True\n')
    self.obj1581.MT_pre__name.setHeight(15)

    self.obj1581.graphClass_= graph_MT_pre__VirtualDevice
    if self.genGraphics:
        new_obj = graph_MT_pre__VirtualDevice(200.0,140.0,self.obj1581)
        new_obj.DrawObject(self.UMLmodel)
        self.UMLmodel.addtag_withtag("MT_pre__VirtualDevice", new_obj.tag)
        new_obj.layConstraints = dict() # Graphical Layout Constraints
        new_obj.layConstraints['scale'] = [1.0, 1.0]
    else: new_obj = None
    self.obj1581.graphObject_ = new_obj

    # Add node to the root: rootNode
    rootNode.addNode(self.obj1581)
    self.globalAndLocalPostcondition(self.obj1581, rootNode)
    self.obj1581.postAction( rootNode.CREATE )

    self.obj1580=LHS(self)
    self.obj1580.isGraphObjectVisual = True

    if(hasattr(self.obj1580, '_setHierarchicalLink')):
        self.obj1580._setHierarchicalLink(False)

    # constraint
    self.obj1580.constraint.setValue('#===============================================================================\n# This code is executed after the nodes in the LHS have been matched.\n# You can access a matched node labelled n by: PreNode(\'n\').\n# To access attribute x of node n, use: PreNode(\'n\')[\'x\'].\n# The given constraint must evaluate to a boolean expression:\n# returning True enables the rule to be applied,\n# returning False forbids the rule from being applied.\n#===============================================================================\n\nreturn True\n')
    self.obj1580.constraint.setHeight(15)

    self.obj1580.graphClass_= graph_LHS
    if self.genGraphics:
        new_obj = graph_LHS(60.0,60.0,self.obj1580)
        new_obj.DrawObject(self.UMLmodel)
        self.UMLmodel.addtag_withtag("LHS", new_obj.tag)
        new_obj.layConstraints = dict() # Graphical Layout Constraints
        new_obj.layConstraints['scale'] = [1.0, 1.0]
    else: new_obj = None
    self.obj1580.graphObject_ = new_obj

    # Add node to the root: rootNode
    rootNode.addNode(self.obj1580)
    self.globalAndLocalPostcondition(self.obj1580, rootNode)
    self.obj1580.postAction( rootNode.CREATE )

    # Connections for obj1581 (graphObject_: Obj21) of type MT_pre__VirtualDevice
    self.drawConnections()
    # Connections for obj1580 (graphObject_: Obj20) of type LHS
    self.drawConnections()
# Hooks read by the AToM3 loader: the model-construction entry point, the
# metamodels this model requires, and the ATOM3 file-format version.
newfunction = MapVirtualDeviceFAULTY_overlap_MDL
loadedMMName = ['MT_pre__GM2AUTOSAR_MM_META', 'MoTifRule_META']
atom3version = '0.3'
|
jledbetter/openhatch | mysite/scripts/clean_data_for_academic_analysis.py | Python | agpl-3.0 | 1,935 | 0.007235 | #!/usr/bin/python
# This file is part of OpenHatch.
# Copyright (C) 2010 OpenHatch, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
### The purpose of this script is to create a version of the database
### that helps a U Mass Amherst researcher look through the OpenHatch
### data and perform text classification and other text analysis.
### To protect our users' privacy, we:
### * set the password column to the empty string
### * set the email column to the empty string
### * delete (from the database) any PortfolioEntry that is_deleted
### * delete (from the database) any Citation that is_deleted
### * delete all WebResponse objects
import mysite.profile.models
import django.contrib.auth.models
### set the email and password columns to the empty string
for user in django.contrib.auth.models.User.objects.all():
user.email = ''
user.password = ''
user.save()
### delete PortfolioEntry instances that is_deleted
for pfe in mysite.profile.models.PortfolioEntry.objects.all():
if pfe.is_deleted:
pfe.delete()
### delete Citation instances that is_deleted
for citation in mysite.profile.models.Citation.objects.all():
if citation.is_deleted:
citation.delete()
### delete all WebResponse objects
for wr in mysite.customs.models.WebResponse.objects.all():
wr.delete()
|
McBen/ingress-intel-total-conversion | buildsettings.py | Python | isc | 2,320 | 0.006466 | # settings file for builds.
# if you want to have custom builds, copy this file to "localbuildsettings.py" and make changes there.
# possible fields:
# resourceBaseUrl - optional - the URL base for external resources (all resources embedded in standard IITC)
# distUrlBase - optional - the base URL to use for update checks
# buildMobile - optional - if set, mobile builds are built with 'ant'. requires the Android SDK and appropriate mobile/local.properties file configured
# preBuild - optional - an array of strings to run as commands, via os.system, before building the scripts
# postBuild - optional - an array of string to run as commands, via os.system, after all builds are complete
buildSettings = {
# local: use this build if you're not modifying external resources
# no external resources allowed - they're not needed any more
'local': {
'resourceUrlBase': 'http://localhost:8100',
'distUrlBase': 'http://localhost:8100',
},
# local8000: if you need to modify external resources, this build will load them from
# the web server at http://0.0.0.0:8000/dist
# (This shouldn't be required any more - all resources are embedded. but, it remains just in case some new feature
# needs external resources)
'local8000': {
'resourceUrlBase': 'http://0.0.0.0:8000/dist',
'distUrlBase': None,
},
# mobile: default entry that also builds the mobile .apk
# you will need to have the android-sdk installed, and the file mobil | e/local.properties created as required
'mobile': {
'resourceUrlBase': None,
'distUrlBase': None,
'buildMobile': 'debug',
},
# if you want to publish your own fork of the project, and host it on your own web site
# create a localbuildsettings | .py file containing something similar to this
# note: Firefox+Greasemonkey require the distUrlBase to be "https" - they won't check for updates on regular "http" URLs
#'example': {
# 'resourceBaseUrl': 'http://www.example.com/iitc/dist',
# 'distUrlBase': 'https://secure.example.com/iitc/dist',
#},
}
# defaultBuild - the name of the default build to use if none is specified on the build.py command line
# (in here as an example - it only works in localbuildsettings.py)
#defaultBuild = 'local'
|
OpenC-IIIT/prosfair | gui/gui_basic.py | Python | mit | 23,370 | 0.046598 | import pygame, sys
from pygame.locals import *
import re
import json
import imp
import copy
#chessboard = json.load(open("./common/initial_state.json"))
chessboard1 = json.load(open("./common/initial_state.json"))
chessboard2 = json.load(open("./common/initial_state.json"))
chessboard3 = json.load(open("./common/initial_state.json"))
#created 3 chessboards for now
chessboards = [chessboard1, chessboard2, chessboard3]
chessboard = chessboards[0] #current board set to the first.
image_dir = "./res/basic_chess_pieces/"
rules = imp.load_source('chess_basic_rules','./common/rules.py')
cpu = imp.load_source('chess_minimax_ai','./ai/cpu.py')
helper = imp.load_source('helper_functions','./common/helper_functions.py')
opposite = { "white" : "black" , "black" : "white" }
def get_chess_square(x,y,size):
return [ x/size+1,y/size+1]
def get_chess_square_reverse(a,b,size):
return ((a-1)*size/8,(b-1)*size/8)
def get_chess_square_border(r, s, size):
return((r-1)*size/8+2, (s-1)*size/8+2)
pygame.init()
screen = pygame.display.set_mode((600, 600))
def draw_chessboard( board, size,p_list = None):
SIZE = size
GRAY = (150, 150, 150)
WHITE = (255, 255, 255)
BLUE = ( 0 , 0 , 150)
screen.fill(WHITE)
#filling gray square blocks of size/8 alternatively
startX = 0
startY = 0
for e in range(0, 8):
if e%2 == 0 :
startX = 0
else:
startX = SIZE/8
for e2 in range(0, 8):
pygame.draw.rect(screen, GRAY, ((startX, startY), (SIZE/8, SIZE/8)))
startX += 2* SIZE/8
startY += SIZE/8
#placing the correspoding images of the pieces on the blocks
for army in board.keys():
for k in board[army].keys():
img = pygame.image.load(image_dir + army + "_" + re.findall('[a-z]+',k)[0]+'.png')
screen.blit(img,( board[army][k][1]*SIZE/8 - SIZE/8+SIZE/80, board[army][k][0] * SIZE/8 - SIZE/8+SIZE/80 ))
#if any piece is selected and has some legal moves then display blue squares on corresponding valid move block
if p_list:
for p in p_list:
pygame.draw.rect(screen,BLUE,(get_chess_square_reverse(p[1],p[0],SIZE),(SIZE/8,SIZE/8)))
if (p[1]+p[0])%2!=0:
pygame.draw.rect(screen, WHITE, (get_chess_square_border(p[1], p[0], SIZE), (SIZE/8-4, SIZE/8-4)))
else:
pygame.draw.rect(screen, GRAY, (get_chess_square_border(p[1], p[0], SIZE), (SIZE/8-4, SIZE/8-4)))
x, y = p[1], p[0]
for x in ['white','black']:
for k in board[x].keys():
if board[x][k][1] == p[1] and board[x][k][0] == p[0]: #print k
if "bishop" in k:
img = pygame.image.load(image_dir + x + "_" + re.findall('[a-z]+',k)[0]+'.png')
screen.blit(img,( board[x][k][1]*SIZE/8 - SIZE/8+SIZE/80, board[x][k][0] * SIZE/8 - SIZE/8+SIZE/80 ))
elif "pawn" in k:
img = pygame.image.load(image_dir + x + "_" + re.findall('[a-z]+',k)[0]+'.png')
screen.blit(img,( board[x][k][1]*SIZE/8 - SIZE/8+SIZE/80, board[x][k][0] * SIZE/8 - SIZE/8+SIZE/80 ))
elif "knight" in k:
img = pygame.image.load(image_dir + x + "_" + re.findall('[a-z]+',k)[0]+'.png')
screen.blit(img,( board[x][k][1]*SIZE/8 - SIZE/8+SIZE/80, board[x][k][0] * SIZE/8 - SIZE/8+SIZE/80 ))
elif "rook" in k:
img = pygame.image.load(image_dir + x + "_" + re.findall('[a-z]+',k)[0]+'.png')
screen.blit(img,( board[x][k][1]*SIZE/8 - SIZE/8+SIZE/80, board[x][k][0] * SIZE/8 - SIZE/8+SIZE/80 ))
elif "queen" in k:
img = pygame.image.load(image_dir + x + "_" + re.findall('[a-z]+',k)[0]+'.png')
screen.blit(img,( board[x][k][1]*SIZE/8 - SIZE/8+SIZE/80, board[x][k][0] * SIZE/8 - SIZE/8+SIZE/80 ))
elif "king" in k:
img = pygame.image.load(image_dir + x + "_" + re.findall('[a-z]+',k)[0]+'.png')
screen.blit(img,( board[x][k][1]*SIZE/8 - SIZE/8+SIZE/80, board[x][k][0] * SIZE/8 - SIZE/8+SIZE/80 ))
pygame.display.update()
def looping_cpu_vs_human(board,size):
global chessboards
global flag
SIZE = size
draw_chessboard(board,size)
cur=0
old_x=0
old_y=0
new_x=0
new_y=0
color = "white"
flag= 0
while True:
for event in pygame.event.get():
if event.type == QUIT:
pygame.quit()
sys.exit()
pygame.display.update()
#checking for keyboard events
if event.type == pygame.KEYDOWN:
if event.key == pygame.K_RIGHT:
cur = (cur+1)%3
board = chessboards[cur]
if event.key == pygame.K_LEFT:
cur = (cur+2)%3
board = chessboards[cur]
#updating the screen with the next or prev chessboard
draw_chessboard(board,size)
if event.type == pygame.MOUSEBUTTONDOWN:
if flag == 1:
flag =0
x,y= pygame.mouse.get_pos()
new_x,new_y = get_chess_square(x,y,SIZE/8)
#print new_x,new_y
| valid = False
| for x in ['white','black']:
for k in board[x].keys():
if board[x][k][1] == old_x and board[x][k][0] == old_y:
if "bishop" in k:
if [new_y,new_x] in rules.legal_bishop_moves(board,x,k): valid = True
elif "pawn" in k:
if [new_y,new_x] in rules.legal_pawn_moves(board,x,k): valid = True
elif "knight" in k:
if [new_y,new_x] in rules.legal_knight_moves(board,x,k): valid = True
elif "rook" in k:
if [new_y,new_x] in rules.legal_rook_moves(board,x,k): valid = True
elif "queen" in k:
if [new_y,new_x] in rules.legal_queen_moves(board,x,k): valid = True
elif "king" in k:
if [new_y,new_x] in rules.legal_king_moves(board,x,k): valid = True
#if piece is moved to valid position then update the piece's coordinates and check if it is killing other piece
if valid and x == color:
board[x][k][1] = new_x
board[x][k][0] = new_y
killed_piece = None
for k,v in board[opposite[x]].iteritems():
if v[0] == new_y and v[1] == new_x:
killed_piece = k
|
moiseslorap/RIT | Computer Science 1/Labs/lab9/hashtable.py | Python | mit | 4,321 | 0.010414 | """
description: open addressing Hash Table for CS 141 Lecture
file: hashtable.py
language: python3
author: sps@cs.rit.edu Sean Strout
author: scj@cs.rit.edu Scott Johnson
"""
from rit_lib import *
class HashTable(struct):
"""
The HashTable data structure contains a collection of values
where each value is located by a hashable key.
No two values may have the same key, but more than one
key may have the same value.
table is the list holding the hash table
| size is the number of elements in occupying the hashtable
"""
_slots = ((list, 'table'), (int, 'size'))
def | HashTableToStr(self):
"""
HashTableToStr: HashTable -> String
"""
result = ""
for i in range(len(self.table)):
e = self.table[i]
if not e == None:
result += str(i) + ": "
result += e.EntryToStr() + "\n"
return result
def hash_function(self, name):
"""
hash_function: K NatNum -> NatNum
Compute a hash of the val string that is in [0 ... n).
"""
hashval = 0
for letter in name:
hashval += (ord(letter) - ord('a'))
hallnum = hashval % len(self.table)
# hashcode = 0
# hashcode = len(val) % n
return hallnum
def keys(self):
"""
keys: HashTable(K, V) -> List(K)
Return a list of keys in the given hashTable.
"""
result = []
for entry in self.table:
if entry != None:
result.append(entry.key)
return result
def has(self, key):
"""
has: HashTable(K, V) K -> Boolean
Return True iff hTable has an entry with the given key.
"""
index = self.hash_function(key)
startIndex = index # We must make sure we don't go in circles.
while self.table[ index ] != None and self.table[ index ].key != key:
index = (index + 1) % len(self.table)
if index == startIndex:
return False
return self.table[ index ] != None
def put(self, key, value):
"""
put: HashTable(K, V) K V -> Boolean
Using the given hash table, set the given key to the
given value. If the key already exists, the given value
will replace the previous one already in the table.
If the table is full, an Exception is raised.
"""
index = self.hash_function(key)
startIndex = index # We must make sure we don't go in circles.
while self.table[ index ] != None and self.table[ index ].key != key:
index = (index + 1) % len(self.table)
if index == startIndex:
raise Exception("Hash table is full.")
if self.table[ index ] == None:
self.table[ index ] = Entry(key, value)
self.size += 1
else:
self.table[ index ].value = value
return True
def get( self, key):
"""
get: HashTable(K, V) K -> V
Return the value associated with the given key in
the given hash table.
Precondition: self.has(key)
"""
index = self.hash_function(key)
startIndex = index # We must make sure we don't go in circles.
while self.table[ index ] != None and self.table[ index ].key != key:
index = (index + 1) % len(self.table)
if index == startIndex:
raise Exception("Hash table does not contain key.")
if self.table[ index ] == None:
raise Exception("Hash table does not contain key:", key)
else:
return self.table[ index ].value
def createHashTable(capacity=100):
"""
createHashTable: NatNum? -> HashTable
"""
if capacity < 2:
capacity = 2
aHashTable = HashTable([None for _ in range(capacity)], 0)
return aHashTable
class Entry(struct):
"""
A class used to hold key/value pairs.
"""
_slots = ((object, "key"), (object, "value"))
def EntryToStr( self ):
"""
EntryToStr: Entry -> String
return the string representation of the entry.
"""
return "(" + str(self.key) + ", " + str(self.value) + ")"
|
tg-msft/azure-sdk-tools | packages/python-packages/api-stub-generator/tests/docstring_parser_test.py | Python | mit | 7,115 | 0.004498 | # -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
from apistub.nodes import DocstringParser
from apistub.nodes import ArgType
docstring_standard_return_type = """
Dummy docstring to verify standard return types and param types
:rtype: str
"""
docstring_Union_return_type1 = """
Dummy docstring to verify standard return types and param types
:rtype: Union[str, int]
"""
docstring_Union_return_type2 = """
Dummy docstring to verify standard return types and param types
:rtype: Union(str, int)
Dummy string at new line
"""
docstring_union_return_type3 = """
Dummy docstring to verify standard return types and param types
:rtype: union[str, int]
"""
docstring_multi_ret_type = """
Dummy docstring to verify standard return types and param types
:rtype: str or ~azure.test.testclass or None
"""
docstring_dict_ret_type = """
Dummy docstring to verify standard return types and param types
:rtype: dict[str, int]
"""
docstring_param_type = """
:param str name: Dummy name param
:param val: Value type
:type val: str
"""
docstring_param_type1 = """
:param str name: Dummy name param
:param val: Value type
:type val: str
:param ~azure.core.pipeline pipeline: dummy pipeline param
:param pipe_id: pipeline id
:type pipe_id: Union[str, int]
:param data: Dummy data
:type data: str or
~azure.dummy.datastream
"""
docstring_param_typing_optional = """
:param group: Optional group to check if user exists in.
:type group: typing.Optional[str]
"""
docstring_param_nested_union = """
:param dummyarg: Optional group to check if user exists in.
:type dummyarg: typing.Union[~azure.eventhub.EventDataBatch, List[~azure.eventhub.EventData]]
"""
docstring_multi_complex_type = """
:param documents: The set of documents to process as part of this batch.
If you wish to specify the ID and country_hint on a per-item basis you must
use as input a list[:class:`~azure.ai.textanalytics.DetectLanguageInput`] or a list of
dict representations of :class:`~azure.ai.te | xtanalytics.DetectLanguageInput`, like
`{"id": "1", "country_hint": "us", "text": "hello world"}`.
:type documents:
list[str] or list[~azure.ai.textanalytics.DetectLanguageInput] or list[dict[str, str]]
:keyword str country_hint: A country hint for the entire batch. Accepts two
letter country codes specified by ISO 3166 | -1 alpha-2. Per-document
country hints will take precedence over whole batch hints. Defaults to
"US". If you don't want to use a country hint, pass the string "none".
:keyword str model_version: This value indicates which model will
be used for scoring, e.g. "latest", "2019-10-01". If a model-version
is not specified, the API will default to the latest, non-preview version.
:keyword bool show_stats: If set to true, response will contain document
level statistics.
:return: The combined list of :class:`~azure.ai.textanalytics.DetectLanguageResult` and
:class:`~azure.ai.textanalytics.DocumentError` in the order the original documents were
passed in.
:rtype: list[~azure.ai.textanalytics.DetectLanguageResult,
~azure.ai.textanalytics.DocumentError]
:raises ~azure.core.exceptions.HttpResponseError or TypeError or ValueError:
.. admonition:: Example:
.. literalinclude:: ../samples/sample_detect_language.py
:start-after: [START batch_detect_language]
:end-before: [END batch_detect_language]
:language: python
:dedent: 8
:caption: Detecting language in a batch of documents.
"""
docstring_param_type_private = """
:param str name: Dummy name param
:param client: Value type
:type client: ~azure.search.documents._search_index_document_batching_client_base.SearchIndexDocumentBatchingClientBase
"""
class TestDocStringParser:
def _test_return_type(self, docstring, expected):
docstring_parser = DocstringParser(docstring)
assert expected == docstring_parser.find_return_type()
def _test_variable_type(self, docstring, varname, expected):
docstring_parser = DocstringParser(docstring)
assert expected == docstring_parser.find_type("(type|keywordtype|paramtype|vartype)", varname)
def _test_find_args(self, docstring, expected_args, is_keyword = False):
parser = DocstringParser(docstring)
expected = {}
for arg in expected_args:
expected[arg.argname] = arg
for arg in parser.find_args('keyword' if is_keyword else 'param'):
assert arg.argname in expected and arg.argtype == expected[arg.argname].argtype
def test_return_builtin_return_type(self):
self._test_return_type(docstring_standard_return_type, "str")
def test_return_union_return_type(self):
self._test_return_type(docstring_Union_return_type1, "Union[str, int]")
def test_return_union_return_type1(self):
self._test_return_type(docstring_Union_return_type2, "Union(str, int)")
def test_return_union_lower_case_return_type(self):
self._test_return_type(docstring_union_return_type3, "union[str, int]")
def test_multi_return_type(self):
self._test_return_type(docstring_multi_ret_type, "str or ~azure.test.testclass or None")
def test_dict_return_type(self):
self._test_return_type(docstring_dict_ret_type, "dict[str, int]")
def test_param_type(self):
self._test_variable_type(docstring_param_type, "val", "str")
def test_param_type_private(self):
self._test_variable_type(docstring_param_type_private, "client", "~azure.search.documents._search_index_document_batching_client_base.SearchIndexDocumentBatchingClientBase")
def test_params(self):
args = [ArgType("name", "str"), ArgType("val", "str")]
self._test_find_args(docstring_param_type, args)
def test_param_optional_type(self):
self._test_variable_type(docstring_param_type1, "pipe_id", "Union[str, int]")
def test_param_or_type(self):
self._test_variable_type(docstring_param_type1, "data", "str or ~azure.dummy.datastream")
self._test_variable_type(docstring_param_type1, "pipeline", None)
def test_type_typing_optional(self):
self._test_variable_type(docstring_param_typing_optional, "group", "typing.Optional[str]")
def test_nested_union_type(self):
self._test_variable_type(docstring_param_nested_union, "dummyarg", "typing.Union[~azure.eventhub.EventDataBatch, List[~azure.eventhub.EventData]]")
def test_multi_text_analytics_type(self):
self._test_variable_type(docstring_multi_complex_type, "documents", "list[str] or list[~azure.ai.textanalytics.DetectLanguageInput] or list[dict[str, str]]") |
lablup/sorna-agent | tests/docker/test_agent.py | Python | lgpl-3.0 | 6,239 | 0.000641 | import signal
from typing import (
Any,
Mapping,
)
from unittest.mock import AsyncMock
from aiodocker.exceptions import DockerError
from ai.backend.common.exception import ImageNotAvailable
from ai.backend.common.types import AutoPullBehavior
from ai.backend.common.docker import ImageRef
from ai.backend.agent.config import agent_local_config_iv
from ai.backend.agent.docker.agent import DockerAgent
import pytest
class DummyEtcd:
async def get_prefix(self, key: str) -> Mapping[str, Any]:
pass
@pytest.fixture
async def agent(test_id, redis_container, mocker):
dummy_etcd = DummyEtcd()
mocked_etcd_get_prefix = AsyncMock(return_value={})
mocker.patch.object(dummy_etcd, 'get_prefix', new=mocked_etcd_get_prefix)
agent = await DockerAgent.new(
dummy_etcd, agent_local_config_iv.check({
'agent': {
'mode': 'docker',
'id': f'i-{test_id}',
'scaling-group': f'sg-{test_id}',
},
'container': {
'scratch-type': 'hostdir',
'stats-type': 'docker',
'port-range': [19000, 19200],
},
'logging': {},
'resource': {},
'debug': {},
'etcd': {
'namespace': f'ns-{test_id}',
},
'redis': redis_container,
'plugins': {},
}),
stats_monitor=None,
error_monitor=None,
skip_initial_scan=True,
) # for faster test iteration
try:
yield agent
finally:
await agent.shutdown(signal.SIGTERM)
@pytest.mark.asyncio
async def test_init(agent, mocker):
print(agent)
imgref = ImageRef('index.docker.io/lablup/lua:5.3-alpine3.8', ['index.docker.io'])
query_digest = "sha256:b000000000000000000000000000000000000000000000000000000000000001"
digest_matching_image_info = {
"Id": "sha256:b000000000000000000000000000000000000000000000000000000000000001",
"RepoTags": [
"lablup/lua:5.3-alpine3.8"
],
}
digest_mismatching_image_info = {
"Id": "sha256:a000000000000000000000000000000000000000000000000000000000000002",
"RepoTags": [
"lablup/lua:5.3-alpine3.8"
],
}
@pytest.mark.asyncio
async def test_auto_pull_digest_when_digest_matching(agent, m | ocker):
behavior = AutoPullBehavior.DIGEST
inspect_mock = AsyncMock(return_value=digest_matching_image_info)
mocker.patch.object(agent.docker.images, 'inspect', new=inspect_mock)
pull = await agent.check_image(imgref, query_digest, behavior)
assert not pull
inspect_mock.assert_awaited_with(imgref.canonical)
@pytest.mark.asyncio
async def test_auto_pull_digest_when_digest_mismatching(agent, mocker):
behavior = AutoPullBehavior.DI | GEST
inspect_mock = AsyncMock(return_value=digest_mismatching_image_info)
mocker.patch.object(agent.docker.images, 'inspect', new=inspect_mock)
pull = await agent.check_image(imgref, query_digest, behavior)
assert pull
inspect_mock.assert_awaited_with(imgref.canonical)
@pytest.mark.asyncio
async def test_auto_pull_digest_when_missing(agent, mocker):
behavior = AutoPullBehavior.DIGEST
inspect_mock = AsyncMock(
side_effect=DockerError(status=404,
data={'message': 'Simulated missing image'}))
mocker.patch.object(agent.docker.images, 'inspect', new=inspect_mock)
pull = await agent.check_image(imgref, query_digest, behavior)
assert pull
inspect_mock.assert_called_with(imgref.canonical)
@pytest.mark.asyncio
async def test_auto_pull_tag_when_digest_matching(agent, mocker):
behavior = AutoPullBehavior.TAG
inspect_mock = AsyncMock(return_value=digest_matching_image_info)
mocker.patch.object(agent.docker.images, 'inspect', new=inspect_mock)
pull = await agent.check_image(imgref, query_digest, behavior)
assert not pull
inspect_mock.assert_awaited_with(imgref.canonical)
@pytest.mark.asyncio
async def test_auto_pull_tag_when_digest_mismatching(agent, mocker):
behavior = AutoPullBehavior.TAG
inspect_mock = AsyncMock(return_value=digest_mismatching_image_info)
mocker.patch.object(agent.docker.images, 'inspect', new=inspect_mock)
pull = await agent.check_image(imgref, query_digest, behavior)
assert not pull
inspect_mock.assert_awaited_with(imgref.canonical)
@pytest.mark.asyncio
async def test_auto_pull_tag_when_missing(agent, mocker):
behavior = AutoPullBehavior.TAG
inspect_mock = AsyncMock(
side_effect=DockerError(status=404,
data={'message': 'Simulated missing image'}))
mocker.patch.object(agent.docker.images, 'inspect', new=inspect_mock)
pull = await agent.check_image(imgref, query_digest, behavior)
assert pull
inspect_mock.assert_called_with(imgref.canonical)
@pytest.mark.asyncio
async def test_auto_pull_none_when_digest_matching(agent, mocker):
behavior = AutoPullBehavior.NONE
inspect_mock = AsyncMock(return_value=digest_matching_image_info)
mocker.patch.object(agent.docker.images, 'inspect', new=inspect_mock)
pull = await agent.check_image(imgref, query_digest, behavior)
assert not pull
inspect_mock.assert_awaited_with(imgref.canonical)
@pytest.mark.asyncio
async def test_auto_pull_none_when_digest_mismatching(agent, mocker):
behavior = AutoPullBehavior.NONE
inspect_mock = AsyncMock(return_value=digest_mismatching_image_info)
mocker.patch.object(agent.docker.images, 'inspect', new=inspect_mock)
pull = await agent.check_image(imgref, query_digest, behavior)
assert not pull
inspect_mock.assert_awaited_with(imgref.canonical)
@pytest.mark.asyncio
async def test_auto_pull_none_when_missing(agent, mocker):
behavior = AutoPullBehavior.NONE
inspect_mock = AsyncMock(
side_effect=DockerError(status=404,
data={'message': 'Simulated missing image'}))
mocker.patch.object(agent.docker.images, 'inspect', new=inspect_mock)
with pytest.raises(ImageNotAvailable) as e:
await agent.check_image(imgref, query_digest, behavior)
assert e.value.args[0] is imgref
inspect_mock.assert_called_with(imgref.canonical)
|
mLewisLogic/prism | prism/prism.py | Python | mit | 8,323 | 0.001802 | #!/usr/bin/env python
# -*- coding: UTF-8 -*-
# (c) 2012 Mike Lewis
import logging; log = logging.getLogger(__name__)
from . import image_util
class Connection(object):
"""Enclosing object through which the system is managed"""
def __init__(self, s3_connection, bucket_name, bucket_url):
"""Set up the AWS creds and bucket details"""
self.s3 = s3_connection
self.bucket_name = bucket_name
self.bucket_url = bucket_url
@property
def bucket(self):
"""Get the S3 bucket being used by this manager"""
if not hasattr(self, '_bucket'):
self._bucket = self.s3.get_bucket(self.bucket_name)
return self._bucket
def save_image(self, key_name, image, format):
"""Saves this image to an S3 bucket, using a specified file format"""
log.debug(u'Saving key [{key_name} to {bucket}]'.format(
key_name=key_name,
bucket=self.bucket))
key = self.bucket.new_key(key_name)
format = format.upper()
if format == u'JPEG':
image_str = image_util.ImageHelper(image).to_jpeg_string()
key.set_metadata('Content-Type', 'image/jpeg')
elif format == u'PNG':
image_str = image_util.ImageHelper(image).to_png_string()
key.set_metadata('Content-Type', 'image/png')
else:
log.error(u'{0} is not a supported image format'.format(format))
return
return key.set_contents_from_string(image_str)
def delete_image(self, key):
"""Remove this image from S3"""
return self.bucket.delete_key(key)
def get_collection_manager(self, *args, **kwargs):
"""Gets a collection manager stemming from this connection"""
return CollectionManager(self, *args, **kwargs)
class CollectionManager(object):
"""Management object through which collections of images are processed using the same settings.
The collection_spec dictates the functionality that an instance will provide.
Parameters (with examples):
key_prefix=u'users/' # Allows "foldering" of different collections within the same bucket
format=u'JPEG', # format to save original and derivatives in
derivative_specs=[ # A list of specs to create derivative images
{
'filters': [ThumbnailFilter(120, 80)], # chained list of filters to apply
'key_suffix': u'(120x80)', # suffix to apply to the key, identifying this derivative
},
{
'filters': [ThumbnailFilter(20, 10)],
'key_suffix': u'(20x10)',
},
]
blacklist=[ # Blacklist of md5 hashes to ignore incoming images of
u'917aa09622f73d57a50294dde50cfdc8',
u'404b31849f87463d1b51284a0a1c6b65',
u'59610c7d0716126dc89c299bb92e4ca8',
u'49f83104c9a168a633314f64723ee7a5',
]
"""
def __init__(self, connection, key_prefix=u'', default_image=None, format=u'JPEG', derivative_specs=[], blacklist=[]):
"""Stash the parameters for use on individual processing"""
self.connection = connection
self.key_prefix = key_prefix
self.default_image = default_image
self.format = format
self.derivative_specs = derivative_specs
self.blacklist = blacklist
def process_image_string(self, image_string):
"""Process an image string"""
image = image_util.load_image_from_string(image_string)
return self.process_image(image)
def process_image_url(self, image_url):
"""Process an image url"""
image = image_util.load_image_from_url(image_url)
return self.process_image(image)
def process_image_file(self, image_file):
"""Process an image file"""
image = image_util.load_image_from_file(image_file)
return self.process_image(image)
def save_image(self, image, image_id):
"""Save this image to persistence"""
# Make sure we're playing with a valid image
if not image:
log.error(u'image is invalid: {0}'.format(image))
return None
key = self.id_to_key(image_id)
self.connection.save_image(key, image, self.format)
def process_derivatives(self, image, image_id, **kwargs):
"""Did your spec change? Make sure your derivatives are up to date"""
if not image:
log.error(u'image is invalid: {0}'.format(image))
return None
key = self.id_to_key(image_id)
derivs = dict()
for derivative_spec in self.derivative_specs:
deriv = self._save_derivative_image(key, image, derivative_spec, **kwargs)
if deriv:
derivs[de | rivative_spec.get('key_suffix')] = deriv
return derivs
def id_to_key(self, image_id):
"""Combines self.key_prefix with this id"""
return u'{ | key_prefix}{id}'.format(
key_prefix=self.key_prefix,
id=image_id)
def get_url(self, image_id):
"""Get the url, given this hash. Gets default if present and needed"""
key = image_id if image_id else self.default_image
if key:
return u'{bucket_url}{key}'.format(
bucket_url=self.connection.bucket_url,
key=self.id_to_key(key))
else:
return None
def get_image(self, image_id):
"""Get the actual image of this id"""
url = self.get_url(image_id)
return image_util.load_image_from_url(url) if url else None
def delete_image_by_id(self, image_id):
"""Removes this image and derivatives from S3"""
base_key = self.id_to_key(image_id)
self.connection.delete_image(base_key)
for spec in self.derivative_specs:
derivative_key = u'{base_key}{suffix}'.format(
base_key=base_key,
suffix=spec.get('key_suffix', u''))
self.connection.delete_image(derivative_key)
def _save_derivative_image(self, base_key, image, spec, force=False):
"""Generates and stores the derivative based upon a spec"""
derivative_key = u'{base_key}{suffix}'.format(
base_key=base_key,
suffix=spec.get('key_suffix', u''))
# If force or if key does not exist
if force or not self.connection.bucket.get_key(derivative_key):
derivative_image = self._apply_image_filters(image, spec['filters'])
self.connection.save_image(derivative_key, derivative_image, self.format)
return derivative_image
return None
def _apply_image_filters(self, image, filters=[]):
"""Creates a derivative image from an original using a filter chain (first-to-last)"""
derivative = image
for filter in filters:
derivative = filter(derivative)
return derivative
"""
Old-style
"""
def process_image(self, image, save_original=True):
"""Process this image according to this collection's spec"""
# Make sure we're playing with a valid image
if not image:
log.error(u'image is invalid: {0}'.format(image))
return None
# Get the md5 hash of the original image. We'll use this as the base s3 key.
hash = image_util.ImageHelper(image).md5_hash()
# Make sure this isn't in the blacklist
if hash in self.blacklist:
log.debug(u'image found in blacklist: {0}'.format(hash))
return None
key = self.id_to_key(hash)
# Store the original
if save_original:
self.connection.save_image(key, image, self.format)
# Process each requested derivative
for derivative_spec in self.derivative_specs:
self._save_derivative_image(key, image, derivative_spec)
# Return the image hash used
return hash
def reprocess_derivatives(self, hash, force=False):
"""Did your spec change? Make sure your derivatives are up to date"""
image = self.get_image(hash)
key = self.id_to_key(hash)
if image:
for derivative_spec in self.derivative_specs:
se |
manassolanki/erpnext | erpnext/stock/doctype/material_request/material_request_dashboard.py | Python | gpl-3.0 | 313 | 0.047923 | from frappe import _
def ge | t_data():
return {
'fieldname': 'material_request',
'transactions': [
{
'label': _('Related'),
'items': ['Request for Quotation', 'Supplier Quotation', 'Purchase Order', "Stock Entry"]
} | ,
{
'label': _('Manufacturing'),
'items': ['Work Order']
}
]
} |
kfr2/cmsplugin-biography | cmsplugin_biography/models.py | Python | mit | 2,213 | 0.001808 | from django.db import models
from django.utils.translation import ugettext_lazy as _
from cms.models.pluginmodel import CMSPlugin
from djangocms_text_ckeditor.fields import HTMLField
from easy_thumbnails.alias import aliases
from easy_thumbnails.signals import saved_file
from easy_thumbnails.signal_handlers import generate_aliases_global
# Define aliases for easy_thumbnail
# See http://easy-thumbnails.readthedocs.org/en/latest/usage/#thumbnail-aliases
if not aliases.get('badge'):
aliases.set('badge', {'size': (150, 80), 'crop': True})
class PersonBiography(models.Model):
"""Stores biographical information about a Person."""
first_name = models.CharField(max_length=200)
last_name = models.CharField(max_length=250)
suffix = models.CharField(max_length=40, blank=True)
title = models.CharField(max_length=250, blank=True)
employer = models.CharField(max_length=250, blank=True)
description = HTMLField()
image = models.ImageField(upload_to='biography_person', blank=True)
active = models.BooleanField(default=True,
help_text=_('If checked, this biography will be available in the plugin list.'))
class Meta:
ordering = ('last_name', 'first_name', )
verbose_name = 'Person biography'
verbose_name_plural = 'Person biographies'
def __unicode__(self):
return '%s, %s' % (self.last_name, self.first_name)
class PersonBiographyPluginModel(CMSPlugin):
"""
Stores a reference to a PersonBiography. This is used so a given
PersonBiography can be referenced by 0 or more PersonBiographyPlugins.
"""
person = models.ForeignKey(Per | sonBiography)
short_description = HTMLField(blank=True, help_text="If specified, this text will replace the person's normal description.")
event_descrip | tion = HTMLField(blank=True, help_text="If specified, this text will appear after the person's normal description.")
class Meta:
ordering = ('person', )
def __unicode__(self):
return unicode(self.person)
def copy_relations(self, oldinstance):
self.person = oldinstance.person
# Generate thumbnails when an image is uploaded.
saved_file.connect(generate_aliases_global)
|
mileistone/test | vedanet/engine/__init__.py | Python | mit | 336 | 0.008929 | """
Lightnet Engine Module |br|
This module contains classes and functions to manage the training of | your networks.
It has an engine, capable of orchestrating your train | ing and test cycles, and also contains function to easily visualise data with visdom_.
"""
#from .engine import *
from ._voc_train import *
from ._voc_test import *
|
jesmorc/Workinout | proyectoP4/urls.py | Python | gpl-3.0 | 1,273 | 0.004713 | # -*- coding: utf-8 -*-
"""proyectoP4 URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, u | rl, patterns
from django.contrib import admin
from Workinout import views
from django.conf import settings
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^Workinout/', include('Workinout.urls')), # ADD THIS NEW TUPLE!media/(?P<path>.*)
]
if settings.DEBUG:
| urlpatterns += patterns(
'django.views.static',
(r'media/(?P<path>.*)',
'serve',
{'document_root': settings.MEDIA_ROOT}), )
else:
urlpatterns += patterns('', url(r'^static/(?P<path>.*)$', 'django.views.static.serve', {'document_root': settings.STATIC_PATH}),
)
|
drayanaindra/django-shop | tests/testapp/manage.py | Python | bsd-3-clause | 609 | 0.004926 | #!/usr/bin/env python
import os, sys
from django.core.management import execute_manager
sys.path.insert(0, os.path.abspath('./../../'))
try:
import settings # Assumed to be in the same directory.
except ImportError:
import sys
sys.stderr.write("Error: Can't find the file 'settings.py' in the directory containing %r. It appe | ars you've customized things.\nYou'll have to run django-admin.py, passing it your settings module.\n(If the file settings.py does indeed exist, it's causing an ImportError somehow.)\n" % __file_ | _)
sys.exit(1)
if __name__ == "__main__":
execute_manager(settings)
|
eclee25/flu-SDI-exploratory-age | scripts/WIPS2015/WIPS_Bansal2010_hierarchy.py | Python | mit | 1,282 | 0.024961 | #!/usr/bin/python
##############################################
###Python template
###Author: Elizabeth Lee
###Date: 2/6/15
###Function: Redraw figure 4A in Shifting Demographic Landscape (Bansal2010)
###Import data:
###Command Line: python
##############################################
### notes ###
### packages/modules ###
import csv
import numpy as np
import matplotlib.pyplot as plt
## local modules ##
### data structures ###
### parameters ## | #
### functions ###
### import data ###
chi | ldin = open('/home/elee/Dropbox/Elizabeth_Bansal_Lab/SDI_Data/explore/scripts/WIPS2015/importData/child_attack_rate.txt','r')
child=csv.reader(childin, delimiter=' ')
adultin = open('/home/elee/Dropbox/Elizabeth_Bansal_Lab/SDI_Data/explore/scripts/WIPS2015/importData/adult_attack_rate.txt','r')
adult=csv.reader(adultin, delimiter=' ')
### program ###
childlist, adultlist = [],[]
ct=0
for item1, item2 in zip(child, adult):
childlist = reduce(item1, []).split()
adultlist = reduce(item2, []).split()
ct+=1
print ct
childtest = [float(c) for c in childlist]
adulttest = [float(a) for a in adultlist]
print childtest
print adulttest
plt.plot(childtest, color='red', lwd=3)
plt.lines(adulttest, color='blue', lwd=3)
plt.ylabel('Time')
plt.xlabel('Attack Rate')
plt.show() |
shubhdev/openedx | common/djangoapps/student/models.py | Python | agpl-3.0 | 67,408 | 0.002077 | """
Models for User Information (students, staff, etc)
Migration Notes
If you make changes to this model, be sure to create an appropriate migration
file and check it in at the same time as your model changes. To do that,
1. Go to the edx-platform dir
2. ./manage.py lms schemamigration student --auto description_of_your_change
3. Add the migration file created in edx-platform/common/djangoapps/student/migrations/
"""
from datetime import datetime, timedelta
import hashlib
import json
import logging
from pytz import UTC
import uuid
from collections import defaultdict, OrderedDict
import dogstats_wrapper as dog_stats_api
from urllib import urlencode
from django.utils.translation import ugettext_lazy as _
from django.conf import settings
from django.utils import timezone
from django.contrib.auth.models import User
from django.contrib.auth.hashers import make_password
from django.contrib.auth.signals import user_logged_in, user_logged_out
from django.db import models, IntegrityError
from django.db.models import Count
from django.db.models.signals import pre_save, post_save
from django.dispatch import receiver, Signal
from django.core.exceptions import ObjectDoesNotExist
from django.utils.translation import ugettext_noop
from django_countries.fields import CountryField
from config_models.models import ConfigurationModel
from track import contexts
from eventtracking import tracker
from importlib import import_module
from south.modelsinspector import add_introspection_rules
from opaque_keys.edx.locations import SlashSeparatedCourseKey
import lms.lib.comment_client as cc
from util.model_utils import emit_field_changed_events, get_changed_fields_dict
from util.query import use_read_replica_if_available
from xmodule_django.models import CourseKeyField, NoneToEmptyManager
from xmodule.modulestore.exceptions import ItemNotFoundError
from xmodule.modulestore.django import modulestore
from opaque_keys.edx.keys import CourseKey
from functools import total_ordering
from certificates.models import GeneratedCertificate
from course_modes.models import CourseMode
import analytics
UNENROLL_DONE = Signal(providing_args=["course_enrollment", "skip_refund"])
log = logging.getLogger(__name__)
AUDIT_LOG = lo | gging.getLogger("audit")
SessionStore = import_module(settings.SESSION_ENGINE).SessionStore # pylint: disable=invalid-name
UNENROLLED_TO_ALLOWEDTOENROLL = 'from unenrolled to allowed to enroll'
ALLOWEDTOENROLL_TO_ENROLLED = 'from allowed to enroll to enrolled'
ENROLLED_TO_ENROLLED = 'from enrolled to enrolled'
ENROLLED_TO_UNENROLLED = 'from enrolled to unenrolled'
UNENROLLED_TO_ENROLLED = 'from une | nrolled to enrolled'
ALLOWEDTOENROLL_TO_UNENROLLED = 'from allowed to enroll to enrolled'
UNENROLLED_TO_UNENROLLED = 'from unenrolled to unenrolled'
DEFAULT_TRANSITION_STATE = 'N/A'
TRANSITION_STATES = (
(UNENROLLED_TO_ALLOWEDTOENROLL, UNENROLLED_TO_ALLOWEDTOENROLL),
(ALLOWEDTOENROLL_TO_ENROLLED, ALLOWEDTOENROLL_TO_ENROLLED),
(ENROLLED_TO_ENROLLED, ENROLLED_TO_ENROLLED),
(ENROLLED_TO_UNENROLLED, ENROLLED_TO_UNENROLLED),
(UNENROLLED_TO_ENROLLED, UNENROLLED_TO_ENROLLED),
(ALLOWEDTOENROLL_TO_UNENROLLED, ALLOWEDTOENROLL_TO_UNENROLLED),
(UNENROLLED_TO_UNENROLLED, UNENROLLED_TO_UNENROLLED),
(DEFAULT_TRANSITION_STATE, DEFAULT_TRANSITION_STATE)
)
class AnonymousUserId(models.Model):
"""
This table contains user, course_Id and anonymous_user_id
Purpose of this table is to provide user by anonymous_user_id.
We generate anonymous_user_id using md5 algorithm,
and use result in hex form, so its length is equal to 32 bytes.
"""
objects = NoneToEmptyManager()
user = models.ForeignKey(User, db_index=True)
anonymous_user_id = models.CharField(unique=True, max_length=32)
course_id = CourseKeyField(db_index=True, max_length=255, blank=True)
unique_together = (user, course_id)
def anonymous_id_for_user(user, course_id, save=True):
"""
Return a unique id for a (user, course) pair, suitable for inserting
into e.g. personalized survey links.
If user is an `AnonymousUser`, returns `None`
Keyword arguments:
save -- Whether the id should be saved in an AnonymousUserId object.
"""
# This part is for ability to get xblock instance in xblock_noauth handlers, where user is unauthenticated.
if user.is_anonymous():
return None
cached_id = getattr(user, '_anonymous_id', {}).get(course_id)
if cached_id is not None:
return cached_id
# include the secret key as a salt, and to make the ids unique across different LMS installs.
hasher = hashlib.md5()
hasher.update(settings.SECRET_KEY)
hasher.update(unicode(user.id))
if course_id:
hasher.update(course_id.to_deprecated_string().encode('utf-8'))
digest = hasher.hexdigest()
if not hasattr(user, '_anonymous_id'):
user._anonymous_id = {} # pylint: disable=protected-access
user._anonymous_id[course_id] = digest # pylint: disable=protected-access
if save is False:
return digest
try:
anonymous_user_id, __ = AnonymousUserId.objects.get_or_create(
defaults={'anonymous_user_id': digest},
user=user,
course_id=course_id
)
if anonymous_user_id.anonymous_user_id != digest:
log.error(
u"Stored anonymous user id %r for user %r "
u"in course %r doesn't match computed id %r",
user,
course_id,
anonymous_user_id.anonymous_user_id,
digest
)
except IntegrityError:
# Another thread has already created this entry, so
# continue
pass
return digest
def user_by_anonymous_id(uid):
"""
Return user by anonymous_user_id using AnonymousUserId lookup table.
Do not raise `django.ObjectDoesNotExist` exception,
if there is no user for anonymous_student_id,
because this function will be used inside xmodule w/o django access.
"""
if uid is None:
return None
try:
return User.objects.get(anonymoususerid__anonymous_user_id=uid)
except ObjectDoesNotExist:
return None
class UserStanding(models.Model):
"""
This table contains a student's account's status.
Currently, we're only disabling accounts; in the future we can imagine
taking away more specific privileges, like forums access, or adding
more specific karma levels or probationary stages.
"""
ACCOUNT_DISABLED = "disabled"
ACCOUNT_ENABLED = "enabled"
USER_STANDING_CHOICES = (
(ACCOUNT_DISABLED, u"Account Disabled"),
(ACCOUNT_ENABLED, u"Account Enabled"),
)
user = models.ForeignKey(User, db_index=True, related_name='standing', unique=True)
account_status = models.CharField(
blank=True, max_length=31, choices=USER_STANDING_CHOICES
)
changed_by = models.ForeignKey(User, blank=True)
standing_last_changed_at = models.DateTimeField(auto_now=True)
class UserProfile(models.Model):
"""This is where we store all the user demographic fields. We have a
separate table for this rather than extending the built-in Django auth_user.
Notes:
* Some fields are legacy ones from the first run of 6.002, from which
we imported many users.
* Fields like name and address are intentionally open ended, to account
for international variations. An unfortunate side-effect is that we
cannot efficiently sort on last names for instance.
Replication:
* Only the Portal servers should ever modify this information.
* All fields are replicated into relevant Course databases
Some of the fields are legacy ones that were captured during the initial
MITx fall prototype.
"""
class Meta: # pylint: disable=missing-docstring
db_table = "auth_userprofile"
# CRITICAL TODO/SECURITY
# Sanitize all fields.
# This is not visible to other users, but could introduce holes later
user = models.OneToOneField(User, unique=True, db_index=True, related_name='profile')
name = models. |
ariwaranosai/twitter_bot | twitter_bot/main.py | Python | gpl-2.0 | 1,342 | 0.003726 | #!/usr/bin/env python
#coding=utf-8
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See th | e License for the specific language governing permissions and
# limitations under the License.
#
import webapp2
import time
import dbcontroller as dc
import speak
import User
im | port logging
class MainHandler(webapp2.RequestHandler):
def get(self):
list = dc.refresh()
lines = speak.speak(list)
import twitter
for user in User.users:
for i in lines:
str1 = i
logging.log(logging.INFO, u"twitter length is " + \
str(len(str1)))
try:
twitter.sendMessage(str1)
except:
logging.log(logging.WARNING, u"twitter send fail:" + str1)
return self.response.out.write('ok')
app = webapp2.WSGIApplication([
('/whyisme', MainHandler)
], debug=True)
|
cloudzfy/euler | src/125.py | Python | mit | 555 | 0.001802 | # The palindromic number 595 is interesting because it can be written
# as the sum of consecutive squares: 6^2 + 7^2 + 8^2 + 9^2 + 10^2 +
# 11^2 + 12^2.
# There are exactly eleven palindromes below one-thousand th | at can
# be written as consecutive square sums, and the sum of these palindromes
# is 4164. Note that 1 = 0^2 + 1^2 has not been included as this problem
# is concerned with the squares of positive integers.
# Find the sum of all the numbers less than 10^8 that are both palindromic
# and can | be written as the sum of consecutive squares.
|
abogushov/django-admin-json-editor | example/app/migrations/0002_roles.py | Python | mit | 888 | 0.003378 | # -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-11-25 13:37
from __future__ import unicode_literals
import django.contrib.postgres.fields.jsonb
from django.db import migrations, models
class Migration(migrations.Migration):
depe | ndencies = [
('app', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='ArrayJSONModel',
fields=[
| ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('roles', django.contrib.postgres.fields.jsonb.JSONField(default=[])),
],
),
migrations.AlterField(
model_name='jsonmodel',
name='data',
field=django.contrib.postgres.fields.jsonb.JSONField(default={'html': '<h1>Default</h1>', 'status': False, 'text': 'some text'}),
),
]
|
javiercantero/streamlink | src/streamlink/plugins/funimationnow.py | Python | bsd-2-clause | 11,232 | 0.002137 | from __future__ import print_function
import logging
import random
import re
from streamlink.compat import urljoin
from streamlink.plugin import Plugin, PluginArguments, PluginArgument
from streamlink.plugin.api import http
from streamlink.plugin.api import useragents
from streamlink.plugin.api import validate
from streamlink.plugin.api.utils import itertags
from streamlink.stream import HLSStream
from streamlink.stream import HTTPStream
from streamlink.stream.ffmpegmux import MuxedStream
log = logging.getLogger(__name__)
class Experience(object):
CSRF_NAME = "csrfmiddlewaretoken"
login_url = "https://www.funimation.com/log-in/"
api_base = "https://www.funimation.com/api"
login_api_url = "https://prod-api-funimationnow.dadcdigital.com/api/auth/login/"
show_api_url = api_base + "/experience/{experience_id}/"
sources_api_url = api_base + "/showexperience/{experience_id}/"
languages = ["english", "japanese"]
alphas = ["uncut", "simulcast"]
login_schema = validate.Schema(validate.any(
{ | "success": False,
"error": validate.text},
{"token": validate.text,
"user": {"id": int}}
))
def __init_ | _(self, experience_id):
"""
:param experience_id: starting experience_id, may be changed later
"""
self.experience_id = experience_id
self._language = None
self.cache = {}
self.token = None
def request(self, method, url, *args, **kwargs):
headers = kwargs.pop("headers", {})
if self.token:
headers.update({"Authorization": "Token {0}".format(self.token)})
http.cookies.update({"src_token": self.token})
log.debug("Making {0}request to {1}".format("authorized " if self.token else "", url))
return http.request(method, url, *args, headers=headers, **kwargs)
def get(self, *args, **kwargs):
return self.request("GET", *args, **kwargs)
def post(self, *args, **kwargs):
return self.request("POST", *args, **kwargs)
@property
def pinst_id(self):
return ''.join([
random.choice("0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ") for _ in range(8)
])
def _update(self):
api_url = self.show_api_url.format(experience_id=self.experience_id)
log.debug("Requesting experience data: {0}".format(api_url))
res = self.get(api_url)
data = http.json(res)
self.cache[self.experience_id] = data
@property
def show_info(self):
if self.experience_id not in self.cache:
self._update()
return self.cache.get(self.experience_id)
@property
def episode_info(self):
"""
Search for the episode with the requested experience Id
:return:
"""
for season in self.show_info["seasons"]:
for episode in season["episodes"]:
for lang in episode["languages"].values():
for alpha in lang["alpha"].values():
if alpha["experienceId"] == self.experience_id:
return episode
@property
def language(self):
for language, lang_data in self.episode_info["languages"].items():
for alpha in lang_data["alpha"].values():
if alpha["experienceId"] == self.experience_id:
return language
@property
def language_code(self):
return {"english": "eng", "japanese": "jpn"}[self.language]
def set_language(self, language):
if language in self.episode_info["languages"]:
for alpha in self.episode_info["languages"][language]["alpha"].values():
self.experience_id = alpha["experienceId"]
def _get_alpha(self):
for lang_data in self.episode_info["languages"].values():
for alpha in lang_data["alpha"].values():
if alpha["experienceId"] == self.experience_id:
return alpha
def subtitles(self):
alpha = self._get_alpha()
for src in alpha["sources"]:
return src["textTracks"]
def sources(self):
"""
Get the sources for a given experience_id, which is tied to a specific language
:param experience_id: int; video content id
:return: sources dict
"""
api_url = self.sources_api_url.format(experience_id=self.experience_id)
res = self.get(api_url, params={"pinst_id": self.pinst_id})
return http.json(res)
def login_csrf(self):
r = http.get(self.login_url)
for input in itertags(r.text, "input"):
if input.attributes.get("name") == self.CSRF_NAME:
return input.attributes.get("value")
def login(self, email, password):
log.debug("Attempting to login as {0}".format(email))
r = self.post(self.login_api_url,
data={'username': email, 'password': password, self.CSRF_NAME: self.login_csrf()},
raise_for_status=False,
headers={"Referer": "https://www.funimation.com/log-in/"})
d = http.json(r, schema=self.login_schema)
self.token = d.get("token", None)
return self.token is not None
class FunimationNow(Plugin):
arguments = PluginArguments(
PluginArgument(
"email",
argument_name="funimation-email",
requires=["password"],
help="Email address for your Funimation account."
),
PluginArgument(
"password",
argument_name="funimation-password",
sensitive=True,
help="Password for your Funimation account."
),
PluginArgument(
"language",
argument_name="funimation-language",
choices=["en", "ja", "english", "japanese"],
default="english",
help="""
The audio language to use for the stream; japanese or english.
Default is "english".
"""
),
PluginArgument(
"mux-subtitles",
argument_name="funimation-mux-subtitles",
action="store_true",
help="""
Enable automatically including available subtitles in to the output stream.
"""
)
)
url_re = re.compile(r"""
https?://(?:www\.)funimation(.com|now.uk)
""", re.VERBOSE)
experience_id_re = re.compile(r"/player/(\d+)")
mp4_quality = "480p"
@classmethod
def can_handle_url(cls, url):
return cls.url_re.match(url) is not None
def _get_streams(self):
http.headers = {"User-Agent": useragents.CHROME}
res = http.get(self.url)
# remap en to english, and ja to japanese
rlanguage = {"en": "english", "ja": "japanese"}.get(self.get_option("language").lower(),
self.get_option("language").lower())
if "_Incapsula_Resource" in res.text:
self.bypass_incapsula(res)
res = http.get(self.url)
id_m = self.experience_id_re.search(res.text)
experience_id = id_m and int(id_m.group(1))
if experience_id:
log.debug("Found experience ID: {0}", experience_id)
exp = Experience(experience_id)
if self.get_option("email") and self.get_option("password"):
if exp.login(self.get_option("email"), self.get_option("password")):
log.info("Logged in to Funimation as {0}", self.get_option("email"))
else:
log.warning("Failed to login")
log.debug("Found episode: {0}", exp.episode_info["episodeTitle"])
log.debug(" has languages: {0}", ", ".join(exp.episode_info["languages"].keys()))
log.debug(" requested language: {0}", rlanguage)
log.debug(" current language: {0}", exp.language)
if rlanguage != exp.language:
log.debug("switching language to: {0}", rlanguage)
exp.set_language(rlanguage)
if exp.language != rlanguage:
|
flav-io/flavio | flavio/physics/wdecays/test_mW.py | Python | mit | 729 | 0 | import unittest
import flavio
from . import mw
from flavio.physics.zdecays.test_smeftew import ZeroDict
import wilson
par = flavio.default_parameters.get_central_all()
|
class TestMW(unittest.TestCase):
def test_mW_SM(self):
self.assertAlmostEqual(mw.mW_SM(par), 80.3779, delta=0.02)
def test_shifts_sm(self):
C = ZeroDict({})
self.assertEqua | l(mw.dmW_SMEFT(par, C), 0)
def test_obs(self):
w = wilson.Wilson({}, scale=91.1876, eft='SMEFT', basis='Warsaw')
self.assertAlmostEqual(flavio.sm_prediction('m_W'),
par['m_W'], delta=0.03)
self.assertEqual(flavio.sm_prediction('m_W'),
flavio.np_prediction('m_W', w))
|
altur13/osu-Assistant | beatmap_processor.py | Python | gpl-3.0 | 1,817 | 0.004403 | #!/usr/bin/env python
import sys
from utils.file_system import *
from calculations.beatmap_metadata import BeatmapMetadat | a
from database.database_wrapper import DatabaseWrapper
def hello(msg):
print("Hello world! " | + msg)
def print_help():
print("Usage: ./beatmap_processor.py [songs_folder]")
print("This script initializes beatmap database with pp calculations")
print("of all standard maps located on [songs_folder]")
exit(1)
if (len(sys.argv) != 2):
print_help()
songs_abs_path = abs_path(sys.argv[1])
print(songs_abs_path)
files = list_files(songs_abs_path)
osu_files = [x for x in files if x.endswith(".osu")]
#print("=======================================")
#print(str(len(osu_files)) + " maps found!")
#for u in osu_files:
# print(u)
#print("=======================================")
if not dir_exists("thumbnails"):
mkdir("thumbnails")
db_wrapper = DatabaseWrapper()
added = 0
errors = 0
other_modes = 0
print("\n\nSTARTING PP CALCULATIONS.....\n")
for u in osu_files:
print("Calculating pp for '" + u + "'.....")
b = BeatmapMetadata(u)
if not b.is_standard_map():
print("Not a osu! standard map. Skipping...")
other_modes += 1
continue
success = b.calculate_diff_values()
if not success:
print("Beatmap has errors. Skipping...")
errors += 1
continue
b.path = path_subtraction(u, songs_abs_path)
if not db_wrapper.already_exists(b.beatmap_id): # not like this peppy....
db_wrapper.store_beatmap(b)
else:
print("Beatmap " + b.path + " already exists on the database; skipping...")
added += 1
print(str(added) + " beatmaps added to the database!")
print(str(errors) + " beatmaps with errors found!")
print(str(other_modes) + " beatmaps from other modes were skipped.")
|
tectronics/rebuildingtogethercaptain | room/forms.py | Python | apache-2.0 | 7,780 | 0.010154 | # Copyright 2011 Luke Stone
"""Django forms for models."""
# This don't work in appengine, we use djangoforms instead.
# from django.forms import ModelForm
from django import forms # DateField, DateTimeInput
from google.appengine.ext.db import djangoforms
import common
import models
VENDOR_SELECTIONS = (
'Home Depot',
'Kelly-Moore Paints',
'Palo Alto Hardware',
'Wisnom\'s ',
'Ocean Shore Hardware',
'AAA Rentals',
'San Mateo Rentals',
'Other (put name in Description)',
)
def DateField(label):
"""Helper to produce data fields for forms."""
return forms.DateField(
label=label, required=False,
help_text='mm/dd/yyyy',
widget=forms.DateTimeInput(attrs={'class':'input',
'size':'10'
},
format='%m/%d/%Y'
))
def SortedCaptainChoiceField():
return djangoforms.ModelChoiceField(
models.Captain,
models.Captain.all().order('name'))
def SortedSiteChoiceField():
return djangoforms.ModelChoiceField(
models.NewSite,
models.NewSite.all().order('number'))
class CaptainForm(djangoforms.ModelForm):
class Meta:
model = models.Captain
exclude = ['modified', 'last_editor', 'last_welcome', 'search_prefixes']
class CaptainContactForm(djangoforms.ModelForm):
class Meta:
model = models.Captain
exclude = ['name', 'email', 'modified', 'last_editor', 'last_welcome',
'search_prefixes']
class NewSiteForm(djangoforms.ModelForm):
number = forms.CharField(
max_length=10,
help_text = '"10001DAL" reads: 2010, #001, Daly City')
street_number = forms.CharField(
max_length=100,
help_text = models.NewSite.street_number.help_text)
city_state_zip = forms.CharField(
max_length=100,
help_text = models.NewSite.city_state_zip.help_text)
jurisdiction_choice = djangoforms.ModelChoiceField(
models.Jurisdiction, query=models.Jurisdiction.all().order('name'),
label='Jurisdiction')
class Meta:
model = models.NewSite
exclude = ['search_prefixes', 'program', 'jurisdiction', 'year']
class CaptainSiteForm(djangoforms.ModelForm):
work_start = DateField('Work Start Date')
work_end = DateField('Work End Date')
class Meta:
# TODO: is this the wrong model!?? should be NewSite?
model = models.Site
exclude = ['number', 'name', 'applicant', 'sponsors',
'postal_address', 'search_prefixes']
class SiteCaptainSiteForm(djangoforms.ModelForm):
captain = SortedCaptainChoiceField()
class Meta:
model = models.SiteCaptain
exclude = ['site']
class StaffForm(djangoforms.ModelForm):
since = DateField('Since')
class Meta:
model = models.Staff
exclude = ['user', 'last_welcome']
class SupplierForm(djangoforms.ModelForm):
since = DateField('Since')
class Meta:
model = models.Supplier
exclude = ['user']
class SupplierFormSimple(djangoforms.ModelForm):
class Meta:
model = models.Supplier
fields = ['name', 'address', 'phone']
class OrderSheetForm(djangoforms.ModelForm):
class Meta:
model = models.OrderSheet
exclude = ['created']
class ItemForm(djangoforms.ModelForm):
class Meta:
model = models.Item
exclude = ['last_editor', 'created', 'modified', 'thumbnail']
class OrderForm(djangoforms.ModelForm):
initial = {'pickup_on': common.NRD}
site = SortedSiteChoiceField()
class Meta:
model = models.Order
exclude = ['last_editor', 'created', 'created_by',
'modified', 'order_sheet',
'sub_total', 'sales_tax', 'grand_total', 'state',
'program']
class CaptainOrderForm(djangoforms.ModelForm):
pickup_on = DateField('Pickup On')
return_on = DateField('Return On')
class Meta:
model = models.Order
exclude = ['last_editor', 'created', 'created_by',
'modified', 'order_sheet',
'sub_total', 'sales_tax', 'grand_total', 'state',
'captain', 'site', 'program']
class NewOrderForm(djangoforms.ModelForm):
site = djangoforms.ModelChoiceField(
models.Site, widget=forms.HiddenInput)
order_sheet = djangoforms.ModelChoiceField(
models.OrderSheet, query=models.OrderSheet.all().order('name'))
class Meta:
model = models.Order
fields = ['site', 'order_sheet', 'program']
class DeliveryForm(djangoforms.ModelForm):
class Meta:
model = models.Delivery
exclude = ['site']
class PickupForm(djangoforms.ModelForm):
class Meta:
model = models.Pickup
exclude = ['site']
class RetrievalForm(djangoforms.ModelForm):
class Meta:
model = models.Retrieval
exclude = ['site']
class InventoryItemForm(djangoforms.ModelForm):
class Meta:
model = models.InventoryItem
exclude = ['last_editor', 'modified', 'item']
class SiteExpenseForm(djangoforms.ModelForm):
def __init__(self, *args, **kwargs):
staff = kwargs.pop('staff')
super(SiteExpenseForm, self).__init__(*args, **kwargs)
if staff:
self.fields['site'] = djangoforms.ModelChoiceField(
models.NewSite, models.NewSite.all().order('number'))
entity = kwargs.get('instance')
if entity:
site = entity.site
captains = [(sc.captain.key(), sc.captain.name)
for sc in site.sitecaptain_set]
captains.sort()
staff_captain = common.GetStaffCaptain()
captains.append((staff_captain.key(), staff_captain.name))
self.fields['captain'] = djangoforms.ModelChoiceField(
models.SiteCaptain,
choices=captains)
else:
self.fields['site'] = djangoforms.ModelChoiceField(
models.NewSite, widget=forms.HiddenInput)
self.fields['captain'] = djangoforms.ModelChoiceField(
models.Captain, widget=forms.HiddenInput)
class CheckRequestForm(SiteExpenseForm):
payment_date = forms.DateField(required=True)
name = forms.CharField(required=True, label='Payable To')
description = forms.CharField(
required=True, widget=forms.Textarea,
label=('Description '
'(Please include place of purchase and list of items purchased '
'and submit corresponding receipt.)')
)
class Meta:
model = models.CheckRequest
exclude = ['last_editor', 'modified', 'program']
class VendorReceiptForm(SiteExpenseForm):
purchase_date = forms.DateField(required=True)
supplier = djangoforms.ModelChoiceField(
models.Supplier,
query=models.Supplier.all().filter('active = ', 'Active').order('name'),
label="Vendor",
help_text="or add a new vendor using the form on the right")
amount = forms.FloatField(required=True)
class Meta:
model = models.VendorReceipt
exclude = ['last_editor', 'modifie | d', 'program', 'vendor']
class InKindDonationForm(SiteExpenseForm):
donation_date = forms.DateField(required=True)
donor = forms.CharField(required=True)
donor_phone = forms.CharField(required=True)
description = forms.CharField(required=True, widget=forms.Textarea)
class Meta:
model = models.InKindDonation
| exclude = ['last_editor', 'modified', 'program']
|
michaeldayreads/marathon-bigip-ctlr | common.py | Python | apache-2.0 | 6,679 | 0 | #!/usr/bin/env python3
#
# Copyright 2017 F5 Networks
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Common utility functions."""
import ipaddress
import re
import sys
import time
import json
import logging
import socket
import argparse
import jwt
import requests
from requests.auth import AuthBase
# Big-IP Address Pattern: <ipaddr>%<route_domain>
ip_rd_re = re.compile(r'^([^%]*)%(\d+)$')
def parse_log_level(log_level_arg):
"""Parse the log level from the args.
Args:
log_level_arg: String representation of log level
"""
LOG_LEVELS = ['CRITICAL', 'ERROR', 'WARNING', 'INFO', 'DEBUG']
if log_level_arg not in LOG_LEVELS:
msg = 'Invalid option: {0} (Valid choices are {1})'.format(
log_level_arg, LOG_LEVELS)
raise argparse.ArgumentTypeError(msg)
log_level = getattr(logging, log_level_arg, logging.INFO)
return log_level
def setup_logging(logger, log_format, log_level):
"""Configure logging."""
logger.setLevel(log_level)
formatter = logging.Formatter(log_format) |
consoleHandler = logging.StreamHandler(sys.stdout)
consoleHandler.setFormatter(formatter)
logger. | addHandler(consoleHandler)
logger.propagate = False
def set_marathon_auth_args(parser):
"""Set the authorization for Marathon."""
parser.add_argument("--marathon-auth-credential-file",
env_var='F5_CC_MARATHON_AUTH',
help="Path to file containing a user/pass for "
"the Marathon HTTP API in the format of 'user:pass'.")
parser.add_argument("--dcos-auth-credentials",
env_var='F5_CC_DCOS_AUTH_CREDENTIALS',
help="DC/OS service account credentials")
parser.add_argument("--dcos-auth-token",
env_var='F5_CC_DCOS_AUTH_TOKEN',
help="DC/OS ACS Token")
return parser
class DCOSAuth(AuthBase):
"""DCOSAuth class.
Manage authorization credentials for DCOS
"""
def __init__(self, credentials, ca_cert, token):
"""Initialize DCOSAuth."""
if credentials:
creds = json.loads(credentials)
self.scheme = creds['scheme']
self.uid = creds['uid']
self.private_key = creds['private_key']
self.login_endpoint = creds['login_endpoint']
self.token = token
self.verify = False
self.auth_header = None
self.expiry = 0
if ca_cert:
self.verify = ca_cert
def __call__(self, auth_request):
"""Get the ACS token."""
if self.token:
self.auth_header = 'token=' + self.token
auth_request.headers['Authorization'] = self.auth_header
return auth_request
if not self.auth_header or int(time.time()) >= self.expiry - 10:
self.expiry = int(time.time()) + 3600
payload = {
'uid': self.uid,
# This is the expiry of the auth request params
'exp': int(time.time()) + 60,
}
token = jwt.encode(payload, self.private_key, self.scheme)
data = {
'uid': self.uid,
'token': token.decode('ascii'),
# This is the expiry for the token itself
'exp': self.expiry,
}
r = requests.post(self.login_endpoint,
json=data,
timeout=(3.05, 46),
verify=self.verify)
r.raise_for_status()
self.auth_header = 'token=' + r.cookies['dcos-acs-auth-cookie']
auth_request.headers['Authorization'] = self.auth_header
return auth_request
def get_marathon_auth_params(args):
"""Get the Marathon credentials."""
marathon_auth = None
if args.marathon_auth_credential_file:
with open(args.marathon_auth_credential_file, 'r') as f:
line = f.readline().rstrip('\r\n')
if line:
marathon_auth = tuple(line.split(':'))
elif args.dcos_auth_credentials or args.dcos_auth_token:
return DCOSAuth(args.dcos_auth_credentials, args.marathon_ca_cert,
args.dcos_auth_token)
if marathon_auth and len(marathon_auth) != 2:
print(
"Please provide marathon credentials in user:pass format"
)
sys.exit(1)
return marathon_auth
def set_logging_args(parser):
"""Add logging-related args to the parser."""
parser.add_argument("--log-format",
env_var='F5_CC_LOG_FORMAT',
help="Set log message format",
default="%(asctime)s %(name)s: %(levelname)"
" -8s: %(message)s")
parser.add_argument("--log-level",
env_var='F5_CC_LOG_LEVEL',
type=parse_log_level,
help="Set logging level. Valid log levels are: "
"DEBUG, INFO, WARNING, ERROR, and CRITICAL",
default='INFO')
return parser
ip_cache = dict()
def resolve_ip(host):
"""Get the IP address for a hostname."""
cached_ip = ip_cache.get(host, None)
if cached_ip:
return cached_ip
else:
try:
ip = socket.gethostbyname(host)
ip_cache[host] = ip
return ip
except socket.gaierror:
return None
def split_ip_with_route_domain(address):
u"""Return ip and route-domain parts of address
Input ip format must be of the form:
<ip_v4_or_v6_addr>[%<route_domain_id>]
"""
match = ip_rd_re.match(address)
if match:
ip = match.group(1)
route_domain = int(match.group(2))
else:
ip = address
route_domain = None
return ip, route_domain
def validate_bigip_address(address):
"""Verify the address is a valid Big-IP address"""
is_valid = True
try:
ip = split_ip_with_route_domain(address)[0]
ipaddress.ip_address(ip)
except Exception:
is_valid = False
return is_valid
|
ToonTownInfiniteRepo/ToontownInfinite | toontown/minigame/Purchase.py | Python | mit | 34,313 | 0.00204 | from PurchaseBase import *
from otp.nametag.NametagFloat2d import *
from otp.nametag import NametagGlobals
from direct.task.Task import Task
from toontown.toon import ToonHead
from toontown.toonbase import ToontownTimer
from direct.gui import DirectGuiGlobals as DGG
from direct.directnotify import DirectNotifyGlobal
from direct.showbase.PythonUtil import Functor
from toontown.minigame import TravelGameGlobals
from toontown.distributed import DelayDelete
from toontown.toonbase import ToontownGlobals
import MinigameGlobals
COUNT_UP_RATE = 0.15
COUNT_UP_DURATION = 0.5
DELAY_BEFORE_COUNT_UP = 1.0
DELAY_AFTER_COUNT_UP = 1.0
COUNT_DOWN_RATE = 0.075
COUNT_DOWN_DURATION = 0.5
DELAY_AFTER_COUNT_DOWN = 0.0
DELAY_AFTER_CELEBRATE = 2.6
COUNT_SFX_MIN_DELAY = 0.034
COUNT_SFX_START_T = 0.079
OVERMAX_SFX_MIN_DELAY = 0.067
OVERMAX_SFX_START_T = 0.021
class Purchase(PurchaseBase):
notify = DirectNotifyGlobal.directNotify.newCategory('Purchase')
def __init__(self, toon, pointsArray, playerMoney, ids, states, remain, doneEvent, metagameRound = -1, votesArray = None):
PurchaseBase.__init__(self, toon, doneEvent)
self.ids = ids
self.pointsArray = pointsArray
self.playerMoney = playerMoney
self.states = states
self.remain = remain
self.tutorialMode = 0
self.metagameRound = metagameRound
self.votesArray = votesArray
self.voteMultiplier = 1
self.fsm.addState(State.State('reward', self.enterReward, self.exitReward, ['purchase']))
doneState = self.fsm.getStateNamed('done')
doneState.addTransition('reward')
self.unexpectedEventNames = []
self.unexpectedExits = []
self.setupUnexpectedExitHooks()
def load(self):
purchaseModels = loader.loadModel('phase_4/models/gui/purchase_gui')
PurchaseBase.load(self, purchaseModels)
interiorPhase = 3.5
self.bg = loader.loadModel('phase_%s/models/modules/toon_interior' % interiorPhase)
self.bg.setPos(0.0, 5.0, -1.0)
self.wt = self.bg.find('**/random_tc1_TI_wallpaper')
wallTex = loader.loadTexture('phase_%s/maps/wall_paper_a5.jpg' % interiorPhase)
self.wt.setTexture(wallTex, 100)
self.wt.setColorScale(0.8, 0.67, 0.549, 1.0)
self.bt = self.bg.find('**/random_tc1_TI_wallpaper_border')
wallTex = loader.loadTexture('phase_%s/maps/wall_paper_a5.jpg' % interiorPhase)
self.bt.setTexture(wallTex, 100)
self.bt.setColorScale(0.8, 0.67, 0.549, 1.0)
self.wb = self.bg.find('**/random_tc1_TI_wainscotting')
wainTex = loader.loadTexture('phase_%s/maps/wall_paper_b4.jpg' % interiorPhase)
self.wb.setTexture(wainTex, 100)
self.wb.setColorScale(0.473, 0.675, 0.488, 1.0)
self.playAgain = DirectButton(parent=self.frame, relief=None, scale=1.04, pos=(0.72, 0, -0.24), image=(purchaseModels.find('**/PurchScrn_BTN_UP'),
purchaseModels.find('**/PurchScrn_BTN_DN'),
purchaseModels.find('**/PurchScrn_BTN_RLVR'),
purchaseModels.find('**/PurchScrn_BTN_UP')), text=TTLocalizer.GagShopPlayAgain, text_fg=(0, 0.1, 0.7, 1), text_scale=0.05, text_pos=(0, 0.015, 0), image3_color=Vec4(0.6, 0.6, 0.6, 1), text3_fg=Vec4(0, 0, 0.4, 1), command=self.__handlePlayAgain)
self.backToPlayground = DirectButton(parent=self.frame, relief=None, scale=1.04, pos=(0.72, 0, -0.045), image=(purchaseModels.find('**/PurchScrn_BTN_UP'),
purchaseModels.find('**/PurchScrn_BTN_DN'),
purchaseModels.find('**/PurchScrn_BTN_RLVR'),
purchaseModels.find('**/PurchScrn_BTN_UP')), text=TTLocalizer.GagShopBackToPlayground, text_fg=(0, 0.1, 0.7, 1), text_scale=0.05, text_pos=(0, 0.015, 0), image3_color=Vec4(0.6, 0.6, 0.6, 1), text3_fg=Vec4(0, 0, 0.4, 1), command=self.__handleBackToPlayground)
self.timer = ToontownTimer.ToontownTimer()
self.timer.hide()
self.timer.posInTopRightCorner()
numAvs = 0
count = 0
localToonIndex = 0
for index in xrange(len(self.ids)):
avId = self.ids[index]
if avId == base.localAvatar.doId:
localToonIndex = index
if self.states[index] != PURCHASE_NO_CLIENT_STATE and self.states[index] != PURCHASE_DISCONNECTED_STATE:
numAvs = numAvs + 1
layoutList = (None,
(0,),
(0, 2),
(0, 1, 3),
(0, 1, 2, 3))
layout = layoutList[numAvs]
headFramePosList = (Vec3(0.105, 0, -0.384),
Vec3(0.105, 0, -0.776),
Vec3(0.85, 0, -0.555),
Vec3(-0.654, 0, -0.555))
AVID_INDEX = 0
LAYOUT_INDEX = 1
TOON_INDEX = 2
self.avInfoArray = [(base.localAvatar.doId, headFramePosList[0], localToonIndex)]
pos = 1
for index in xrange(len(self.ids)):
avId = self.ids[index]
if self.states[index] != PURCHASE_NO_CLIENT_STATE and self.states[index] != PURCHASE_DISCONNECTED_STATE:
if avId != base.localAvatar.doId:
if base.cr.doId2do.has_key(avId):
self.avInfoArray.append((avId, headFramePosList[layout[pos]], index))
pos = pos + 1
self.headFrames = []
for avInfo in self.avInfoArray:
av = base.cr.doId2do.get(avInfo[AVID_INDEX])
if av:
headFrame = PurchaseHeadFrame(av, purchaseModels)
headFrame.setAvatarState(self.states[avInfo[TOON_INDEX]])
headFrame.setPos(avInfo[LAYOUT_INDEX])
self.headFrames.append((avInfo[AVID_INDEX], headFrame))
purchaseModels.removeNode()
self.foreground = loader.loadModel('phase_3.5/models/modules/TT_A1')
self.foreground.setPos(12.5, -20, -5.5)
self.foreground.setHpr(180, 0, 0)
self.backgroundL = self.foreground.copyTo(hidden)
self.backgroundL.setPos(-12.5, -25, -5)
self.backgroundL.setHpr(180, 0, 0)
self.backgroundR = self.backgroundL.copyTo(hidden)
self.backgroundR.setPos(25, -25, -5)
self.backgroundR.setHpr(180, 0, 0)
streets = loader.loadModel('phase_3.5/models/modules/street_modules')
sidewalk = streets.find('**/street_sidewalk_40x40')
self.sidewalk = sidewalk.copyTo(hidden)
self.sidewalkR = sidewalk.copyTo(hidden)
self.sidewalkL = sidewalk.copyTo(hidden)
self.sidewalk.setPos(-20, -25, -5.5)
self.sidewalk.setColor(0.9, 0.6, 0.4)
self.sidewalkL.setPos(-40, -25, -5.5)
self.sidewalkL.setColor(0.9, 0.6, 0.4)
self.sidewalkR.setPos(0, -25, -5.5)
self.sidewalkR.setColor(0.9, 0.6, 0.4)
streets.removeNode()
doors = loader.loadModel('phase_4/models/modules/doors')
door = doors.find('**/door_single_square_ur_door')
self.door = door.copyTo(hidden)
self.door.setH(180)
self.door.setPos(0, -16.75, -5.5)
self.door.setScale(1.5, 1.5, 2.0)
self.door.setColor(1.0, 0.8, 0, 1)
doors.removeNode()
self.convertingVotesToBeansLabel = DirectLabel(text=TTLocalizer.TravelGameConvertingVotesToBeans, text_fg=VBase4(1, 1, 1, 1), relief=None, pos=(0.0, 0, -0.58), scale=0.075)
self.convertingVotesToBeansLabel.hide()
self.rewardDoubledJellybeanLabel = DirectLabel(text=TTLocalizer.PartyRewardDoubledJellybean, text_fg=(1.0, 0.125, 0.125, 1.0), text_shadow=(0, 0, 0, 1), relief=None, pos=(0.0, 0, -0.67), scale=0.08)
self. | rewardDoubledJellybeanLabel.hide()
self.countSound = base.loadSfx('phase_3.5/audio/sfx/tick_counter.ogg')
self.overMaxSound = base.loadSfx('phase_3.5/audio/sfx/AV_collision.ogg')
self.celebrateSound = base.loadSfx('phase_4/audio/sfx/MG_win.ogg')
return
def unload(self):
PurchaseBase.unload(self)
self.cleanupUnexpectedExitHooks()
self.bg.removeNode()
del self.bg
self.notify.debug('de | stroying head frames')
for headFrame in self.headFrames:
if not headFrame[1].isEmpty():
headFrame[1].reparentTo(hidden)
headFrame[1].des |
joaks1/PyMsBayes | pymsbayes/utils/tempfs.py | Python | gpl-3.0 | 4,956 | 0.004439 | #! /usr/bin/env python
import sys
import os
import tempfile
from pymsbayes.utils.functions import random_str
from pymsbayes.utils.errors import TempFSError
from pymsbayes.utils.messaging import get_logger
_LOG = get_logger(__name__)
class TempFileSystem(object):
"""
A temporary file system that protects against deleting any directories
or files that are not created by an instance of this class.
"""
def __init__(self, parent, prefix='pymsbayes-temp-'):
"""
Create TempFileSystem instance.
`parent` must be an existing directory and will be were the base
directory of the temp file system will be created.
"""
if not os.path.exists(parent):
raise TempFSError('{0!r} does not exist; cannot create '
'directory for temp files\n'.format(dest_dir))
if not os.path.isdir(parent):
raise TempFSError('{0!r} is not a directory; cannot create '
'directory for temp files\n'.format(dest_dir))
self.dirs= set()
self.files = set()
self.parent = self._get_full_path(parent)
self.prefix = prefix
self.base_dir = self._make_dir(parent=self.parent, prefix=self.prefix)
self.token_id = random_str()
self.deleted = False
def _get_full_path(self, path):
return os.path.abspath(os.path.expandvars(os.path.expanduser(path)))
def _make_dir(self, parent, prefix):
d = tempfile.mkdtemp(prefix=prefix, dir=parent)
self._register_dir(d)
return d
def _register_dir(self, path):
self.dirs.add(path)
def _get_file(self, parent, prefix, register = True):
prefix = '-'.join([self.token_id, prefix])
file_descriptor, path = tempfile.mkstemp(prefix=prefix, dir=parent)
os.close(file_descriptor)
if register:
self._register_file(path)
return path
def _register_file(self, path):
self.files.add(path)
def remove_file(self, path):
if path in self.files:
self.files.remove(path)
os.remove(path)
elif os.path.basename(path).startswith(self.token_id):
os.remove(path)
else:
raise TempFSError('File {0!r} is not registered; '
'cannot remove'.format(path))
def _check_parent(self, parent):
full_parent = self._get_full_path(parent)
if not os.path.exists(full_parent):
raise TempFSError('parent {0!r} does not exist'.format(full_parent))
if not os.path.isdir(full_parent):
raise TempFSError('parent {0!r} is not a directory'.format(
full_parent))
if not full_parent in self.dirs:
raise TempFSError('unregistered parent: {0}'.format(full_parent))
return full_parent
def get_file_path(self, parent=None, prefix='temp', register = True):
"""
Get temp file path within the temp directory.
`parent` must exist within temp file base directory and must
be registered with this TempFileSystem instance.
if `register` is True, the temp file is registered (i.e., it is added
to the instance's set of temp files).
"""
if parent is None:
parent = self.base_dir
full_parent = self._check_parent(parent)
return self._get_file(parent=full_parent, prefix=prefix, register = register)
def create_subdir(self, parent=None, prefix='temp'):
"""
Create a new subdirectory within the temp file system.
`parent` must exist within temp file base directory and must
be registered with this TempFileSystem instance.
"""
if parent is None:
pa | rent = self.base_dir
full_parent = self._check_parent(parent)
return self._make_dir(parent=full_parent, prefix=prefix)
def clear_dir(self, path):
full_path = self._get_full_path(path)
if not full | _path in self.dirs:
raise TempFSError('Temp directory {0!r} is not registered; '
'cannot clear'.format(full_path))
for p in os.listdir(full_path):
path = os.path.join(full_path, p)
if os.path.isfile(path):
self.remove_file(path)
else:
self.remove_dir(path)
def remove_dir(self, path):
full_path = self._get_full_path(path)
self.clear_dir(full_path)
self.dirs.remove(full_path)
try:
os.rmdir(full_path)
except OSError, e:
_LOG.warning('Could not remove temp directory {0}. Here are the '
'contents:\n{1}'.format(full_path,
'\n'.join(os.listdir(full_path))))
pass
if full_path == self.base_dir:
self.deleted = True
def purge(self):
self.remove_dir(self.base_dir)
|
spreg-git/pysal | pysal/spreg/tests/test_twosls.py | Python | bsd-3-clause | 14,374 | 0.018784 | import unittest
import numpy as np
import pysal
from pysal.spreg.twosls import BaseTSLS, TSLS
class TestBaseTSLS(unittest.TestCase):
def setUp(self):
db = pysal.open(pysal.examples.get_path("columbus.dbf"),'r')
self.y = np.array(db.by_col("CRIME"))
self.y = np.reshape(self.y, (49,1))
self.X = []
self.X.append(db.by_col("INC"))
self.X = np.array(self.X).T
self.X = np.hstack((np.ones(self.y.shape),self.X))
self.yd = []
self.yd.append(db.by_col("HOVAL"))
self.yd = np.array(self.yd).T
self.q = []
self.q.append(db.by_col("DISCBD"))
self.q = np.array(self.q).T
def test_basic(self):
reg = BaseTSLS(self.y, self.X, self.yd, self.q)
betas = np.array([[ 88.46579584], [ 0.5200379 ], [ -1.58216593]])
np.testing.assert_array_almost_equal(reg.betas, betas, 7)
h_0 = np.array([ 1. , 19.531, 5.03 ])
np.testing.assert_array_almost_equal(reg.h[0], h_0)
hth = np.array([[ 49. , 704.371999 , 139.75 ],
[ 704.371999 , 11686.67338121, 2246.12800625],
[ 139.75 , 2246.12800625, 498.5851 ]])
np.testing.assert_array_almost_equal(reg.hth, hth, 7)
hthi = np.array([[ 0.1597275 , -0.00762011, -0.01044191],
[-0.00762011, 0.00100135, -0.0023752 ],
[-0.01044191, -0.0023752 , 0.01563276]])
np.testing.assert_array_almost_equal(reg.hthi, hthi, 7)
self.assertEqual(reg.k, 3)
self.assertEqual(reg.kstar, 1)
self.assertAlmostEqual(reg.mean_y, 35.128823897959187, 7)
self.assertEqual(reg.n, 49)
pfora1a2 = np.array([[ 9.58156106, -0.22744226, -0.13820537],
[ 0.02580142, 0.08226331, -0.03143731],
[-3.13896453, -0.33487872, 0.20690965]])
np.testing.assert_array_almost_equal(reg.pfora1a2, pfora1a2, 7)
predy_5 = np.array([[-28.68949467], [ 28.99484984], [ 55.07344824], [ 38.266095 | 04], [ 57.5 | 7145851]])
np.testing.assert_array_almost_equal(reg.predy[0:5], predy_5, 7)
q_5 = np.array([[ 5.03], [ 4.27], [ 3.89], [ 3.7 ], [ 2.83]])
np.testing.assert_array_equal(reg.q[0:5], q_5)
self.assertAlmostEqual(reg.sig2n_k, 587.56797852699822, 7)
self.assertAlmostEqual(reg.sig2n, 551.5944288212637, 7)
self.assertAlmostEqual(reg.sig2, 551.5944288212637, 7)
self.assertAlmostEqual(reg.std_y, 16.732092091229699, 7)
u_5 = np.array([[ 44.41547467], [-10.19309584], [-24.44666724], [ -5.87833504], [ -6.83994851]])
np.testing.assert_array_almost_equal(reg.u[0:5], u_5, 7)
self.assertAlmostEqual(reg.utu, 27028.127012241919, 7)
varb = np.array([[ 0.41526237, 0.01879906, -0.01730372],
[ 0.01879906, 0.00362823, -0.00184604],
[-0.01730372, -0.00184604, 0.0011406 ]])
np.testing.assert_array_almost_equal(reg.varb, varb, 7)
vm = np.array([[ 229.05640809, 10.36945783, -9.54463414],
[ 10.36945783, 2.0013142 , -1.01826408],
[ -9.54463414, -1.01826408, 0.62914915]])
np.testing.assert_array_almost_equal(reg.vm, vm, 7)
x_0 = np.array([ 1. , 19.531])
np.testing.assert_array_almost_equal(reg.x[0], x_0, 7)
y_5 = np.array([[ 15.72598 ], [ 18.801754], [ 30.626781], [ 32.38776 ], [ 50.73151 ]])
np.testing.assert_array_almost_equal(reg.y[0:5], y_5, 7)
yend_5 = np.array([[ 80.467003], [ 44.567001], [ 26.35 ], [ 33.200001], [ 23.225 ]])
np.testing.assert_array_almost_equal(reg.yend[0:5], yend_5, 7)
z_0 = np.array([ 1. , 19.531 , 80.467003])
np.testing.assert_array_almost_equal(reg.z[0], z_0, 7)
zthhthi = np.array([[ 1.00000000e+00, -1.66533454e-16, 4.44089210e-16],
[ 0.00000000e+00, 1.00000000e+00, 0.00000000e+00],
[ 1.26978671e+01, 1.05598709e+00, 3.70212359e+00]])
np.testing.assert_array_almost_equal(reg.zthhthi, zthhthi, 7)
def test_n_k(self):
reg = BaseTSLS(self.y, self.X, self.yd, self.q, sig2n_k=True)
betas = np.array([[ 88.46579584], [ 0.5200379 ], [ -1.58216593]])
np.testing.assert_array_almost_equal(reg.betas, betas, 7)
vm = np.array([[ 243.99486949, 11.04572682, -10.16711028],
[ 11.04572682, 2.13183469, -1.08467261],
[ -10.16711028, -1.08467261, 0.67018062]])
np.testing.assert_array_almost_equal(reg.vm, vm, 7)
def test_white(self):
reg = BaseTSLS(self.y, self.X, self.yd, self.q, robust='white')
betas = np.array([[ 88.46579584], [ 0.5200379 ], [ -1.58216593]])
np.testing.assert_array_almost_equal(reg.betas, betas, 7)
vm = np.array([[ 208.27139316, 15.6687805 , -11.53686154],
[ 15.6687805 , 2.26882747, -1.30312033],
[ -11.53686154, -1.30312033, 0.81940656]])
np.testing.assert_array_almost_equal(reg.vm, vm, 7)
def test_hac(self):
gwk = pysal.kernelW_from_shapefile(pysal.examples.get_path('columbus.shp'),k=15,function='triangular', fixed=False)
reg = BaseTSLS(self.y, self.X, self.yd, self.q, robust='hac', gwk=gwk)
betas = np.array([[ 88.46579584], [ 0.5200379 ], [ -1.58216593]])
np.testing.assert_array_almost_equal(reg.betas, betas, 7)
vm = np.array([[ 231.07254978, 15.42050291, -11.3941033 ],
[ 15.01376346, 1.92422887, -1.11865505],
[ -11.34381641, -1.1279227 , 0.72053806]])
np.testing.assert_array_almost_equal(reg.vm, vm, 7)
class TestTSLS(unittest.TestCase):
def setUp(self):
db = pysal.open(pysal.examples.get_path("columbus.dbf"),'r')
self.y = np.array(db.by_col("CRIME"))
self.y = np.reshape(self.y, (49,1))
self.X = []
self.X.append(db.by_col("INC"))
self.X = np.array(self.X).T
self.yd = []
self.yd.append(db.by_col("HOVAL"))
self.yd = np.array(self.yd).T
self.q = []
self.q.append(db.by_col("DISCBD"))
self.q = np.array(self.q).T
def test_basic(self):
reg = TSLS(self.y, self.X, self.yd, self.q)
betas = np.array([[ 88.46579584], [ 0.5200379 ], [ -1.58216593]])
np.testing.assert_array_almost_equal(reg.betas, betas, 7)
h_0 = np.array([ 1. , 19.531, 5.03 ])
np.testing.assert_array_almost_equal(reg.h[0], h_0)
hth = np.array([[ 49. , 704.371999 , 139.75 ],
[ 704.371999 , 11686.67338121, 2246.12800625],
[ 139.75 , 2246.12800625, 498.5851 ]])
np.testing.assert_array_almost_equal(reg.hth, hth, 7)
hthi = np.array([[ 0.1597275 , -0.00762011, -0.01044191],
[-0.00762011, 0.00100135, -0.0023752 ],
[-0.01044191, -0.0023752 , 0.01563276]])
np.testing.assert_array_almost_equal(reg.hthi, hthi, 7)
self.assertEqual(reg.k, 3)
self.assertEqual(reg.kstar, 1)
self.assertAlmostEqual(reg.mean_y, 35.128823897959187, 7)
self.assertEqual(reg.n, 49)
pfora1a2 = np.array([[ 9.58156106, -0.22744226, -0.13820537],
[ 0.02580142, 0.08226331, -0.03143731],
[-3.13896453, -0.33487872, 0.20690965]])
np.testing.assert_array_almost_equal(reg.pfora1a2, pfora1a2, 7)
predy_5 = np.array([[-28.68949467], [ 28.99484984], [ 55.07344824], [ 38.26609504], [ 57.57145851]])
np.testing.assert_array_almost_equal(reg.predy[0:5], predy_5, 7)
q_5 = np.array([[ 5.03], [ 4.27], [ 3.89], [ 3.7 ], [ 2.83]])
np.testing.assert_array_equal(reg.q[0:5], q_5)
self.assertAlmostEqual(reg.sig2n_k, 587.56797852699822, 7)
self.assertAlmostEqual |
yamila-moreno/jannie-5 | examples/bottle-example.py | Python | gpl-3.0 | 520 | 0.003846 | # -* | - coding: utf-8 -*-
from time import sleep
import subprocess
def invoke_rest_api(cmd):
url = 'http://localhost:8080/{}'.format(cmd)
subprocess.call(['curl', url])
def with_api():
# start
invoke_rest_api('start')
invoke_rest_api('takeoff')
sleep(3)
# go forward
invoke_rest_api('go_forward')
sleep(4)
# go backward
invoke_rest_api('go_backward')
sleep(4)
# land
invoke_rest_api('land')
invoke_rest_api('stop')
if __name__ == "_ | _main__":
with_api()
|
faribas/RMG-Py | rmgpy/molecule/draw.py | Python | mit | 68,395 | 0.008217 | #!/usr/bin/env python
# encoding: utf-8
################################################################################
#
# RMG - Reaction Mechanism Generator
#
# Copyright (c) 2009-2011 by the RMG Team (rmg_dev@mit.edu)
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the 'Software'),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
#
################################################################################
"""
This module provides functionality for automatic two-dimensional drawing of the
`skeletal formulae <http://en.wi | kipedia.org/wiki/Skeletal_formula>`_ of a wide
variety of organic and inorganic molecules. The general method for creating
these drawings is to utilize the :meth:`draw()` method of the :class:`Molecule`
you wish to draw; this wraps a call to :meth:`MoleculeDrawer.draw()`, where the
molecule drawing algorithm begins. Advanced use may require use of the
:class:`MoleculeDrawer` class directly.
The `Cairo <http://cairographics.org/>`_ 2D graphics library i | s used to create
the drawings. The :class:`MoleculeDrawer` class module will fail gracefully if
Cairo is not installed.
The implementation uses the 2D coordinate generation of rdKit to find coordinates,
then uses Cairo to render the atom.
"""
import math
import numpy
import os.path
import re
import logging
from rmgpy.qm.molecule import Geometry
from rdkit.Chem import AllChem
from numpy.linalg import LinAlgError
################################################################################
def createNewSurface(format, path=None, width=1024, height=768):
"""
Create a new surface of the specified `type`: "png" for
:class:`ImageSurface`, "svg" for :class:`SVGSurface`, "pdf" for
:class:`PDFSurface`, or "ps" for :class:`PSSurface`. If the surface is to
be saved to a file, use the `path` parameter to give the path to the file.
You can also optionally specify the `width` and `height` of the generated
surface if you know what it is; otherwise a default size of 1024 by 768 is
used.
"""
import cairo
format = format.lower()
if format == 'png':
surface = cairo.ImageSurface(cairo.FORMAT_ARGB32, int(width), int(height))
elif format == 'svg':
surface = cairo.SVGSurface(path, width, height)
elif format == 'pdf':
surface = cairo.PDFSurface(path, width, height)
elif format == 'ps':
surface = cairo.PSSurface(path, width, height)
else:
raise ValueError('Invalid value "{0}" for type parameter; valid values are "png", "svg", "pdf", and "ps".'.format(type))
return surface
################################################################################
class MoleculeDrawer:
"""
This class provides functionality for drawing the skeletal formula of
molecules using the Cairo 2D graphics engine. The most common use case is
simply::
MoleculeDrawer().draw(molecule, format='png', path='molecule.png')
where ``molecule`` is the :class:`Molecule` object to draw. You can also
pass a dict of options to the constructor to affect how the molecules are
drawn.
"""
def __init__(self, options=None):
self.options = {
'fontFamily': 'sans',
'fontSizeNormal': 12,
'fontSizeSubscript': 8,
'bondLength': 24,
'padding': 2,
}
if options: self.options.update(options)
self.clear()
def clear(self):
self.molecule = None
self.cycles = None
self.ringSystems = None
self.coordinates = None
self.symbols = None
self.implicitHydrogens = None
self.left = 0.0
self.top = 0.0
self.right = 0.0
self.bottom = 0.0
self.surface = None
self.cr = None
def draw(self, molecule, format, path=None):
"""
Draw the given `molecule` using the given image `format` - pdf, svg, ps, or
png. If `path` is given, the drawing is saved to that location on disk. The
`options` dict is an optional set of key-value pairs that can be used to
control the generated drawing.
This function returns the Cairo surface and context used to create the
drawing, as well as a bounding box for the molecule being drawn as the
tuple (`left`, `top`, `width`, `height`).
"""
# The Cairo 2D graphics library (and its Python wrapper) is required for
# the molecule drawing algorithm
try:
import cairo
except ImportError:
print 'Cairo not found; molecule will not be drawn.'
return
# Make a copy of the molecule so we don't modify the original
self.molecule = molecule.copy(deep=True)
# Remove all unlabeled hydrogen atoms from the copied atoms and bonds, as
# they are not drawn
# However, if this would remove all atoms, then don't remove any
atomsToRemove = []
self.implicitHydrogens = {}
for atom in self.molecule.atoms:
if atom.isHydrogen() and atom.label == '': atomsToRemove.append(atom)
if len(atomsToRemove) < len(self.molecule.atoms):
for atom in atomsToRemove:
for atom2 in atom.bonds:
try:
self.implicitHydrogens[atom2] += 1
except KeyError:
self.implicitHydrogens[atom2] = 1
self.molecule.removeAtom(atom)
# Generate information about any cycles present in the molecule, as
# they will need special attention
self.__findRingGroups()
# Handle carbon monoxide special case
if self.molecule.toSMILES() == 'C#O':
# RDKit does not accept atom type Ot
self.molecule.removeAtom(self.molecule.atoms[-1])
self.symbols = ['CO']
self.coordinates = numpy.array([[0,0]], numpy.float64)
else:
# Generate the coordinates to use to draw the molecule
try:
self.__generateCoordinates()
# Generate labels to use
self.__generateAtomLabels()
except (ValueError, numpy.linalg.LinAlgError), e:
logging.error('Error while drawing molecule {0}: {1}'.format(molecule.toSMILES(), e))
import sys, traceback
exc_type, exc_value, exc_traceback = sys.exc_info()
traceback.print_exc()
return None, None, None
self.coordinates[:,1] *= -1
self.coordinates *= self.options['bondLength']
# Handle some special cases
if self.symbols == ['H','H']:
# Render as H2 instead of H-H
self.molecule.removeAtom(self.molecule.atoms[-1])
self.symbols = ['H2']
self.coordinates = numpy.array([[0,0]], numpy.float64)
elif self.symbols == ['O', 'O']:
# Render as O2 instead of O-O
self.molecule.removeAtom(self.molecule.atoms[-1])
self.molecule.atoms[0].radicalElectrons = 0
self.s |
ariovistus/pyd | examples/rawexample/setup.py | Python | mit | 268 | 0.011194 | from pyd.supp | ort import setup, Extension
projName = 'rawexample'
setup(
name=projName,
version='0.1',
ext_modules=[
Extension(projName, [ | 'rawexample.d'],
raw_only=True,
build_deimos=True,
d_lump=True)
],
)
|
zachriggle/alpha3 | ALPHA3.py | Python | bsd-3-clause | 11,046 | 0.021637 | # Copyright (c) 2003-2010, Berend-Jan "SkyLined" Wever <berendjanwever@gmail.com>
# Project homepage: http://code.google.com/p/alpha3/
# All rights reserved. See COPYRIGHT.txt for details.
import charsets, encode, io
import x86, x64, test
import os, re, sys
#_______________________________________________________________________________________________________________________
#
# ,sSSs,,s, ,sSSSs, : ALPHA3 - Alphanumeric shellcode encoder.
# dS" Y$P" YS" ,SY : Version 1.0 alpha
# iS' dY ssS" : Copyright (C) 2003-2009 by SkyLined.
# YS, dSb SP, ;SP : <berendjanwever@gmail.com>
# `"YSS'"S' "YSSSY" : http://skypher.com/wiki/index.php/ALPHA3
#_______________________________________________________________________________________________________________________
#
_settings = {
"architecture": None,
"character encoding": None,
"case": None
}
_default_settings = {
"architecture": "x86",
"character encoding": "ascii",
"case": "mixedcase"
}
_valid_settings = {
"case": charsets.valid_character_casings,
"character encoding": charsets.valid_character_encodings,
"architecture": ["x86", "x64"]
}
_arguments = {
"base address": None
}
_switches = {
"input": None,
"output": None
}
_flags = {
"verbose": 0,
"help": 0,
"test": 0,
"int3": 0
}
encoders = [];
import print_functions;
from print_functions import *
def ParseCommandLine():
global _settings, _arguments, _switches, _flags;
# Parse settings, arguments, switches and flags from the command line:
if len(sys.argv) == 1:
_flags["help"] = 1;
else:
for i in range(1, len(sys.argv)):
arg = sys.argv[i];
if arg[:2] == "--":
end_switch_name = arg.find("=");
if end_switch_name != -1:
switch_name = arg[2:end_switch_name];
switch_value = arg[end_switch_name + 1:];
for valid_switch_name in _switches:
if switch_name == valid_switch_name:
_switches[switch_name] = switch_value;
break;
else:
print >>sys.stderr, "Unknown switch '%s'!" % arg[2:];
return False;
else:
flag_name = arg[2:]
for valid_flag_name in _flags:
if flag_name == valid_flag_name:
_flags[flag_name] += 1;
break
else:
print >>sys.stderr, "Unknown flag '%s'!" % valid_flag_name;
return False;
else:
for setting_name in _valid_settings:
if arg in _valid_settings[setting_name]:
_settings[setting_name] = arg;
break;
else:
for argument_name in _arguments:
if _arguments[argument_name] == None:
_arguments[argument_name] = arg;
break;
else:
print >>sys.stderr, "Unknown _arguments: %s." % repr(arg);
return False;
return True;
def PrintLogo():
PrintInfo([
(None, "____________________________________________________________________________"),
(None, """ ,sSSs,,s, ,sSSSs, ALPHA3 - Alphanumeric shellcode encoder."""),
(None, """ dS" Y$P" YS" ,SY Version 1.0 alpha"""),
(None, """ iS' dY ssS" Copyright (C) 2003-2009 by SkyLined."""),
(None, """ YS, dSb SP, ;SP <berendjanwever@gmail.com>"""),
(None, """ `"YSS'"S' "YSSSY" http://skypher.com/wiki/index.php/ALPHA3"""),
(None, "____________________________________________________________________________"),
]);
def PrintHelp():
  # Show command-line usage: encoder settings, I/O settings and flags.
  # (Two tuples below were corrupted by stray field separators; restored.)
  PrintInfo([
    (None, "[Usage]"),
    (" ", "ALPHA3.py [ encoder settings | I/O settings | flags ]"),
    (None, ""),
    (None, "[Encoder setting]"),
    (" architecture ", "Which processor architecture to target (x86, x64)."),
    (" character encoding ", "Which character encoding to use (ascii, cp437, latin-1, utf-16)."),
    (" casing ", "Which character casing to use (uppercase, mixedcase, lowercase)."),
    (" base address ", "How to determine the base address in the decoder code (each encoder has its own set of "
        "valid values)."),
    (None, ""),
    (None, "[I/O Setting]"),
    (" --input=\"file\"", "Path to a file that contains the shellcode to be encoded (Optional, default is to read "
        "input from stdin)."),
    (" --output=\"file\"", "Path to a file that will receive the encoded shellcode (Optional, default is to write "
        "output to stdout)."),
    (None, ""),
    (None, "[Flags]"),
    (" --verbose", "Display verbose information while executing. Use this flag twice to output progress "
        "during encoding."),
    (" --help", "Display this message and quit."),
    (" --test", "Run all available tests for all encoders. (Useful while developing/testing new "
        "encoders)."),
    (" --int3", "Trigger a breakpoint before executing the result of a test. (Use in combination with "
        "--test)."),
    (None, ""),
    (None, "[Notes]"),
    (" ", "You can provide encoder settings in combination with the --help and --test switches to filter which "
        "encoders you get help information for and which get tested, respectively.")
  ]);
def Main():
# Print header
if _flags["help"]:
# Print the main help body before displaying encoder specific help:
PrintLogo();
PrintWrappedLine();
PrintHelp();
PrintWrappedLine();
encoding = False;
elif not _flags["test"]:
if _flags["verbose"]:
PrintLogo();
encoding = True;
else:
if _flags["verbose"]:
PrintLogo();
PrintWrappedLine();
# We're testing our encoders
encoding = False;
# Print the _settings provided by the user and if we're encoding shellcode, set and print the default _settings
# for anything not provided:
if _flags["verbose"]:
for name in _settings:
if _settings[name] is not None:
PrintInfo([(name, _settings[name])]);
elif encoding:
_settings[name] = _default_settings[name];
PrintInfo([(name, _settings[name] + " (default)")]);
for name in _arguments:
if _arguments[name] is not None:
PrintInfo([(name, _arguments[name])]);
# If the user wants to encode shellcode, it needs to be read from stdin or a file:
if encoding:
if _switches["input"] is not None:
shellcode = io.ReadFile(_switches["input"]);
else:
shellcode = sys.stdin.read();
# Scan all encoders to see which match the given _settings/_arguments and take action:
results = [];
errors = False;
help_results = {};
at_least_one_encoder_found = False;
for encoder_settings in encoders:
for name in _settings:
if not name in encoder_settings:
raise AssertionError("One of the encoders is missing the '%s' setting: %s" % (name, encoder_settings["name"]));
if _settings[name] != None and _settings[name] != encoder_settings[name]:
# This _settings is specified but does not match this encoders _settings: skip the encoder.
break;
else: # All _settings match
# Check "base address" argument:
if (_arguments["base address"] is None or
re.match(encoder_settings["base address"], _arguments["base address"], re.IGNORECASE)):
at_least_one_encoder_found = True;
if _flags["t |
X-Scapin/BlueP | resources/demoprojects/Demo1/car.py | Python | apache-2.0 | 418 | 0.002392 | import sys
from vehicule import Vehicule
class Car(Vehicule):
    """A car: a Vehicule with four wheels, named doors and a default
    greeting message."""

    # Class-level constant shared by all Car instances.
    wheel_numbers = 4

    def __init__(self, name):
        self.name = name
        self.doors = {}
        self.default_message = "Hello everybody !"

    def print_value(self, sender):
        """Print this instance's default message, prefixed with *sender*."""
        print(sender + " say : " + self.default_message)
|
icyblade/pynga | setup.py | Python | mit | 1,018 | 0 | from setuptools import setup
# Read package metadata (title, version, author, ...) from
# pynga/__version__.py without importing the package itself.
about = {}
with open('./pynga/__version__.py', 'r') as f:
    exec(f.read(), about)

# The long description shown on PyPI comes straight from the README.
with open('README.md', 'r') as f:
    readme = f.read()

tests_require = [
    'pytest>=3.5.0,<3.7.0',
    'pytest-flake8>=1.0.0'
]

setup(
    name=about['__title__'],
    version=about['__version__'],
    packages=['pynga'],
    url=about['__url__'],
    license=about['__license__'],
    author=about['__author__'],
    author_email=about['__author_email__'],
    description=about['__description__'],
    long_description=readme,
    long_description_content_type='text/markdown',
    classifiers=[
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7',
    ],
    install_requires=[
        'requests>=2.10.0',
        'cachecontrol>=0.12.0',
        'beautifulsoup4>=4.0.0',
        'urllib3>=1.18',
        'pytz>=2017.2',
    ],
    tests_require=tests_require,
    extras_require={
        'test': tests_require,
    },
    python_requires='>=3.6',
)
|
mohamedhagag/community-addons | crm_change_request/models/change_request.py | Python | agpl-3.0 | 3,485 | 0 | # -*- encoding: utf-8 -*-
from openerp.osv import osv, fields
class LeadToChangeRequestWizard(osv.TransientModel):
    """
    wizard to convert a Lead into a Change Request and move the Mail Thread
    """
    _name = "crm.lead2cr.wizard"
    _inherit = 'crm.partner.binding'

    _columns = {
        "lead_id": fields.many2one(
            "crm.lead", "Lead", domain=[("type", "=", "lead")]
        ),
        # "project_id": fields.many2one("project.project", "Project"),
        "change_category_id": fields.many2one(
            "change.management.category", "Change Category"
        ),
    }
    _defaults = {
        # default to the lead the wizard was launched from
        "lead_id": lambda self, cr, uid, context=None: context.get('active_id')
    }

    def action_lead_to_change_request(self, cr, uid, ids, context=None):
        """Convert each selected lead into a change request: create the
        change, move the mail thread and attachments over to it, archive
        the lead, and open the new change request's form view.
        """
        # get the wizards and models
        wizards = self.browse(cr, uid, ids, context=context)
        lead_obj = self.pool["crm.lead"]
        cr_obj = self.pool["change.management.change"]
        attachment_obj = self.pool['ir.attachment']

        for wizard in wizards:
            # get the lead to transform
            lead = wizard.lead_id

            # Reuse a matching partner, or have the lead create one when it
            # carries partner/contact names.
            partner = self._find_matching_partner(cr, uid, context=context)
            if not partner and (lead.partner_name or lead.contact_name):
                partner_ids = lead_obj.handle_partner_assignation(
                    cr, uid, [lead.id], context=context
                )
                partner = partner_ids[lead.id]

            # create new change request
            vals = {
                "description": lead.name,
                "description_event": lead.description,
                "email_from": lead.email_from,
                "project_id": lead.project_id.id,
                "stakeholder_id": partner,
                "author_id": uid,
                "change_category_id": wizard.change_category_id.id,
            }
            change_id = cr_obj.create(cr, uid, vals, context=None)
            # (browse result currently unused; kept for parity)
            change = cr_obj.browse(cr, uid, change_id, context=None)

            # move the mail thread
            lead_obj.message_change_thread(
                cr, uid, lead.id, change_id,
                "change.management.change", context=context
            )
            # Move attachments
            attachment_ids = attachment_obj.search(
                cr, uid,
                [('res_model', '=', 'crm.lead'), ('res_id', '=', lead.id)],
                context=context
            )
            attachment_obj.write(
                cr, uid, attachment_ids,
                {'res_model': 'change.management.change', 'res_id': change_id},
                context=context
            )
            # Archive the lead
            lead_obj.write(
                cr, uid, [lead.id], {'active': False}, context=context)
            # delete the lead
            # lead_obj.unlink(cr, uid, [lead.id], context=None)

        # return the action to go to the form view of the new CR
        view_id = self.pool.get('ir.ui.view').search(
            cr, uid,
            [
                ('model', '=', 'change.management.change'),
                ('name', '=', 'change_form_view')
            ]
        )
        return {
            'name': 'CR created',
            'view_type': 'form',
            'view_mode': 'form',
            'view_id': view_id,
            'res_model': 'change.management.change',
            'type': 'ir.actions.act_window',
            'res_id': change_id,
            'context': context
        }
|
datahuborg/datahub | src/core/db/backend/pg.py | Python | mit | 45,439 | 0.000044 | import re
import os
import errno
import shutil
import hashlib
from collections import namedtuple
from uuid import uuid4
import psycopg2
import core.db.query_rewriter
from psycopg2.extensions import AsIs
from psycopg2.pool import ThreadedConnectionPool
from psycopg2 import errorcodes
from core.db.licensemanager import LicenseManager
from core.db.errors import PermissionDenied
from config import settings
"""
DataHub internal APIs for postgres repo_base
"""
HOST = settings.DATABASES['default']['HOST']
PORT = 5432
if settings.DATABASES['default']['PORT'] != '':
try:
PORT = int(settings.DATABASES['default']['PORT'])
except:
pass
# Maintain a separate db connection pool for each (user, password, database)
# tuple.
connection_pools = {}
PoolKey = namedtuple('PoolKey', 'user, password, repo_base')
def _pool_for_credentials(user, password, repo_base, create_if_missing=True):
    """Return the connection pool for (user, password, repo_base), creating
    it on demand unless create_if_missing is False."""
    key = PoolKey(user, password, repo_base)
    pool = connection_pools.get(key)
    # Reuse the pool only while it is still open; a closed pool (normally
    # only closed during testing, to force hanging connections shut) is
    # replaced by a fresh one.
    if pool is None or pool.closed:
        if create_if_missing is False:
            return None
        # psycopg2 raises "PoolError: connection pool exausted" if a thread
        # tries holding onto more than 10 connections to a single database.
        pool = ThreadedConnectionPool(
            0,
            10,
            user=user,
            password=password,
            host=HOST,
            port=PORT,
            database=repo_base)
        connection_pools[key] = pool
    return pool
def _close_all_connections(repo_base):
    """Close every open connection pool associated with repo_base."""
    open_pools = (
        pool for key, pool in connection_pools.iteritems()
        if key.repo_base == repo_base and not pool.closed)
    for pool in open_pools:
        pool.closeall()
def _convert_pg_exception(e):
    """Convert some psycopg2 errors into exceptions meaningful to Django.

    Raises a translated exception when the pgcode is recognized; otherwise
    re-raises the original psycopg2 error unchanged.
    """
    if (e.pgcode == errorcodes.INSUFFICIENT_PRIVILEGE):
        raise PermissionDenied()
    if (e.pgcode == errorcodes.INVALID_PARAMETER_VALUE or
            e.pgcode == errorcodes.UNDEFINED_OBJECT):
        raise ValueError("Invalid parameter in query.")
    if e.pgcode == errorcodes.INVALID_SCHEMA_NAME:
        # Postgres schemas back DataHub repos, so a missing schema means the
        # query did not name a repo.
        error = ('Repo not found. '
                 'You must specify a repo in your query. '
                 'i.e. select * from REPO_NAME.TABLE_NAME. ')
        raise LookupError(error)
    if e.pgcode == errorcodes.UNDEFINED_TABLE:
        raise LookupError("Table or view not found.")
    if e.pgcode == errorcodes.DUPLICATE_SCHEMA:
        raise ValueError("A repo with that name already exists.")
    if e.pgcode == errorcodes.DUPLICATE_TABLE:
        raise ValueError("A table with that name already exists.")
    raise e
class PGBackend:
    def __init__(self, user, password, host=HOST, port=PORT, repo_base=None):
        """Open a pooled, autocommit connection to `repo_base` as `user`.

        NOTE(review): self.host/self.port are stored but the pool helper
        uses the module-level HOST/PORT — confirm which is intended.
        """
        self.user = user
        self.password = password
        self.host = host
        self.port = port
        self.repo_base = repo_base
        self.connection = None
        # row level security is enabled unless the user is a superuser
        self.row_level_security = bool(
            user != settings.DATABASES['default']['USER'])
        # We only need a query rewriter if RLS is enabled
        if self.row_level_security:
            self.query_rewriter = core.db.query_rewriter.SQLQueryRewriter(
                self.repo_base, self.user)
        self.__open_connection__()
    def __del__(self):
        # Return this backend's connection to its pool when the backend is
        # garbage collected.
        self.close_connection()
    def __open_connection__(self):
        # Borrow a connection from the per-credentials pool and put it in
        # autocommit mode, so each statement commits on its own.
        pool = _pool_for_credentials(self.user, self.password, self.repo_base)
        self.connection = pool.getconn()
        self.connection.set_isolation_level(
            psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
    def change_repo_base(self, repo_base):
        # Reconnect this backend to a different database. The current
        # connection must be released first, because pools are keyed on the
        # old repo_base.
        self.close_connection()
        self.repo_base = repo_base
        self.__open_connection__()
    def close_connection(self):
        # Return the connection to its pool (closing it for real), but only
        # if the pool still exists and is open — never create a pool here.
        pool = _pool_for_credentials(self.user, self.password, self.repo_base,
                                     create_if_missing=False)
        if self.connection and pool and not pool.closed:
            pool.putconn(self.connection, close=True)
        self.connection = None
def _check_for_injections(self, noun):
"""
Raises ValueError if the proposed noun is invalid.
Valid nouns contain only alphanumeric characters and underscores, and
must not begin or end with an underscore.
"""
invalid_noun_msg = (
"Usernames and repo names may only contain "
"alphanumeric characters and underscores, must begin with a "
"letter, and must not begin or end with an underscore."
)
regex = r'^(?![\_\d])[\w\_]+(?<![\_])$'
valid_pattern = re.compile(regex)
matches = valid_pattern.match(noun)
if matches is None:
raise ValueError(invalid_noun_msg)
def _validate_table_name(self, noun):
"""
Raises ValueError if the proposed table name is invalid.
Valid table names contain only alphanumeric characters and underscores.
"""
invalid_noun_msg = (
"Table names may only contain "
"alphanumeric characters and underscores, must begin with a "
"letter, and must not begin or end with an underscore."
)
regex = r'^(?![\d])[\w\_]+(?<![\_])$'
valid_pattern = re.compile(regex)
matches = valid_pattern.match(noun)
if matches is None:
raise ValueError(invalid_noun_msg)
    def create_repo(self, repo):
        """Creates a postgres schema for the user."""
        self._check_for_injections(repo)
        # AsIs interpolates identifiers verbatim; this is only safe because
        # the name was validated above.
        query = 'CREATE SCHEMA IF NOT EXISTS %s AUTHORIZATION %s'
        params = (AsIs(repo), AsIs(self.user))
        res = self.execute_sql(query, params)
        return res['status']
    def list_repos(self):
        # A repo is any schema not owned by the DataHub database superuser;
        # returns a flat list of schema names.
        query = ('SELECT schema_name AS repo_name '
                 'FROM information_schema.schemata '
                 'WHERE schema_owner != %s')
        params = (settings.DATABASES['default']['USER'],)
        res = self.execute_sql(query, params)
        return [t[0] for t in res['tuples']]
    def rename_repo(self, repo, new_name):
        # Validate both identifiers before verbatim (AsIs) interpolation.
        self._check_for_injections(repo)
        self._check_for_injections(new_name)
        query = 'ALTER SCHEMA %s RENAME TO %s'
        params = (AsIs(repo), AsIs(new_name))
        res = self.execute_sql(query, params)
        return res['status']
    def delete_repo(self, repo, force=False):
        """Deletes a repo and the folder the user's repo files are in."""
        self._check_for_injections(repo)
        # drop the schema; with force=True, CASCADE also drops everything
        # contained in it
        query = 'DROP SCHEMA %s %s'
        params = (AsIs(repo), AsIs('CASCADE') if force is True else AsIs(''))
        res = self.execute_sql(query, params)
        return res['status']
def add_collaborator(
self, repo, collaborator, db_privileges=[], license_id=None):
# check that all repo names, usernames, and privileges passed aren't
# sql injections
self._check_for_injections(repo)
self._check_for_injections(collaborator)
for privilege in db_privileges:
self._check_for_injections(privilege)
if license_id:
query = (
'BEGIN;'
'GRANT USAGE ON SCHEMA %s TO %s;'
'COMMIT;')
privileges_str = ', '.join(db_privileges)
params = [repo, collaborator, privileges_str, repo,
collaborator, repo, privileges_str, collaborator]
params = tuple(map(lambda x: AsIs(x), params))
res = self.execute_sql(query, params)
return res['status']
else:
query = ('BEGIN;'
'GRANT USAGE ON SCHEMA %s TO %s;'
'GRANT %s ON ALL TABLES IN SCHEMA %s TO %s;'
'ALTER DEFAULT PRIVILEGES IN SCHEMA %s '
'GRANT %s ON TABLES TO % |
kitchenbudapest/vr | hud.py | Python | gpl-3.0 | 4,413 | 0.019035 | ## INFO ########################################################################
## ##
## plastey ##
## ======= ##
## ##
## Oculus Rift + Leap Motion + Python 3 + C + Blender + Arch Linux ##
## Version: 0.2.0.980 (20150510) ##
## File: hud.py ##
## ##
## For more information about the project, visit ##
## <http://plastey.kibu.hu>. ##
## Copyright (C) 2015 Peter Varo, Kitchen Budapest ##
## ##
## This program is free software: you can redistribute it and/or modify it ##
## under the terms of the GNU General Public License as published by the ##
## Free Software Foundation, either version 3 of the License, or ##
## (at your option) any later version. ##
## ##
## This program is distributed in the hope that it will be useful, but ##
## WITHOUT ANY WARRANTY; without even the implied warranty of ##
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. ##
## See the GNU General Public License for more details. ##
## ##
## You should have received a copy of the GNU General Public License ##
## along with this program, most likely a file in the root directory, ##
## called 'LICENSE'. If not, see <http://www.gnu.org/licenses>. ##
## | ##
######################################################################## INFO ##
# Import python modules
from collections import deque
#------------------------------------------------------------------------------#
class Text:
    """Rolling on-screen message list.

    The newest message is shown via one text object, the older ones
    (newline-joined) via a second; update() expires the oldest message once
    per `interval` time units.
    """

    def __init__(self, text_first_object,
                 text_other_object,
                 time_getter,
                 interval):
        # text_first_object/text_other_object: objects exposing a writable
        # `.text` attribute (presumably Blender text objects — TODO confirm).
        # time_getter: zero-argument callable returning the current time.
        self._text_first = text_first_object
        self._text_other = text_other_object
        self._get_time = time_getter
        self._interval = interval
        self._last_time = time_getter()
        self._messages = deque()
        # NOTE(review): nothing ever sets this back to False, so the
        # `elif not self._still_empty` branch in update() never fires;
        # preserved as-is.
        self._still_empty = True

    def _update(self):
        """Push the current queue to the text objects and reset the timer."""
        # Write the changed and constructed messages to display
        messages = iter(self._messages)
        try:
            self._text_first.text = next(messages)
            self._text_other.text = '\n'.join(messages)
        except StopIteration:
            # Empty queue: blank both displays.
            self._text_first.text = self._text_other.text = ''
        # Update timer
        self._last_time = self._get_time()

    def clear(self):
        """Drop all messages and blank the display."""
        self._messages = deque()
        self._update()

    def update(self):
        """Expire the oldest message once `interval` has elapsed."""
        # If there are any messages left
        if len(self._messages):
            # If interval passed
            if (self._last_time + self._interval) <= self._get_time():
                # Remove oldest item and refresh the display
                self._messages.pop()
                self._update()
        # If deque just become empty
        elif not self._still_empty:
            # Switch state flag and update display
            self._still_empty = True
            self._update()

    def write(self, message):
        """Prepend a new (newest) message and refresh the display."""
        self._messages.appendleft(message)
        self._update()
|
consultit/p3opensteer | samples/python/map_drive.py | Python | lgpl-3.0 | 11,669 | 0.004113 | '''
Created on Jun 26, 2016
@author: consultit
'''
from panda3d.core import TextNode, ClockObject, AnimControlCollection, \
auto_bind, LPoint3f, LVecBase3f, TextureStage, TexGenAttrib
from p3opensteer import OSSteerManager, ValueList_string, ValueList_LPoint3f, \
ValueList_float, OSSteerPlugIn
#
from common import startFramework, toggleDebugFlag, toggleDebugDraw, mask, \
loadTerrain, printCreationParameters, handleVehicleEvent, \
changeVehicleMaxForce, changeVehicleMaxSpeed, getVehicleModelAnims, \
animRateFactor, writeToBamFileAndExit, readFromBamFile, bamFileName, \
getCollisionEntryFromCamera, obstacleFile, HandleObstacleData, \
handleObstacles, HandleVehicleData, handleVehicles, loadPlane, \
loadTerrainLowPoly
import sys, random
# # specific data/functions declarations/definitions
sceneNP = None
vehicleAnimCtls = []
steerPlugIn = None
steerVehicles = []
rttTexStage = None
#
def setParametersBeforeCreation():
    """set parameters as strings before plug-ins/vehicles creation"""

    steerMgr = OSSteerManager.get_global_ptr()
    valueList = ValueList_string()
    # set plug-in type
    steerMgr.set_parameter_value(OSSteerManager.STEERPLUGIN, "plugin_type",
                                 "map_drive")
    # set vehicle's type, mass, speed
    steerMgr.set_parameter_value(OSSteerManager.STEERVEHICLE, "vehicle_type",
                                 "map_driver")
    steerMgr.set_parameter_value(OSSteerManager.STEERVEHICLE, "max_speed",
                                 "20.0")
    steerMgr.set_parameter_value(OSSteerManager.STEERVEHICLE, "max_force",
                                 "8.0")
    steerMgr.set_parameter_value(OSSteerManager.STEERVEHICLE, "up_axis_fixed",
                                 "true")
    # set vehicle throwing events
    # (entry format appears to be "<event>@<name>@<frequency>" triples
    # joined by ':' — TODO confirm against p3opensteer docs)
    valueList.clear()
    valueList.add_value(
        "avoid_obstacle@avoid_obstacle@1.0:path_following@path_following@1.0")
    steerMgr.set_parameter_values(OSSteerManager.STEERVEHICLE,
                                  "thrown_events", valueList)
    # echo the resulting configuration
    printCreationParameters()
def updatePlugIn(steerPlugIn, task):
    """Per-frame task: advance the steering plug-in and drive the walk/run
    animation of each vehicle from its current speed."""
    global steerVehicles, vehicleAnimCtls
    # advance the simulation by this frame's delta time
    steerPlugIn.update(ClockObject.get_global_clock().get_dt())
    for idx, animCtls in enumerate(vehicleAnimCtls):
        speed = steerVehicles[idx].get_speed()
        if speed > 0.0:
            # below 4.0 play "walk" (index 0), otherwise "run" (index 1)
            onIdx = 0 if speed < 4.0 else 1
            offIdx = (onIdx + 1) % 2
            # stop the animation that should not be playing
            if animCtls[offIdx].is_playing():
                animCtls[offIdx].stop()
            # scale the active animation's rate with the vehicle's speed
            animCtls[onIdx].set_play_rate(speed / animRateFactor[onIdx])
            if not animCtls[onIdx].is_playing():
                animCtls[onIdx].loop(True)
        else:
            # standing still: stop both animations
            animCtls[0].stop()
            animCtls[1].stop()
    return task.cont
def debugDrawToTexture():
    "debug draw to texture"
    global steerPlugIn, sceneNP, app
    # render the plug-in's debug drawing over sceneNP into a texture sized
    # for the application's main window
    steerPlugIn.debug_drawing_to_texture(sceneNP, app.win)
def onTextureReady(data, texture):
    """Callback fired when the debug-draw texture has been generated:
    apply it to the scene (data is the TextureStage reserved for it)."""
    global sceneNP
    rttTexStage = data
    # set up texture where to render
    sceneNP.clear_texture(rttTexStage)
    rttTexStage.set_mode(TextureStage.M_modulate)
    # take into account sceneNP dimensions
    sceneNP.set_tex_offset(rttTexStage, 0.5, 0.5)
    sceneNP.set_tex_scale(rttTexStage, 1.0 / 128.0, 1.0 / 128.0)
    sceneNP.set_tex_gen(rttTexStage, TexGenAttrib.M_world_position)
    sceneNP.set_texture(rttTexStage, texture, 10)
def togglePredictionType():
    """Toggle the plug-in's map prediction type between curved and linear."""
    global steerPlugIn
    predictionType = steerPlugIn.get_map_prediction_type()
    if predictionType == OSSteerPlugIn.CURVED_PREDICTION:
        steerPlugIn.set_map_prediction_type(OSSteerPlugIn.LINEAR_PREDICTION)
        print ("prediction type: linear")
    else:
        steerPlugIn.set_map_prediction_type(OSSteerPlugIn.CURVED_PREDICTION)
        print ("prediction type: curved")
if __name__ == '__main__':
msg = "'map drive'"
app = startFramework(msg)
# # here is room for your own code
# print some help to screen
text = TextNode("Help")
text.set_text(
msg + "\n\n"
"- press \"d\" to toggle debug drawing\n"
"- press \"o\"/\"shift-o\" to add/remove obstacle\n"
"- press \"t\" to (re)draw the map of the path\n"
"- press \"a\" to add vehicle\n"
"- press \"p\" to toggle map prediction type\n")
textNodePath = app.aspect2d.attach_new_node(text)
textNodePath.set_pos(0.25, 0.0, 0.8)
textNodePath.set_scale(0.035)
# create a steer manager; set root and mask to manage 'kinematic' vehicles
steerMgr = OSSteerManager(app.render, mask)
# print creation parameters: defult values
print("\n" + "Default creation parameters:")
printCreationParameters()
# load or restore all scene stuff: if passed an argument
# try to read it from bam file
if (not len(sys.argv) > 1) or (not readFromBamFile(sys.argv[1])):
# no argument or no valid bamFile
# reparent the reference node to render
steerMgr.get_reference_node_path().reparent_to(app.render)
# get a sceneNP, naming it with "SceneNP" to ease restoring from bam
# file
sceneNP = loadTerrainLowPoly("SceneNP", 64, 24)
# and reparent to the reference node
sceneNP.reparent_to(steerMgr.get_reference_node_path())
# set the texture stage used for debug draw texture
rttTexStage = TextureStage("rttTexStage")
# set sceneNP's collide mask
sceneNP.set_collide_mask(mask)
# set creation parameters as strings before plug-in/vehicles creation
print("\n" + "Current creation parameters:")
setParametersBeforeCreation()
# create the plug-in (attached to the reference node)
plugInNP = steerMgr.create_steer_plug_in()
steerPlugIn = plugInNP.node()
# set the pathway
pointList = ValueList_LPoint3f()
radiusList = ValueList_float()
pointList.add_value(LPoint3f(-41.80, 34.46, -0.17))
radiusList.add_value(7.0)
pointList.add_value(LPoint3f(-2.21, 49.15, -0.36))
radiusList.add_value(8.0)
pointList.add_value(LPoint3f(10.78, 16.65, 0.14))
radiusList.add_value(9.0)
pointList.add_value(LPoint3f(40.44, 17.58, -0.22))
radiusList.add_value(9.0)
pointList.add_value(LPoint3f(49.04, -22.15, -0.60))
radiusList.add_value(8.0)
pointList.add_value(LPoint3f(13.99, -52.70, 0.39))
radiusList.add_value(8.0)
pointList.add_value(LPoint3f(-3.46, -31.90, 0.71))
radiusList.add_value(7.0)
pointList.add_value(LPoint3f(-30.0, -39.97, -0.35))
radiusList.add_value(6.0)
pointList.add_value(LPoint3f(-47.12, -17.31, -0.43))
radiusList.add_value(6.0)
pointList.add_value(LPoint3f(-51.31, 9.08, -0.25))
radiusList.add_value(7.0)
steerPlugIn.set_pathway(pointList, radiusList, False, True)
# make the map
steerPlugIn.make_map(200)
else:
# valid bamFile
# restore plug-in: through steer manager
steerPlugIn = OSSteerManager.get_global_ptr().get_steer_plug_in(0)
# restore sceneNP: through panda3d
sceneNP = OSSteerManager.get_global_ptr().get_reference_node_path().find("**/SceneNP")
# reparent the reference node to render
OSSteerManager.get_global_ptr().get_reference_node_path().reparent_to(app.render)
# restore the texture stage used for debug draw texture
rttTexStage = sceneNP.find_all_texture_stages().find_texture_stage(
"rttTexStage")
if not rttTexStage:
rttTexStage = TextureStag |
MehdiSfr/tensor-flow | tensorflow/python/training/moving_averages.py | Python | apache-2.0 | 11,235 | 0.002225 | # Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Maintain moving averages of parameters."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import constant_op
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import state_ops
from tensorflow.python.ops import variables
# TODO(touts): switch to variables.Variable.
def assign_moving_average(variable, value, decay, name=None):
  """Compute the moving average of a variable.

  The moving average of 'variable' updated with 'value' is:
    variable * decay + value * (1 - decay)

  The returned Operation sets 'variable' to the newly computed moving average.

  The new value of 'variable' can be set with the 'AssignSub' op as:
     variable -= (1 - decay) * (variable - value)

  Args:
    variable: A Variable.
    value: A tensor with the same shape as 'variable'
    decay: A float Tensor or float value. The moving average decay.
    name: Optional name of the returned operation.

  Returns:
    An Operation that updates 'variable' with the newly computed
    moving average.
  """
  with ops.op_scope([variable, value, decay], name, "AssignMovingAvg") as name:
    # Run the update on the variable's own device, expressed as an
    # assign_sub so concurrent updates remain lockless.
    with ops.device(variable.device):
      decay = ops.convert_to_tensor(1.0 - decay, name="decay")
      if decay.dtype != variable.dtype.base_dtype:
        decay = math_ops.cast(decay, variable.dtype.base_dtype)
      return state_ops.assign_sub(variable, (variable - value) * decay,
                                  name=name)
class Expo | nentialMovingAverage(object):
"""Maintains moving averages of variables by employing an exponential decay.
When training a model, it is often beneficial to maintain moving averages of
the trained parameters. Evaluations that use averaged parameters sometimes
produce significantly better results than the final trained values.
The `apply()` method adds shadow copies of trained variables and add ops that
maintain a moving average of the trained variables in their shadow copies.
It is used when building the training model. The ops that maintain moving
averages are typically run after each training step.
The `average()` and `average_name()` methods give access to the shadow
variables and their names. They are useful when building an evaluation
model, or when restoring a model from a checkpoint file. They help use the
moving averages in place of the last trained values for evaluations.
The moving averages are computed using exponential decay. You specify the
decay value when creating the `ExponentialMovingAverage` object. The shadow
variables are initialized with the same initial values as the trained
variables. When you run the ops to maintain the moving averages, each
shadow variable is updated with the formula:
`shadow_variable -= (1 - decay) * (shadow_variable - variable)`
This is mathematically equivalent to the classic formula below, but the use
of an `assign_sub` op (the `"-="` in the formula) allows concurrent lockless
updates to the variables:
`shadow_variable = decay * shadow_variable + (1 - decay) * variable`
Reasonable values for `decay` are close to 1.0, typically in the
multiple-nines range: 0.999, 0.9999, etc.
Example usage when creating a training model:
```python
# Create variables.
var0 = tf.Variable(...)
var1 = tf.Variable(...)
# ... use the variables to build a training model...
...
# Create an op that applies the optimizer. This is what we usually
# would use as a training op.
opt_op = opt.minimize(my_loss, [var0, var1])
# Create an ExponentialMovingAverage object
ema = tf.train.ExponentialMovingAverage(decay=0.9999)
# Create the shadow variables, and add ops to maintain moving averages
# of var0 and var1.
maintain_averages_op = ema.apply([var0, var1])
# Create an op that will update the moving averages after each training
# step. This is what we will use in place of the usuall trainig op.
with tf.control_dependencies([opt_op]):
training_op = tf.group(maintain_averages_op)
...train the model by running training_op...
```
There are two ways to use the moving averages for evaluations:
* Build a model that uses the shadow variables instead of the variables.
For this, use the `average()` method which returns the shadow variable
for a given variable.
* Build a model normally but load the checkpoint files to evaluate by using
the shadow variable names. For this use the `average_name()` method. See
the [Saver class](../../api_docs/python/train.md#Saver) for more
information on restoring saved variables.
Example of restoring the shadow variable values:
```python
# Create a Saver that loads variables from their saved shadow values.
shadow_var0_name = ema.average_name(var0)
shadow_var1_name = ema.average_name(var1)
saver = tf.train.Saver({shadow_var0_name: var0, shadow_var1_name: var1})
saver.restore(...checkpoint filename...)
# var0 and var1 now hold the moving average values
```
@@__init__
@@apply
@@average_name
@@average
"""
  def __init__(self, decay, num_updates=None,
               name="ExponentialMovingAverage"):
    """Creates a new ExponentialMovingAverage object.

    The `Apply()` method has to be called to create shadow variables and add
    ops to maintain moving averages.

    The optional `num_updates` parameter allows one to tweak the decay rate
    dynamically. . It is typical to pass the count of training steps, usually
    kept in a variable that is incremented at each step, in which case the
    decay rate is lower at the start of training.  This makes moving averages
    move faster.  If passed, the actual decay rate used is:

      `min(decay, (1 + num_updates) / (10 + num_updates))`

    Args:
      decay: Float.  The decay to use.
      num_updates: Optional count of number of updates applied to variables.
      name: String. Optional prefix name to use for the name of ops added in
        `Apply()`.
    """
    self._decay = decay
    self._num_updates = num_updates
    self._name = name
    # Maps raw Variables/Tensors to their shadow (moving-average) Variables;
    # populated by apply().
    self._averages = {}
def apply(self, var_list=None):
"""Maintains moving averages of variables.
`var_list` must be a list of `Variable` or `Tensor` objects. This method
creates shadow variables for all elements of `var_list`. Shadow variables
for `Variable` objects are initialized to the variable's initial value.
For `Tensor` objects, the shadow variables are initialized to 0.
shadow variables are created with `trainable=False` and added to the
`GraphKeys.ALL_VARIABLES` collection. They will be returned by calls to
`tf.all_variables()`.
Returns an op that updates all shadow variables as described above.
Note that `apply()` can be called multiple times with different lists of
variables.
Args:
var_list: A list of Variable or Tensor objects. The variables
and Tensors must be of types float32 or float64.
Returns:
An Operation that updates the moving averages.
Raises:
TypeError: If the arguments are not all float32 or float64.
ValueError: If the moving average of one of the variables is already
being computed.
"""
# TODO(touts): op_scope
if var_list is None:
|
Everley1993/Laky-Earo | test/test_diagram.py | Python | apache-2.0 | 4,567 | 0.003065 | #!/usr/bin/python
# -*- coding:utf-8 -*-
import unittest
from earo.event import Event, Field
from earo.handler import Handler, Emittion, NoEmittion
from earo.mediator import Mediator
from earo.context import Context
from earo.processor import Processor, ProcessFlow
from earo.diagram import Diagram
class TestDiagram(unittest.TestCase):
    """Tests that earo Diagram renders JSON for executed and static flows."""

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def test_active_process_flow(self):
        """An executed (active) process flow can be rendered to diagram JSON."""
        mediator = Mediator()
        processor = Processor('.+')

        class EventA(Event):
            event_a_field = Field(int, 100)

        class EventB(Event):
            event_b_field = Field(str, 'hello')

        class EventC(Event):
            event_c_field = Field(float, 1.1)

        class EventD(Event):
            event_d_field = Field(dict, {'x': 3, 'y': 4})

        # EventE is declared but deliberately has no handler attached.
        class EventE(Event):
            event_e_field = Field(list, [3, 8, 7])

        def fooA_BC(context, event):
            import time
            time.sleep(0.5)  # simulate a slow handler so timing shows in the flow
            return (Emittion(EventB()), NoEmittion(EventC, 'Test No Emmittion EventC'))

        def fooA(context, event):
            pass

        def fooB_D(context, event):
            return Emittion(EventD())

        def fooC(context, event):
            pass

        def fooD(context, event):
            1 / 0  # deliberate failure: the diagram must record handler errors

        handler_1 = Handler(EventA, fooA_BC, [EventB, EventC])
        handler_2 = Handler(EventA, fooA)
        handler_3 = Handler(EventB, fooB_D, [EventD])
        handler_4 = Handler(EventC, fooC)
        handler_5 = Handler(EventD, fooD)
        mediator.register_event_handler(
            handler_1,
            handler_2,
            handler_3,
            handler_4,
            handler_5
        )
        context = Context(mediator, EventA(), processor)
        context.process()
        process_flow = context.process_flow
        diagram = Diagram(process_flow=process_flow)
        self.assertIsNotNone(diagram.json)

    def test_inactive_process_flow(self):
        """A static (never executed) process flow can be rendered to JSON."""
        mediator = Mediator()

        class EventA(Event):
            event_a_field = Field(int, 100)

        class EventB(Event):
            event_b_field = Field(str, 'hello')

        class EventC(Event):
            event_c_field = Field(float, 1.1)

        class EventD(Event):
            event_d_field = Field(dict, {'x': 3, 'y': 4})

        def fooBC(context, event):
            return (Emittion(EventB()), Emittion(EventC()))

        def fooD(context, event):
            return Emittion(EventD())

        def foo(context, event):
            pass

        def fooEx(context, event):
            1 / 0  # deliberate failure path

        handler_1 = Handler(EventA, fooBC, [EventB, EventC])
        handler_2 = Handler(EventA, foo)
        handler_3 = Handler(EventB, fooD, [EventD])
        handler_4 = Handler(EventC, foo)
        handler_5 = Handler(EventD, fooEx)
        mediator.register_event_handler(
            handler_1,
            handler_2,
            handler_3,
            handler_4,
            handler_5
        )
        process_flow = ProcessFlow(mediator, EventA)
        diagram = Diagram(process_flow=process_flow)
        self.assertIsNotNone(diagram.json)

    def test_json(self):
        """Diagram JSON round-trips: a diagram rebuilt from JSON is valid."""
        mediator = Mediator()

        class EventA(Event):
            event_a_field = Field(int, 100)

        class EventB(Event):
            event_b_field = Field(str, 'hello')

        class EventC(Event):
            event_c_field = Field(float, 1.1)

        class EventD(Event):
            event_d_field = Field(dict, {'x': 3, 'y': 4})

        def fooBC(context, event):
            return (Emittion(EventB()), Emittion(EventC()))

        def fooD(context, event):
            return Emittion(EventD())

        def foo(context, event):
            pass

        def fooEx(context, event):
            1 / 0

        handler_1 = Handler(EventA, fooBC, [EventB, EventC])
        handler_2 = Handler(EventA, foo)
        handler_3 = Handler(EventB, fooD, [EventD])
        handler_4 = Handler(EventC, foo)
        handler_5 = Handler(EventD, fooEx)
        mediator.register_event_handler(
            handler_1,
            handler_2,
            handler_3,
            handler_4,
            handler_5
        )
        process_flow = ProcessFlow(mediator, EventA)
        diagram_from_process_flow = Diagram(process_flow=process_flow)
        json = diagram_from_process_flow.json
        diagram_from_json = Diagram(json=json)
        self.assertIsNotNone(diagram_from_json.json)

if __name__ == '__main__':
    unittest.main()
|
sa2ajj/DistroTracker | pts/mail/management/commands/pts_dispatch.py | Python | gpl-2.0 | 2,039 | 0 | # Copyright 2013 The Distro Tracker Developers
# See the COPYRIGHT file at the top-level directory of this distribution and
# at http://deb.li/DTAuthors
#
# This file is part of Distro Tracker. It is subject to the license terms
# in the LICENSE file found in the top-level directory of this
# distribution and at http://deb.li/DTLicense. No part of Distro Tracker,
# including this file, may be copied, modified, propagated, or distributed
# except according to the terms contained in the LICENSE file.
"""
Implements the management command which invokes the dispatch functionality.
"""
from django.core.management.base import BaseCommand
from django.utils import six

from pts.mail import dispatch

import os
import sys
import logging

logger = logging.getLogger(__name__)
class Command(BaseCommand):
    """
    A Django management command used to invoke the dispatch functionality.
    The received message is expected as input on stdin.
    """
    # Stream the raw email message is read from; a class attribute so tests
    # can substitute their own stream.
    input_file = sys.stdin
    def handle(self, *args, **kwargs):
        """Read one raw email message from stdin and hand it to the dispatcher."""
        logger.info('Processing a received package message')
        if six.PY3:
            # Under Python 3 stdin is a text wrapper; detach() returns the
            # underlying binary buffer so the raw message bytes are read.
            self.input_file = self.input_file.detach()
        input_data = self.input_file.read()
        sent_to = self._get_to_address()
        dispatch.process(input_data, sent_to)
        logger.info('Completed processing a received package message')
    def _get_to_address(self):
        """
        Gets the envelope To address. The To address in the message cannot be
        used to determine to which package the mail was sent.
        This method tries to get the address from environment variables set by
        the MTA. Both Postfix and Exim are supported.

        Returns None when neither MTA's variables are present.
        """
        sent_to = os.environ.get('LOCAL_PART')
        if sent_to:
            # Exim: recipient arrives split into LOCAL_PART and DOMAIN
            sent_to = '{local_part}@{domain}'.format(
                local_part=sent_to,
                domain=os.environ.get('DOMAIN'))
        else:
            # Try Postfix, which supplies the full address directly
            sent_to = os.environ.get('ORIGINAL_RECIPIENT')
        return sent_to
|
afg984/pyardrone | pyardrone/navdata/options.py | Python | mit | 14,941 | 0 | '''
This module corresponds to ARDroneLib/Soft/Common/navdata_common.h
'''
import ctypes
import functools
from pyardrone.utils.structure import Structure
uint8_t = ctypes.c_uint8
uint16_t = ctypes.c_uint16
uint32_t = ctypes.c_uint32
int16_t = ctypes.c_int16
int32_t = ctypes.c_int32
bool_t = ctypes.c_uint32 # ARDroneTool's bool is 4 bytes
char = ctypes.c_char
float32_t = ctypes.c_float
NB_GYROS = 3
NB_ACCS = 3
NB_NAVDATA_DETECTION_RESULTS = 4
NB_CORNER_TRACKERS_WIDTH = 5
NB_CORNER_TRACKERS_HEIGHT = 4
DEFAULT_NB_TRACKERS_WIDTH = NB_CORNER_TRACKERS_WIDTH + 1
DEFAULT_NB_TRACKERS_HEIGHT = NB_CORNER_TRACKERS_HEIGHT + 1
NAVDATA_MAX_CUSTOM_TIME_SAVE = 20
_vector31_t = float32_t * 3
_velocities_t = _vector31_t
_vector21_t = float32_t * 2
_screen_point_t = int32_t * 2
_matrix33_t = float32_t * 3 * 3
class OptionHeader(dict):
    """Registry mapping a navdata option tag to its option class.

    ``register(tag)`` returns a decorator that records the decorated class
    under *tag*; duplicate tags are rejected with ``KeyError``.
    """

    def register(self, tag):
        """Return a decorator that registers its target under *tag*."""
        return functools.partial(self._register, tag)

    def _register(self, tag, function):
        """Store *function* under *tag* and return it; reject duplicates."""
        if tag not in self:
            self[tag] = function
            return function
        raise KeyError('Key {!r} conflict with existing item {}'.format(
            tag, self[tag]))

index = OptionHeader()
class Metadata(Structure):
    '''
    Header of :py:class:`~pyardrone.navdata.NavData`.
    Available via :py:class:`~pyardrone.navdata.NavData`.metadata
    Corresponds to C struct ``navdata_t``.
    '''
    _pack_ = 1
    _attrname = 'metadata'
    header = uint32_t #: Should be 0x55667788
    #: raw drone state,
    #: see also: :py:class:`~pyardrone.navdata.states.DroneState`
    state = uint32_t
    sequence_number = uint32_t #:
    vision_flag = uint32_t #:
class OptionHeader(Structure):
    # NOTE: this rebinds the module-level name ``OptionHeader`` (previously
    # the dict-based tag registry above). The ``index`` registry instance was
    # created before the rebinding, so tag lookups are unaffected; from here
    # on ``OptionHeader`` is the common base layout (tag, size) of every
    # navdata option block.
    _pack_ = 1
    tag = uint16_t
    size = uint16_t
@index.register(0)
class Demo(OptionHeader):
    '''
    Minimal navigation data for all flights.
    Corresponds to C struct ``navdata_demo_t``.
    '''
    _attrname = 'demo'
    #: Flying state (landed, flying, hovering, etc.)
    #: defined in CTRL_STATES enum.
    ctrl_state = uint32_t
    vbat_flying_percentage = uint32_t #: battery voltage filtered (mV)
    theta = float32_t #: UAV's pitch in milli-degrees
    phi = float32_t #: UAV's roll in milli-degrees
    psi = float32_t #: UAV's yaw in milli-degrees
    altitude = int32_t #: UAV's altitude in centimeters
    vx = float32_t #: UAV's estimated linear velocity
    vy = float32_t #: UAV's estimated linear velocity
    vz = float32_t #: UAV's estimated linear velocity
    #: streamed frame index (not used; to be integrated in the video stage)
    num_frames = uint32_t
    # Camera parameters computed by detection
    detection_camera_rot = _matrix33_t #: Deprecated ! Don't use !
    detection_camera_trans = _vector31_t #: Deprecated ! Don't use !
    detection_tag_index = uint32_t #: Deprecated ! Don't use !
    detection_camera_type = uint32_t #: Type of tag searched in detection
    # Camera parameters computed by the drone
    drone_camera_rot = _matrix33_t #: Deprecated ! Don't use !
    drone_camera_trans = _vector31_t #: Deprecated ! Don't use !
@index.register(1)
class Time(OptionHeader):
    '''
    Timestamp
    Corresponds to C struct ``navdata_time_t``.
    '''
    _attrname = 'time'
    #: 32 bit value where the 11 most significant bits represents the seconds,
    #: and the 21 least significant bits are the microseconds.
    time = uint32_t
@index.register(2)
class RawMeasures(OptionHeader):
    '''
    Raw sensors measurements
    Corresponds to C struct ``navdata_raw_measures_t``.
    '''
    _attrname = 'raw_measures'
    # +12 bytes
    raw_accs = uint16_t * NB_ACCS #: filtered accelerometers
    raw_gyros = int16_t * NB_GYROS #: filtered gyrometers
    raw_gyros_110 = int16_t * 2 #: gyrometers x/y 110 deg/s
    vbat_raw = uint32_t #: battery voltage raw (mV)
    # ultrasound altimeter echo measurements (French field names kept for
    # binary-layout parity with the firmware struct)
    us_debut_echo = uint16_t
    us_fin_echo = uint16_t
    us_association_echo = uint16_t
    us_distance_echo = uint16_t
    us_courbe_temps = uint16_t
    us_courbe_valeur = uint16_t
    us_courbe_ref = uint16_t
    flag_echo_ini = uint16_t
    # TODO: uint16_t frame_number from ARDrone_Magneto
    nb_echo = uint16_t
    sum_echo = uint32_t
    alt_temp_raw = int32_t
    gradient = int16_t
@index.register(21)
class PressureRaw(OptionHeader):
    'Corresponds to C struct ``navdata_pressure_raw_t``.'
    _attrname = 'pressure_raw'
    # raw barometer readings: up/ut are uncompensated pressure/temperature,
    # *_meas the measured values -- units per firmware docs (TODO confirm)
    up = int32_t
    ut = int16_t
    Temperature_meas = int32_t
    Pression_meas = int32_t
@index.register(22)
class Magneto(OptionHeader):
    'Corresponds to C struct ``navdata_magneto_t``.'
    _attrname = 'magneto'
    # mx/my/mz: per-axis magnetometer readings (presumably raw LSB; confirm)
    mx = int16_t
    my = int16_t
    mz = int16_t
    magneto_raw = _vector31_t #: magneto in the body frame, in mG
    magneto_rectified = _vector31_t
    magneto_offset = _vector31_t
    heading_unwrapped = float32_t
    heading_gyro_unwrapped = float32_t
    heading_fusion_unwrapped = float32_t
    magneto_calibration_ok = char
    magneto_state = uint32_t
    magneto_radius = float32_t
    error_mean = float32_t
    error_var = float32_t
@index.register(23)
class WindSpeed(OptionHeader):
    'Corresponds to C struct ``navdata_wind_speed_t``.'
    _attrname = 'wind_speed'
    wind_speed = float32_t #: estimated wind speed [m/s]
    #: estimated wind direction in North-East frame [rad] e.g.
    #: if wind_angle is pi/4, wind is from South-West to North-East
    wind_angle = float32_t
    wind_compensation_theta = float32_t
    wind_compensation_phi = float32_t
    # internal estimator state and debug values exposed by the firmware
    state_x1 = float32_t
    state_x2 = float32_t
    state_x3 = float32_t
    state_x4 = float32_t
    state_x5 = float32_t
    state_x6 = float32_t
    magneto_debug1 = float32_t
    magneto_debug2 = float32_t
    magneto_debug3 = float32_t
@index.register(24)
class KalmanPressure(OptionHeader):
    'Corresponds to C struct ``navdata_kalman_pressure_t``.'
    # FIX: repaired extraction-garbled tokens: the _attrname string and the
    # offset_US field name had stray split markers in them.
    _attrname = 'kalman_pressure'
    offset_pressure = float32_t
    est_z = float32_t
    est_zdot = float32_t
    est_bias_PWM = float32_t
    est_biais_pression = float32_t
    offset_US = float32_t
    prediction_US = float32_t
    cov_alt = float32_t
    cov_PWM = float32_t
    cov_vitesse = float32_t
    bool_effet_sol = bool_t
    somme_inno = float32_t
    flag_rejet_US = bool_t
    u_multisinus = float32_t
    gaz_altitude = float32_t
    Flag_multisinus = bool_t
    Flag_multisinus_debut = bool_t
@index.register(27)
class Zimmu3000(OptionHeader):
    'Corresponds to C struct ``navdata_zimmu_3000_t``.'
    _attrname = 'zimmu_3000'
    vzimmuLSB = int32_t
    vzfind = float32_t
@index.register(3)
class PhysMeasures(OptionHeader):
    'Corresponds to C struct ``navdata_phys_measures_t``.'
    _attrname = 'phys_measures'
    accs_temp = float32_t
    gyro_temp = uint16_t
    phys_accs = float32_t * NB_ACCS
    phys_gyros = float32_t * NB_GYROS
    alim3V3 = uint32_t #: 3.3volt alim [LSB]
    vrefEpson = uint32_t #: ref volt Epson gyro [LSB]
    vrefIDG = uint32_t #: ref volt IDG gyro [LSB]
@index.register(4)
class GyrosOffsets(OptionHeader):
    'Corresponds to C struct ``navdata_gyros_offsets_t``.'
    _attrname = 'gyros_offsets'
    offset_g = float32_t * NB_GYROS
@index.register(5)
class EulerAngles(OptionHeader):
    'Corresponds to C struct ``navdata_euler_angles_t``.'
    # NOTE: 'eular' is a typo, but the attribute name is part of the public
    # surface (NavData exposes options by _attrname), so it must stay.
    _attrname = 'eular_angles'
    theta_a = float32_t
    phi_a = float32_t
@index.register(6)
class References(OptionHeader):
    'Corresponds to C struct ``navdata_references_t``.'
    _attrname = 'references'
    ref_theta = int32_t
    ref_phi = int32_t
    ref_theta_I = int32_t
    ref_phi_I = int32_t
    ref_pitch = int32_t
    ref_roll = int32_t
    ref_yaw = int32_t
    ref_psi = int32_t
    vx_ref = float32_t
    vy_ref = float32_t
    theta_mod = float32_t
    phi_mod = float32_t
    k_v_x = float32_t
    k_v_y = float32_t
    k_mode = uint32_t
    ui_time = float32_t
    ui_theta = float32_t
    ui_phi = float32_t
    ui_psi = float32_t
    ui_psi_accuracy = float32_t
    ui_seq = int32_t
@index.register(7)
class Trims(OptionHeader):
'Corresponds to C struct ``navdata_trims_t``.'
_attrname = 'trims'
angular_rates_trim_r = flo |
liukaijv/XlsxWriter | xlsxwriter/test/comparison/test_chart_date04.py | Python | bsd-2-clause | 2,223 | 0 | ###############################################################################
#
# Tests for XlsxWriter.
#
# Copyright (c), 2013-2015, John McNamara, jmcnamara@cpan.org
#
from datetime import date
from ..excel_comparsion_test import ExcelComparisonTest
from ...workbook import Workbook
class TestCompareXLSXFiles(ExcelComparisonTest):
    """
    Test file created by XlsxWriter against a file created by Excel.
    (Repaired extraction-garbled lines in the dates list and add_series call.)
    """

    def setUp(self):
        self.maxDiff = None

        filename = 'chart_date04.xlsx'

        test_dir = 'xlsxwriter/test/comparison/'
        self.got_filename = test_dir + '_test_' + filename
        self.exp_filename = test_dir + 'xlsx_files/' + filename

        self.ignore_files = []
        # Number formats of date axes differ between writers; ignore them.
        self.ignore_elements = {'xl/charts/chart1.xml': ['<c:formatCode']}

    def test_create_file(self):
        """Test the creation of a simple XlsxWriter file with a date axis."""
        workbook = Workbook(self.got_filename)

        worksheet = workbook.add_worksheet()
        chart = workbook.add_chart({'type': 'line'})
        date_format = workbook.add_format({'num_format': 14})

        # Fixed axis ids so the output matches the Excel reference file.
        chart.axis_ids = [51761152, 51762688]

        worksheet.set_column('A:A', 12)

        dates = [date(2013, 1, 1),
                 date(2013, 1, 2),
                 date(2013, 1, 3),
                 date(2013, 1, 4),
                 date(2013, 1, 5),
                 date(2013, 1, 6),
                 date(2013, 1, 7),
                 date(2013, 1, 8),
                 date(2013, 1, 9),
                 date(2013, 1, 10)]

        values = [10, 30, 20, 40, 20, 60, 50, 40, 30, 30]

        worksheet.write_column('A1', dates, date_format)
        worksheet.write_column('B1', values)

        chart.add_series({
            'categories': '=Sheet1!$A$1:$A$10',
            'values': '=Sheet1!$B$1:$B$10',
        })

        chart.set_x_axis({
            'date_axis': True,
            'minor_unit': 1,
            'major_unit': 1,
            'minor_unit_type': 'months',
            'major_unit_type': 'years',
            'num_format': 'dd/mm/yyyy',
            'num_format_linked': True,
        })

        worksheet.insert_chart('E9', chart)

        workbook.close()

        self.assertExcelEqual()
|
dimaleks/uDeviceX | tests/dump/graph.full.py | Python | gpl-3.0 | 371 | 0.008086 | #!/usr/bin/env python
import mirheo as mir
ranks = (1, 1, 1)
domain = (4, 4, 4)
u = mir.Mirheo(ranks, d | omain, debug_level=3, log_filename='log', no | _splash=True)
u.save_dependency_graph_graphml("tasks.full", current=False)
# sTEST: dump.graph.full
# cd dump
# rm -rf tasks.graphml
# mir.run --runargs "-n 1" ./graph.full.py
# cat tasks.full.graphml > tasks.out.txt
|
coz787/conges4ac | actools/cg_minst.py | Python | gpl-2.0 | 12,134 | 0.020277 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
'''cg_minst.py: conges multi-installer
outil d'installation multiple adapté au portage des applications
conges d'un env vers un autre
'''
import getpass, sys, os, subprocess, pprint, string, re, time
from traceback import print_exc
from cg_lib import exec_sql_file
import MySQLdb
lcmde = ['dryrun', 'realrun']
scmd = ""
version = "1_0"
def usage(cmd):
    """Print command-line usage, including the expected conf-file schema.

    NOTE: the template below is user-facing runtime text (with its original
    French annotations) and is therefore left untouched.
    """
    fmt = ''' Usage: %s \
[--help] [--debug] --conf=<filename> --cmd=commande
   conf file should define
   {
   'rootdb': {'id':'root', 'pw':'' },
   # pw saisie en interactif
   'cdbprefixe': 'r1_',
   'cdbcharset': 'latin1',
   # les base de donnees seront nomme RRRRR_sg
   'crefpath' : '/tmp/work/v2ns',
   # endroit ou est installer logiciel de reference
   'cdbconnectpath' : 'dbconnect.php',
   # path relatif au fichier dbconnect.php
   'csqlinstallpath' : 'install/sql/php_conges_v1.4.2ac2_01.sql',
   # path relatif au fichier sql de creation initiale de la base de donnees
   'cdestpath': '/tmp/work/srv/www/conges',
   # endroit ou seront installes les instances logicielles
   'clinstance': [ ['sg','c_sg_dba','******'] ,
   ['dsacn','c_dsacn_dba','******'] ,
   ]
   # liste des instances : nom, usergestionnaire, passwd
   # si passwd est "", il sera demandé en interactif
   }
   cmd among [ %s ]
   '''
    print fmt % (cmd, string.join(lcmde," "))
def neatconflines(ofile):
    """Read *ofile* to EOF, drop '#' comment lines, join the rest with spaces.

    NOTE(review): ``sali[:-1]`` drops the last character of every kept line,
    assuming it is a newline -- a final line without a trailing newline would
    lose its last character; confirm against the conf files in use.
    """
    lrealine = []
    while 1 :
        sali = ofile.readline()
        if sali == "" : break
        elif sali[0] == "#" : pass # discard comment
        else:
            lrealine.append(sali[:-1])
    return string.join(lrealine)
def mygetopt(cmd,largs):
''' process argument and if success, return
command name, debug mode , odbifile= database access, la commande ,
un dictionnaire avec les options non traitees '''
lpathitem = string.split(sys.argv[0],'/')
sacmd = lpathitem[-1]
scom = "dryrun" # default
debug = 0
odbifile = None
dallopt, dropt = {}, {}
idx = 0
soptegex1 = re.compile('^--([^\=]+)\=(.*)')
soptegex2 = re.compile('^--(.*)')
while idx < len(largs) :
s1m = soptegex1.match(largs[idx])
if s1m :
dallopt[s1m.group(1)] = s1m.group(2)
else :
s2m = soptegex2.match(largs[idx])
if s2m :
dallopt[s2m.group(1)] = 1
else :
print "arg %s is not well formatted " % largs[idx]
usage(sacmd)
sys.exit(1)
idx += 1
for (skey,svalue) in dallopt.items() :
if skey == "help" :
usage(sacmd)
sys.exit(1)
elif skey == "debug":
debug = 1
elif skey == "conf" :
try:
odbifile = open(svalue)
except:
print "conf file %s cannot be opened" % svalue
usage(sacmd)
sys.exit(1)
elif skey == "cmd":
scom = svalue
if scom not in lcmde :
print " a commande among %s should be given" % string.join(lcmde," ")
usage(sacmd)
sys.exit(1)
else:
dropt[skey] = svalue
if odbifile == None :
print "--dbid option is mandatory"
usage(sacmd)
sys.exit(1)
if debug:
print "dallopt is ", dallopt
print "dropt is ", dropt
return sacmd, debug, odbifile, scom, dropt
def subcmd(lcommand,santemsg,saftermsg):
    """Run *lcommand* (argv list) via subprocess, framed by two progress messages.

    Stdout of the child is captured (and discarded); stderr is inherited.
    """
    sys.stdout.write(santemsg)
    sys.stdout.flush()
    op = subprocess.Popen(lcommand,stdout=subprocess.PIPE,stderr=None)
    output, err = op.communicate()
    sys.stdout.write(' '+saftermsg+'\n')
    sys.stdout.flush()
    # Leftover experiments with alternative subprocess invocations:
#    op = subprocess.Popen(lcommand, shell=True, stdout=subprocess.PIPE,stderr=subprocess.PIPE)
#          stdout=subprocess.PIPE, stderr=subprocess.PIPE)
#    output, err = op.communicate() # capture stdout n stderr but do not display
#    status =
#    subprocess.check_call(lcommand, shell=True)
    #if status :
    #    sys.stdout.write("status: %s\n" % status)
#    print santemsg + " ",
#    status = subprocess.call(lcommand, shell=True)
#    print saftermsg
def patch_dbconnect(dirpath, filename,dpara):
    ''' Patch dbconnect.php, substituting operational values for placeholders:
        $mysql_user="__CONGEDBA__" ;
        $mysql_pass="__CONGEPW__";
        $mysql_database= "__DBNAME__" ;
    The original file is kept alongside as <filename>.sav.
    '''
    # placeholder -> replacement pairs taken from the instance parameters
    lpattern = [["__CONGEDBA__" , dpara['dbauser']],
                ["__CONGEPW__" , dpara['dbapw']] ,
                ["__DBNAME__" , dpara['dbname']],
                ["__CHARSET__" , dpara['charset']],
                ]
    sfnname = "%s/%s" % (dirpath,filename)
    ssavname = sfnname + '.sav'
    try:
        os.rename(sfnname,ssavname)
    except:
        print_exc()
        print "%s file name cannot be renamed" % sfnname
        sys.exit(1)
    infile = open(ssavname, 'r')
    outfile = open(sfnname, 'w')
    # heavy-handed line-by-line substitution, but fine for a small file
    while True:
        aline = infile.readline()
        if aline == "" :
            break
        newline = aline
        for apat in lpattern :
            newline = string.replace(newline,apat[0],apat[1])
        outfile.write(newline)
    print "%s was patched" % sfnname
    infile.close()
    outfile.close()
if __name__ == '__main__':
#0 get parameter/arg
scmd, ndebug, odbif, scommand, dopt = mygetopt(sys.argv[0],sys.argv[1:])
#1.1 read conf file and check it brieffly
try:
dcgid = eval(neatconflines(odbif))
except:
print_exc()
print "database id file cannot be evaluated"
sys.exit(1)
if ndebug: print pprint.pformat(dcgid)
#1.2 ask root access and check access to mysql/mariadb
smysqlrpw = getpass.getpass("mysql root pw:")
try:
dcgid['rootdb']['pw'] = smysqlrpw
except:
print "conf file does not define ['rootdb']['pw'] key "
sys.exit(1)
odbconn_std, odbcursor_std = None, None
try:
odbconn_std = MySQLdb.connect(
"localhost",dcgid['rootdb']['id'],dcgid['rootdb']['pw'],
charset=dcgid['cdbcharset'], use_unicode=1)
odbcursor_std = odbconn_std.cursor()
except:
# print_exc()
print "root acces to mysql refused"
sys.exit(1)
if ndebug: print "root acces to mysql ok."
#1.3 check validity of crefpath
screfpath = None
try:
screfpath = dcgid['crefpath']
except:
print "ref path is not defined in conf file"
sys.exit(1)
# make sure screfpath does not end with /
nj = len(screfpath)
while nj > 0 :
if screfpath[nj - 1] == '/' :
nj -= 1
| else :
| break
screfpath = screfpath[0:nj]
if not os.access(screfpath, os.F_OK & os.R_OK) :
print "read acces to %s conges ref path refused" % dcgid['crefpath']
sys.exit(1)
screfpathdbconnect = None
try:
screfpathdbconnect = "%s/%s" % (dcgid['crefpath'],dcgid['cdbconnectpath'])
except:
print "ref path is not defined in conf file"
sys.exit(1)
if not os.access(screfpathdbconnect, os.F_OK & os.R_OK) :
print "read acces to %s file refused" % screfpathdbconnect
sys.exit(1)
scsqlinstallpath = None
try:
scsqlinstallpath = "%s/%s" % (dcgid['crefpath'],dcgid['csqlinstallpath'])
except:
print "csqlinstallpath is not defined in conf file"
sys.exit(1)
if not os.access(scsqlinstallpath, os.F_OK & os.R_OK) :
print "read acces to %s file refused" % scsqlinstallpath
sys.exit(1)
sdestpath = None
try:
sdestpath = dcgid['cdestpath']
except:
print "dest path is not defined in conf file"
sys.exit(1)
if not os.access(sdestpath, os.W_OK) :
print "write acces to %s conges dest path refused" % dcgid['cdestpath']
sys.exit(1)
#1.4 check and complete linstance definition
# (passwd captured interactively if required)
try:
leninstance = len(dcgid['clinstance'])
except:
print "'clinstance' is not defined in conf file"
sys.exit(1)
if leninstance == 0 :
print |
PALab/PLACE | place/utilities.py | Python | lgpl-3.0 | 3,034 | 0.001648 | """Helper utilities for PLACE data"""
from sys import argv
from os import remove
from os.path import basename, isdir, isfile
from itertools import count
from glob import glob
import numpy as np
def column_renamer():
    """Command-line tool for renaming columns of a NumPy structured array file.

    argv layout: <file.npy> [COLUMN_NUM NEW_NAME]... -- with no pairs the
    current column headings are listed instead.
    """
    if not (len(argv) > 1 and argv[1].endswith('.npy') and isfile(argv[1])):
        print('Usage:')
        print('  To display column headings:')
        print('    {} [FILE]'.format(basename(argv[0])))
        print('  To rename a column heading (or multiple headings):')
        print('    {} [FILE] [COLUMN_NUM] [NEW_COLUMN_NAME]...'.format(basename(argv[0])))
        print('')
        print('Example:')
        print('  {} data_001.npy 1 trace 2 data'.format(basename(argv[0])))
        return
    with open(argv[1], 'rb') as file_p:
        data = np.load(file_p)
    if len(argv) == 2:
        # no rename pairs: just list the existing headings
        for i, name in enumerate(data.dtype.names):
            print('{:2} {}'.format(i, name))
        return
    names = list(data.dtype.names)
    # argv pairs start at index 2; i tracks the expected argv length after
    # each (COLUMN_NUM, NEW_NAME) pair, stepping by 2
    for i in count(start=4, step=2):
        if len(argv) > i:
            names[int(argv[i-2])] = argv[i-1]
        elif len(argv) == i:
            # final complete pair
            names[int(argv[i-2])] = argv[i-1]
            break
        else:
            # odd argument count: a pair is incomplete; nothing was saved
            print('Invalid number of arguments - no changes made')
            return
    data.dtype.names = names
    print('Applying changes...', end='')
    with open(argv[1], 'wb') as file_p:
        np.save(file_p, data)
    print('done!')
def single_file():
    """Command-line entry point for packing PLACE row files into one array.

    Usage: takes one argument (a directory) from ``argv`` and delegates to
    :func:`build_single_file`; otherwise prints usage and returns.
    """
    if not (len(argv) == 2 and isdir(argv[1])):
        # BUG FIX: the usage string was printed without .format(), leaving a
        # literal '{}' in the output instead of the program name.
        print('Usage: {} [DIRECTORY]'.format(basename(argv[0])))
        print('Pack PLACE data_XXX.npy files into a single file.')
        return
    build_single_file(argv[1])
def build_single_file(directory):
    """Pack the individual row files into one NumPy structured array.

    Reads every ``data_*.npy`` row file in *directory* (sorted order), stacks
    the first row of each into one structured array, writes it as
    ``data.npy`` and removes the row files.
    (FIX: repaired extraction-garbled tokens in the def line and load loop.)
    """
    files = sorted(glob('{}/data_*.npy'.format(directory)))
    num = len(files)
    if num == 0:
        print('No PLACE data_*.npy files found in {}'.format(directory))
        return
    # Use the first row file as a template for dtype and field layout.
    with open(files[0], 'rb') as file_p:
        row = np.load(file_p)
    data = np.resize(row, (num,))
    for i, filename in enumerate(files):
        with open(filename, 'rb') as file_p:
            row = np.load(file_p)
        data[i] = row[0]
    # 'xb' fails rather than silently overwriting an existing packed file.
    with open('{}/data.npy'.format(directory), 'xb') as file_p:
        np.save(file_p, data)
    for filename in files:
        remove(filename)
def multiple_files():
    """Unpack one NumPy structured array (``data.npy``) into per-row files.

    Takes one directory argument from ``argv``; writes ``data_NNN.npy`` for
    each row and deletes the packed file.
    """
    if not (len(argv) == 2 and isdir(argv[1])):
        # BUG FIX: the usage string was printed without .format(), leaving a
        # literal '{}' in the output instead of the program name.
        print('Usage: {} [DIRECTORY]'.format(basename(argv[0])))
        print('Unpack PLACE data.npy file into multiple files.')
        return
    directory = argv[1]
    with open('{}/data.npy'.format(directory), 'rb') as file_p:
        data = np.load(file_p)
    for i, row in enumerate(data):
        # 'xb' fails rather than overwriting an existing row file
        with open('{}/data_{:03d}.npy'.format(directory, i), 'xb') as file_p:
            np.save(file_p, [row])
    remove('{}/data.npy'.format(directory))
|
patmarion/director | src/python/director/debugVis.py | Python | bsd-3-clause | 8,815 | 0.005786 | import director.vtkAll as vtk
from director import vtkNumpy as vnp
from director.shallowCopy import shallowCopy
import numpy as np
class DebugData(object):
    def __init__(self):
        # all geometry added through the add* methods is accumulated into a
        # single vtkAppendPolyData pipeline
        self.append = vtk.vtkAppendPolyData()
    def write(self, filename):
        """Write the accumulated geometry to a VTK XML polydata file."""
        writer = vtk.vtkXMLPolyDataWriter()
        writer.SetInputConnection(self.append.GetOutputPort())
        writer.SetFileName(filename)
        writer.Update()
    def addPolyData(self, polyData, color=[1,1,1], extraLabels=None):
        '''
        Add a vtkPolyData to the debug data. A color can be provided.
        If the extraLabels argument is used, it should be a list of tuples,
        each tuple is (labelName, labelValue) where labelName is a string and
        labelValue is an int or float. An array with labelName will be filled
        with labelValue and added to the poly data.
        Note: the mutable default color is safe here -- it is only read.
        '''
        polyData = shallowCopy(polyData)
        if color is not None:
            # per-point uint8 RGB array; color components are assumed in [0, 1]
            colorArray = np.empty((polyData.GetNumberOfPoints(), 3), dtype=np.uint8)
            colorArray[:,:] = np.array(color)*255
            vnp.addNumpyToVtk(polyData, colorArray, 'RGB255')
        if extraLabels is not None:
            for labelName, labelValue in extraLabels:
                extraArray = np.empty((polyData.GetNumberOfPoints(), 1), dtype=type(labelValue))
                extraArray[:] = labelValue
                vnp.addNumpyToVtk(polyData, extraArray, labelName)
        self.append.AddInputData(polyData)
def addLine(self, p1, p2, radius=0.0, color=[1,1,1]):
line = vtk.vtkLineSource()
| line.SetPoint1(p1)
line.SetPoint2(p2)
line.Update()
poly | Data = line.GetOutput()
if radius > 0.0:
polyData = applyTubeFilter(polyData, radius)
self.addPolyData(polyData, color)
    def addPolyLine(self, points, isClosed=False, radius=0.0, color=[1,1,1]):
        """Add a polyline through *points*; radius > 0 renders it as a tube."""
        pts = vnp.getVtkPointsFromNumpy(np.array(points, dtype=np.float64))
        polyLine = vtk.vtkPolyLineSource()
        polyLine.SetPoints(pts)
        polyLine.SetClosed(isClosed)
        polyLine.Update()
        polyData = polyLine.GetOutput()
        if radius > 0:
            polyData = applyTubeFilter(polyData, radius)
        self.addPolyData(polyData, color)
    def addFrame(self, frame, scale, tubeRadius=0.0):
        """Draw a coordinate frame as red/green/blue x/y/z axis lines."""
        origin = np.array([0.0, 0.0, 0.0])
        axes = [[scale, 0.0, 0.0], [0.0, scale, 0.0], [0.0, 0.0, scale]]
        colors = [[1,0,0], [0,1,0], [0,0,1]]
        # transform origin and axis vectors into the frame's coordinates
        frame.TransformPoint(origin, origin)
        for axis, color in zip(axes, colors):
            frame.TransformVector(axis, axis)
            self.addLine(origin, origin+axis, radius=tubeRadius, color=color)
    def addCircle(self, origin, normal, radius, color=[1,1,1]):
        """Add a circle outline (a zero-height, unfilled cone)."""
        self.addCone(origin, normal, radius, height=0, color=color, fill=False)
    def addCone(self, origin, normal, radius, height, color=[1,1,1], fill=True):
        """Add a cone; fill=False adds only its extracted edges."""
        cone = vtk.vtkConeSource()
        cone.SetRadius(radius)
        cone.SetCenter(origin)
        cone.SetDirection(normal)
        cone.SetHeight(height)
        cone.SetResolution(32)
        if fill:
            cone.Update()
            self.addPolyData(cone.GetOutput(), color)
        else:
            edges = vtk.vtkExtractEdges()
            edges.AddInputConnection(cone.GetOutputPort())
            edges.Update()
            self.addPolyData(edges.GetOutput(), color)
    def addArrow(self, start, end, headRadius=0.05, headLength=None, tubeRadius=0.01, color=[1,1,1], startHead=False, endHead=True):
        """Add an arrow from start to end with optional cone heads at either end."""
        if headLength is None:
            headLength = headRadius
        normal = np.array(end) - np.array(start)
        normal = normal / np.linalg.norm(normal)
        # shorten the shaft so each head cone is centered on the endpoint
        if startHead:
            start = np.array(start) + 0.5 * headLength * normal
        if endHead:
            end = np.array(end) - 0.5 * headLength * normal
        self.addLine(start, end, radius=tubeRadius, color=color)
        if startHead:
            self.addCone(origin=start, normal=-normal, radius=headRadius,
                         height=headLength, color=color, fill=True)
        if endHead:
            self.addCone(origin=end, normal=normal, radius=headRadius,
                         height=headLength, color=color, fill=True)
    def addSphere(self, center, radius=0.05, color=[1,1,1], resolution=24):
        """Add a sphere at *center*."""
        sphere = vtk.vtkSphereSource()
        sphere.SetCenter(center)
        sphere.SetThetaResolution(resolution)
        sphere.SetPhiResolution(resolution)
        sphere.SetRadius(radius)
        sphere.Update()
        self.addPolyData(sphere.GetOutput(), color)
    def addCube(self, dimensions, center, color=[1,1,1], subdivisions=0):
        """Add an axis-aligned box of the given (x, y, z) dimensions."""
        bmin = np.array(center) - np.array(dimensions)/2.0
        bmax = np.array(center) + np.array(dimensions)/2.0
        cube = vtk.vtkTessellatedBoxSource()
        cube.SetBounds(bmin[0], bmax[0], bmin[1], bmax[1], bmin[2], bmax[2])
        cube.SetLevel(subdivisions)
        cube.QuadsOn()
        cube.Update()
        self.addPolyData(cube.GetOutput(), color)
    def addPlane(self, origin, normal, width, height, resolution=1, color=[1,1,1]):
        """Add a rectangular plane centered at *origin* facing *normal*."""
        plane = vtk.vtkPlaneSource()
        plane.SetOrigin(-width/2.0, -height/2.0, 0.0)
        plane.SetPoint1(width/2.0, -height/2.0, 0.0)
        plane.SetPoint2(-width/2.0, height/2.0, 0.0)
        plane.SetCenter(origin)
        plane.SetNormal(normal)
        plane.SetResolution(resolution, resolution)
        plane.Update()
        self.addPolyData(plane.GetOutput(), color)
    def addCylinder(self, center, axis, length, radius, color=[1,1,1]):
        """Add a cylinder (rendered as a tubed line) along *axis*."""
        axis = np.asarray(axis) / np.linalg.norm(axis)
        center = np.array(center)
        self.addLine(center - 0.5*length*axis, center + 0.5*length*axis, radius=radius, color=color)
    def addCapsule(self, center, axis, length, radius, color=[1,1,1]):
        """Add a capsule: a cylinder capped with a sphere at each end."""
        axis = np.asarray(axis) / np.linalg.norm(axis)
        center = np.array(center)
        self.addCylinder(center=center, axis=axis, radius=radius, length=length, color=color)
        self.addSphere(center=center-0.5*length*axis, radius=radius, color=color)
        self.addSphere(center=center+0.5*length*axis, radius=radius, color=color)
    def addTorus(self, radius, thickness, resolution=30):
        """Add a torus with its hole axis aligned to z (used for valves)."""
        q = vtk.vtkSuperquadricSource()
        q.SetToroidal(1)
        q.SetSize(radius)
        q.SetThetaResolution(resolution)
        # thickness doesn't seem to match Euclidean units: 0 is none, 1 is full; .1 is a good valve
        q.SetThickness(thickness)
        q.Update()
        # rotate torus so that the hole axis (internally y) becomes z, which we use for valves
        transform = vtk.vtkTransform()
        transform.RotateWXYZ(90,1,0,0)
        transformFilter=vtk.vtkTransformPolyDataFilter()
        transformFilter.SetTransform(transform)
        transformFilter.SetInputConnection(q.GetOutputPort())
        transformFilter.Update()
        self.addPolyData(transformFilter.GetOutput())
    def addEllipsoid(self, center, radii, resolution=24, color=[1,1,1]):
        """
        Add an ellipsoid centered at [center] with x, y, and z principal axis
        radii given by radii = [x_scale, y_scale, z_scale].
        Implemented as a unit sphere scaled then translated.
        """
        sphere = vtk.vtkSphereSource()
        sphere.SetCenter([0,0,0])
        sphere.SetThetaResolution(resolution)
        sphere.SetPhiResolution(resolution)
        sphere.SetRadius(1.0)
        sphere.Update()
        transform = vtk.vtkTransform()
        transform.Translate(center)
        transform.Scale(radii)
        transformFilter = vtk.vtkTransformPolyDataFilter()
        transformFilter.SetTransform(transform)
        transformFilter.SetInputConnection(sphere.GetOutputPort())
        transformFilter.Update()
        self.addPolyData(transformFilter.GetOutput(), color)
def addPolygon(self, points, color=[1,1,1]):
points = vnp.getVtkPointsFromNumpy(np.array(points, dtype=np.float64))
polygon = vtk.vtkPolygon()
polygon.GetPointIds().SetNumberOfIds(points.GetNumberOfPoints())
for i in range(points.GetNumberOfPoints()):
polygon.GetPointIds().SetId(i, i)
polyDa |
rado0x54/project-euler | python/problem0052.py | Python | mit | 651 | 0.003072 | #!/usr/bin/env python3
"""Project Euler - Problem 52 Module"""
import pelib
def problem52():
    """Problem 52 - Permuted multiples.

    Find the smallest positive integer x such that 2x, 3x, 4x, 5x and 6x all
    contain exactly the same digits.

    Returns:
        int: the smallest such x (142857).
    """
    x = 1
    while True:
        # Compare digit *multisets* (sorted digit strings). The original
        # compared digit sets, which wrongly treats e.g. 122 and 212 as
        # having "the same digits" as 12.  (Also repaired extraction-garbled
        # tokens in the original body.)
        reference = sorted(str(2 * x))
        if all(sorted(str(k * x)) == reference for k in range(3, 7)):
            return x
        x += 1
def run():
    """Default run method: solve problem 52 and return its answer."""
    return problem52()
if __name__ == '__main__':
print("Result: ", run())
|
devs1991/test_edx_docmode | cms/djangoapps/contentstore/views/import_export.py | Python | agpl-3.0 | 21,168 | 0.002646 | """
These views handle all actions in Studio related to import and exporting of
courses
"""
import base64
import logging
import os
import re
import shutil
import tarfile
from path import Path as path
from tempfile import mkdtemp
from django.conf import settings
from django.contrib.auth.decorators import login_required
from django.core.exceptions import SuspiciousOperation, PermissionDenied
from django.core.files.temp import NamedTemporaryFile
from django.core.servers.basehttp import FileWrapper
from django.http import HttpResponse, HttpResponseNotFound
from django.utils.translation import ugettext as _
from django.views.decorators.csrf import ensure_csrf_cookie
from django.views.decorators.http import require_http_methods, require_GET
import dogstats_wrapper as dog_stats_api
from edxmako.shortcuts import render_to_response
from xmodule.contentstore.django import contentstore
from xmodule.exceptions import SerializationError
from xmodule.modulestore.django import modulestore
from opaque_keys.edx.keys import CourseKey
from opaque_keys.edx.locator import LibraryLocator
from xmodule.modulestore.xml_importer import import_course_from_xml, import_library_from_xml
from xmodule.modulestore.xml_exporter import export_course_to_xml, export_library_to_xml
from xmodule.modulestore import COURSE_ROOT, LIBRARY_ROOT
from student.auth import has_course_author_access
from openedx.core.lib.extract_tar import safetar_extractall
from util.json_request import JsonResponse
from util.views import ensure_valid_course_key
from models.settings.course_metadata import CourseMetadata
from contentstore.views.entrance_exam import (
add_entrance_exam_milestone,
remove_entrance_exam_milestone_reference
)
from contentstore.utils import reverse_course_url, reverse_usage_url, reverse_library_url
# Public view entry points exposed by this module (import_status_handler and
# export_handler are presumably defined further down in the file).
__all__ = [
    'import_handler', 'import_status_handler',
    'export_handler',
]

log = logging.getLogger(__name__)

# Regex to capture Content-Range header ranges, e.g. "0-1023/4096"
# (start-stop/end, each up to 11 digits).
CONTENT_RE = re.compile(r"(?P<start>\d{1,11})-(?P<stop>\d{1,11})/(?P<end>\d{1,11})")
@login_required
@ensure_csrf_cookie
@require_http_methods(("GET", "POST", "PUT"))
@ensure_valid_course_key
def import_handler(request, course_key_string):
    """
    The restful handler for importing a course.

    GET
        html: return html page for import page
        json: not supported
    POST or PUT
        json: import a course via the .tar.gz file specified in request.FILES
    """
    courselike_key = CourseKey.from_string(course_key_string)
    if isinstance(courselike_key, LibraryLocator):
        # Libraries and courses share the import machinery; select the
        # library-specific pieces here.
        root_name = LIBRARY_ROOT
        successful_url = reverse_library_url('library_handler', courselike_key)
        context_name = 'context_library'
        courselike_module = modulestore().get_library(courselike_key)
        import_func = import_library_from_xml
    else:
        root_name = COURSE_ROOT
        successful_url = reverse_course_url('course_handler', courselike_key)
        context_name = 'context_course'
        courselike_module = modulestore().get_course(courselike_key)
        import_func = import_course_from_xml
    return _import_handler(
        request, courselike_key, root_name, successful_url, context_name,
        courselike_module, import_func
    )
def _import_handler(request, courselike_key, root_name, successful_url, context_name, courselike_module, import_func):
"""
Parameterized function containing the meat of import_handler.
"""
if not has_course_author_access(request.user, courselike_key):
raise PermissionDenied()
if 'application/json' in request.META.get('HTTP_ACCEPT', 'application/json'):
if request.method == 'GET':
raise NotImplementedError('coming soon')
else:
# Do everything in a try-except block to make sure everything is properly cleaned up.
try:
data_root = path(settings.GITHUB_REPO_ROOT)
subdir = base64.urlsafe_b64encode(repr(courselike_key))
course_dir = data_root / subdir
filename = request.FILES['course-data'].name
# Use sessions to keep info about import progress
session_status = request.session.setdefault("import_status", {})
courselike_string = unicode(courselike_key) + filename
_save_request_status(request, courselike_stri | ng, 0)
# If the course has an entrance exam then remove it and its corresponding milestone.
# current course state before import.
if root_name == COURSE_ROOT:
if courselike_module. | entrance_exam_enabled:
remove_entrance_exam_milestone_reference(request, courselike_key)
log.info(
"entrance exam milestone content reference for course %s has been removed",
courselike_module.id
)
if not filename.endswith('.tar.gz'):
_save_request_status(request, courselike_string, -1)
return JsonResponse(
{
'ErrMsg': _('We only support uploading a .tar.gz file.'),
'Stage': -1
},
status=415
)
temp_filepath = course_dir / filename
if not course_dir.isdir():
os.mkdir(course_dir)
logging.debug('importing course to {0}'.format(temp_filepath))
# Get upload chunks byte ranges
try:
matches = CONTENT_RE.search(request.META["HTTP_CONTENT_RANGE"])
content_range = matches.groupdict()
except KeyError: # Single chunk
# no Content-Range header, so make one that will work
content_range = {'start': 0, 'stop': 1, 'end': 2}
# stream out the uploaded files in chunks to disk
if int(content_range['start']) == 0:
mode = "wb+"
else:
mode = "ab+"
size = os.path.getsize(temp_filepath)
# Check to make sure we haven't missed a chunk
# This shouldn't happen, even if different instances are handling
# the same session, but it's always better to catch errors earlier.
if size < int(content_range['start']):
_save_request_status(request, courselike_string, -1)
log.warning(
"Reported range %s does not match size downloaded so far %s",
content_range['start'],
size
)
return JsonResponse(
{
'ErrMsg': _('File upload corrupted. Please try again'),
'Stage': -1
},
status=409
)
# The last request sometimes comes twice. This happens because
# nginx sends a 499 error code when the response takes too long.
elif size > int(content_range['stop']) and size == int(content_range['end']):
return JsonResponse({'ImportStatus': 1})
with open(temp_filepath, mode) as temp_file:
for chunk in request.FILES['course-data'].chunks():
temp_file.write(chunk)
size = os.path.getsize(temp_filepath)
if int(content_range['stop']) != int(content_range['end']) - 1:
# More chunks coming
return JsonResponse({
"files": [{
"name": filename,
"size": size,
"deleteUrl": "",
"deleteType": "",
"url": reverse_co |
rdio/translate-toolkit | filters/test_checks.py | Python | gpl-2.0 | 65,604 | 0.005247 | # -*- coding: utf-8 -*-
from py.test import mark
from translate.filters import checks
from translate.lang import data
from translate.storage import po, xliff
def strprep(str1, str2, message=None):
    """Normalize both strings (and the optional message) to unicode."""
    return tuple(data.normalized_unicode(text) for text in (str1, str2, message))
def passes(filterfunction, str1, str2):
    """returns whether the given strings pass on the given test, handling FilterFailures"""
    str1, str2, no_message = strprep(str1, str2)
    try:
        filterresult = filterfunction(str1, str2)
    except checks.FilterFailure:
        # The bound exception value was never used; a bare except clause is
        # also valid in both Python 2 and 3.  A filter failure simply means
        # the check did not pass.
        filterresult = False
    return filterresult
def fails(filterfunction, str1, str2, message=None):
    """returns whether the given strings fail on the given test, handling only FilterFailures"""
    str1, str2, message = strprep(str1, str2, message)
    try:
        filterresult = filterfunction(str1, str2)
    except checks.SeriousFilterFailure, e:
        # A *serious* failure is not the plain failure we are testing for, so
        # treat the check as not having failed in the expected way.
        filterresult = True
    except checks.FilterFailure, e:
        if message:
            # Only count it as a failure when it failed with the expected
            # message (compare the first message of the exception).
            exc_message = e.messages[0]
            filterresult = exc_message != message
            print exc_message.encode('utf-8')
        else:
            filterresult = False
    return not filterresult
def fails_serious(filterfunction, st | r1, str2, message=None):
"""returns whether the given strings fail on the given test, handling only SeriousFilterFailures"""
str1, str2, message = strprep(str1, str2, message)
try:
filterresult = filterfunction(str1, str2)
except checks.SeriousFilterFailure, e:
if message:
exc_message = e.messages[0]
filterresult = exc_message != message
print exc_messag | e.encode('utf-8')
else:
filterresult = False
return not filterresult
def test_defaults():
    """tests default setup and that checks aren't altered by other constructions"""
    checker = checks.StandardChecker()
    assert checker.config.varmatches == []
    # Constructing another checker type must not mutate shared defaults.
    checks.MozillaChecker()
    checker = checks.StandardChecker()
    assert checker.config.varmatches == []
def test_construct():
    """tests that the checkers can be constructed"""
    for checker_class in (checks.StandardChecker, checks.MozillaChecker,
                          checks.OpenOfficeChecker, checks.GnomeChecker,
                          checks.KdeChecker):
        checker_class()
def test_accelerator_markers():
    """test that we have the correct accelerator marker for the various default configs"""
    expectations = [
        (checks.StandardChecker, []),
        (checks.MozillaChecker, ["&"]),
        (checks.OpenOfficeChecker, ["~"]),
        (checks.GnomeChecker, ["_"]),
        (checks.KdeChecker, ["&"]),
    ]
    for checker_class, markers in expectations:
        assert checker_class().config.accelmarkers == markers
def test_messages():
    """test that our helpers can check for messages and that these error messages can contain Unicode"""
    stdchecker = checks.StandardChecker(checks.CheckerConfig(validchars='ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz'))
    # The expected failure message itself contains a non-ASCII character (©).
    assert fails(stdchecker.validchars, "Some unexpected characters", "©", "Invalid characters: '©' (\\u00a9)")
    stdchecker = checks.StandardChecker()
    # Serious failure path with a non-ASCII message.
    assert fails_serious(stdchecker.escapes, r"A tab", r"'n Ṱab\t", r"""Escapes in original () don't match escapes in translation ('Ṱab\t')""")
def test_accelerators():
    """tests accelerators"""
    stdchecker = checks.StandardChecker(checks.CheckerConfig(accelmarkers="&"))
    assert passes(stdchecker.accelerators, "&File", "&Fayile")
    assert fails(stdchecker.accelerators, "&File", "Fayile")
    assert fails(stdchecker.accelerators, "File", "&Fayile")
    # A doubled "&&" is a literal ampersand, not an accelerator.
    assert passes(stdchecker.accelerators, "Mail && News", "Pos en Nuus")
    assert fails(stdchecker.accelerators, "Mail & News", "Pos en Nuus")
    assert passes(stdchecker.accelerators, "&Allow", u'&\ufeb2\ufee3\ufe8e\ufea3')
    assert fails(stdchecker.accelerators, "Open &File", "Vula& Ifayile")
    kdechecker = checks.KdeChecker()
    assert passes(kdechecker.accelerators, "&File", "&Fayile")
    assert fails(kdechecker.accelerators, "&File", "Fayile")
    assert fails(kdechecker.accelerators, "File", "&Fayile")
    gnomechecker = checks.GnomeChecker()
    assert passes(gnomechecker.accelerators, "_File", "_Fayile")
    assert fails(gnomechecker.accelerators, "_File", "Fayile")
    assert fails(gnomechecker.accelerators, "File", "_Fayile")
    assert fails(gnomechecker.accelerators, "_File", "_Fayil_e")
    # Mozilla reports accelerator problems as *serious* failures.
    mozillachecker = checks.MozillaChecker()
    assert passes(mozillachecker.accelerators, "&File", "&Fayile")
    assert passes(mozillachecker.accelerators, "Warn me if this will disable any of my add&-ons", "&Waarsku my as dit enige van my byvoegings sal deaktiveer")
    assert fails_serious(mozillachecker.accelerators, "&File", "Fayile")
    assert fails_serious(mozillachecker.accelerators, "File", "&Fayile")
    assert passes(mozillachecker.accelerators, "Mail & News", "Pos en Nuus")
    assert fails_serious(mozillachecker.accelerators, "Mail & News", "Pos en &Nuus")
    assert fails_serious(mozillachecker.accelerators, "&File", "Fayile")
    ooochecker = checks.OpenOfficeChecker()
    assert passes(ooochecker.accelerators, "~File", "~Fayile")
    assert fails(ooochecker.accelerators, "~File", "Fayile")
    assert fails(ooochecker.accelerators, "File", "~Fayile")
    # We don't want an accelerator for letters with a diacritic
    assert fails(ooochecker.accelerators, "F~ile", "L~êer")
    # Bug 289: accept accented accelerator characters
    afchecker = checks.StandardChecker(checks.CheckerConfig(accelmarkers="&", targetlanguage="fi"))
    assert passes(afchecker.accelerators, "&Reload Frame", "P&äivitä kehys")
    # Problems:
    # Accelerator before variable - see test_acceleratedvariables
@mark.xfail(reason="Accelerated variables needs a better implementation")
def test_acceleratedvariables():
    """test for accelerated variables"""
    # FIXME: disabled since acceleratedvariables has been removed, but these checks are still needed
    # The accelerator marker must precede a letter, not the variable placeholder.
    mozillachecker = checks.MozillaChecker()
    assert fails(mozillachecker.acceleratedvariables, "%S &Options", "&%S Ikhetho")
    assert passes(mozillachecker.acceleratedvariables, "%S &Options", "%S &Ikhetho")
    ooochecker = checks.OpenOfficeChecker()
    assert fails(ooochecker.acceleratedvariables, "%PRODUCTNAME% ~Options", "~%PRODUCTNAME% Ikhetho")
    assert passes(ooochecker.acceleratedvariables, "%PRODUCTNAME% ~Options", "%PRODUCTNAME% ~Ikhetho")
def test_acronyms():
    """tests acronyms: acronyms in the source text should survive translation"""
    stdchecker = checks.StandardChecker()
    assert passes(stdchecker.acronyms, "An HTML file", "'n HTML leer")
    assert fails(stdchecker.acronyms, "An HTML file", "'n LMTH leer")
    assert passes(stdchecker.acronyms, "It is HTML.", "Dit is HTML.")
    # We don't mind if you add an acronym to correct bad capitalisation in the original
    assert passes(stdchecker.acronyms, "An html file", "'n HTML leer")
    # We shouldn't worry about acronyms that appear in a musttranslate file
    stdchecker = checks.StandardChecker(checks.CheckerConfig(musttranslatewords=["OK"]))
    assert passes(stdchecker.acronyms, "OK", "Kulungile")
    # Assert punctuation should not hide accronyms
    assert fails(stdchecker.acronyms, "Location (URL) not found", "Blah blah blah")
    # Test '-W' (bug 283)
    assert passes(stdchecker.acronyms, "%s: option `-W %s' is ambiguous", "%s: opsie '-W %s' is dubbelsinnig")
def test_blank():
    """tests blank"""
    checker = checks.StandardChecker()
    # A whitespace-only translation of real text must fail the check.
    assert fails(checker.blank, "Save as", " ")
    assert fails(checker.blank, "_: KDE comment\\n\nSimple string", " ")
def test_brackets():
"""tests brackets"""
stdchecker = checks.StandardChecker()
assert passes(stdchecker.brackets, "N number(s)", "N getal(le)")
assert fails(stdchecker.brackets, "For {sic} numbers", "Vier getalle")
assert fails(stdchecker.bracke |
laurentb/mpdat | mpdat/process.py | Python | mit | 735 | 0.001361 | from os.path import dirname
"""
Process MPDatClient responses
"" | "
def get_files_and_dirs_from_db(items):
    """
    Returns (files, directories) from a source with files and directories mixed.

    Each item is a mapping; items carrying a "directory" key contribute to the
    directory list, items carrying a "file" key to the file list.  Items with
    neither key are ignored.  (Corrupted token "fi | les" restored.)
    """
    files = []
    dirs = []
    for item in items:
        if "directory" in item:
            dirs.append(item["directory"])
        elif "file" in item:
            files.append(item["file"])
    return (files, dirs)
def process_song(item):
    """
    Adds a "dir" attribute to songs, converts "pos" and "time" to int.

    The item dict is modified in place and also returned.
    """
    if "file" in item:
        item["dir"] = dirname(item["file"])
    for key in ("pos", "time"):
        if key in item:
            item[key] = int(item[key])
    return item
|
rasmadeus/rasmadeus.ru | rapp/article/views.py | Python | gpl-3.0 | 1,716 | 0.001748 | from django.http import HttpResponse
from django.template.loader import get_template
from rapp.article.models import Article
from django.views.generic.detail import DetailView
from django.views.generic.list import ListView
def _get_greeting(request):
return request.user.username if request.user.is_authenticated() else "everyone"
def _get_default_common_data():
return {
'common_data': {
'title': 'Developer blog',
'keywords': 'c++, python, society life',
'description': 'Developer blog',
'author': 'K. Kulikov'
}
}
def index(request):
    """Render the blog's landing page.

    (Corrupted token "_get_ | greeting" restored.)
    """
    template = get_template('index.html')
    context = {
        'greeting': _get_greeting(request),
        'article': _get_default_common_data()
    }
    return HttpResponse(template.render(context, request))
def code_404_view(request):
    """Render the 404 page with the default article metadata."""
    context = {
        'greeting': _get_greeting(request),
        'article': _get_default_common_data(),
    }
    return HttpResponse(get_template('404.html').render(context, request))
class ArticleDetailView(DetailView):
    """Detail page for a single Article; the template sees it as 'article'."""
    model = Article
    context_object_name = 'article'

    def get_context_data(self, **kwargs):
        context = super(ArticleDetailView, self).get_context_data(**kwargs)
        context.update(greeting=_get_greeting(self.request))
        return context
class ArticleListView(ListView):
    """List page for all articles; the template sees them as 'articles'."""
    model = Article
    context_object_name = 'articles'

    def get_context_data(self, **kwargs):
        context = super(ArticleListView, self).get_context_data(**kwargs)
        context.update(
            article=_get_default_common_data(),
            greeting=_get_greeting(self.request),
        )
        return context
|
Zuiev/shipping-cost | app.py | Python | apache-2.0 | 2,545 | 0.009037 | #!/usr/bin/env python
import urllib
import json
import os
import smtplib
from flask import Flask
from flask import request
from flask import make_response
# Flask app should start in global layout
# (module-level so the WSGI server can find `app`).
app = Flask(__name__)
@app.route('/webhook', methods=['POST'])
def webhook():
    """API.AI webhook entry point: parse the request, dispatch, reply as JSON."""
    req = request.get_json(silent=True, force=True)
    print("Request:")
    print(json.dumps(req, indent=4))

    res = json.dumps(makeWebhookResult(req), indent=4)
    print(res)

    response = make_response(res)
    response.headers['Content-Type'] = 'application/json'
    return response
def _notify_by_email(recipient, body):
    """Send *body* as a notification email to *recipient* via Gmail SMTP.

    SECURITY: the credentials below are hard-coded in source; they should be
    moved to environment variables or a secrets store.
    """
    server = smtplib.SMTP('smtp.gmail.com', 587)
    server.starttls()
    server.login("diegosocialmood@gmail.com", "patricia12")
    server.sendmail("diegosocialmood@gmail.com", recipient, body)
    server.quit()


def makeWebhookResult(req):
    """Build the API.AI webhook response dict for the parsed request *req*.

    Handles two actions:
      * ``shipping.cost`` -- flat shipping price per zone.
      * ``flowers``       -- flower price per color.

    Returns a response dict for a known action, and None (implicitly) for any
    other action, matching the original behavior.  (Corrupted leading "|" on
    the first "speech" key restored; duplicated SMTP code extracted into
    _notify_by_email.)
    """
    result = req.get("result")
    action = result.get("action") if result else None

    if action == "shipping.cost":
        parameters = result.get("parameters")
        zone = parameters.get("shipping-zone")
        cost = {'Europe': 100, 'North America': 200, 'South America': 300,
                'Asia': 400, 'Africa': 500}
        speech = "The cost of shipping to " + zone + " is " + str(cost[zone]) + " euros."
        print("Response:")
        print(speech)
        _notify_by_email("diegosocialmood@gmail.com", zone)
        return {
            "speech": speech,
            "displayText": speech,
            "source": "apiai-onlinestore-shipping"
        }

    if action == "flowers":
        parameters = result.get("parameters")
        color = parameters.get("colors")
        cost = {'red': 10, 'blue': 17, 'green': 13}
        speech = "The price of a " + color + " flower is " + str(cost[color]) + " euros."
        print("Response:")
        print(speech)
        # Note: the flowers notification goes to a different recipient,
        # exactly as in the original code.
        _notify_by_email("zitvura@gmail.com", color)
        return {
            "speech": speech,
            "displayText": speech,
            "source": "apiai-onlinestore-shipping"
        }
if __name__ == '__main__':
port = int(os.getenv('PORT', 5000))
print "Starting app on port %d" % port
app.run(debug=T | rue, port=port, host='0.0.0.0')
|
msegado/edx-platform | lms/djangoapps/shoppingcart/tests/test_reports.py | Python | agpl-3.0 | 12,763 | 0.004154 | # -*- coding: utf-8 -*-
"""
Tests for the Shopping Cart Models
"""
import datetime
from textwrap import dedent
import pytest
import pytz
from django.conf import settings
from mock import patch
import six
from six import StringIO
from six import text_type
from course_modes.models import CourseMode
from shoppingcart.models import (
CertificateItem,
CourseRegCodeItemAnnotation,
Order,
PaidCourseRegistration,
PaidCourseRegistrationAnnotation
)
from shoppingcart.views import initialize_report
from student.models import CourseEnrollment
from student.tests.factories import UserFactory
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory
class ReportTypeTests(ModuleStoreTestCase):
"""
Tests for the models used to generate certificate status reports
"""
FIVE_MINS = datetime.timedelta(minutes=5)
@patch('student.models.CourseEnrollment.refund_cutoff_date')
def setUp(self, cutoff_date):
super(ReportTypeTests, self).setUp()
cutoff_date.return_value = datetime.datetime.now(pytz.UTC) + datetime.timedelta(days=1)
# Need to make a *lot* of users for this one
self.first_verified_user = UserFactory.create(profile__name="John Doe")
self.second_verified_user = UserFactory.create(profile__name="Jane Deer")
self.first_audit_user = UserFactory.create(profile__name="Joe Miller")
self.second_audit_user = UserFactory.create(profile__name="Simon Blackquill")
self.third_audit_user = UserFactory.create(profile__name="Super Mario")
self.honor_user = UserFactory.create(profile__name="Princess Peach")
self.first_refund_user = UserFactory.create(profile__name="King Bowsér")
self.second_refund_user = UserFactory.create(profile__name="Súsan Smith")
# Two are verified, three are audit, one honor
self.cost = 40
self.course = CourseFactory.create(org='MITx', number='999', display_name=u'Robot Super Course')
self.course_key = self.course.id
course_mode = CourseMode(course_id=self.course_key,
mode_slug="honor",
mode_display_name="honor cert",
min_price=self.cost)
course_mode.save()
course_mode2 = CourseMode(course_id=self.course_key,
mode_slug="verified",
mode_display_name="verified cert",
min_price=self.cost)
course_mode2.save()
# User 1 & 2 will be verified
self.cart1 = Order.get_cart_for_user(self.first_verified_user)
CertificateItem.add_to_order(self.cart1, self.course_key, self.cost, 'verified')
self.cart1.purchase()
self.cart2 = Order.get_cart_for_user(self.second_verified_user)
CertificateItem.add_to_order(self.cart2, self.course_key, self.cost, 'verified')
self.cart2.purchase()
# Users 3, 4, and 5 are audit
CourseEnrollment.enroll(self.first_audit_user, self.course_key, "audit")
CourseEnrollment.enroll(self.second_audit_user, self.course_key, "audit")
CourseEnrollment.enroll(self.third_audit_user, self.course_key, "audit")
# User 6 is honor
CourseEnrollment.enroll(self.honor_user, self.course_key, "honor")
self.now = datetime.datetime.now(pytz.UTC)
# Users 7 & 8 are refunds
self.cart = Order.get_cart_for_user(self.first_refund_user)
CertificateItem.add_to_order(self.cart, self.course_key, self.cost, 'verified')
self.cart.purchase()
CourseEnrollment.unenroll(self.first_refund_user, self.course_key)
self.cart = Order.get_cart_for_user(self.second_refund_user)
CertificateItem.add_to_order(self.cart, self.course_key, self.cost, 'verified')
self.cart.purchase(self.second_refund_user.username, self.course_key)
CourseEnrollment.unenroll(self.second_refund_user, self.course_key)
self.test_time = datetime.datetime.now(pytz.UTC)
first_refund = CertificateItem.objects.get(id=3)
first_refund.fulfilled_time = self.test_time
first_refund.refund_requested_time = self.test_time
first_refund.save()
second_refund = CertificateItem.objects.get(id=4)
second_refund.fulfilled_time = self.test_time
second_ref | und.refund_requested_time = self.test_time
second_refund.save()
self | .CORRECT_REFUND_REPORT_CSV = dedent(u"""
Order Number,Customer Name,Date of Original Transaction,Date of Refund,Amount of Refund,Service Fees (if any)
3,King Bowsér,{time_str},{time_str},40.00,0.00
4,Súsan Smith,{time_str},{time_str},40.00,0.00
""".format(time_str=str(self.test_time)))
self.CORRECT_CERT_STATUS_CSV = dedent("""
University,Course,Course Announce Date,Course Start Date,Course Registration Close Date,Course Registration Period,Total Enrolled,Audit Enrollment,Honor Code Enrollment,Verified Enrollment,Gross Revenue,Gross Revenue over the Minimum,Number of Verified Students Contributing More than the Minimum,Number of Refunds,Dollars Refunded
MITx,999 Robot Super Course,,,,,6,3,1,2,80.00,0.00,0,2,80.00
""".format(time_str=str(self.test_time)))
self.CORRECT_UNI_REVENUE_SHARE_CSV = dedent("""
University,Course,Number of Transactions,Total Payments Collected,Service Fees (if any),Number of Successful Refunds,Total Amount of Refunds
MITx,999 Robot Super Course,6,80.00,0.00,2,80.00
""".format(time_str=str(self.test_time)))
def test_refund_report_rows(self):
report = initialize_report("refund_report", self.now - self.FIVE_MINS, self.now + self.FIVE_MINS)
refunded_certs = report.rows()
# check that we have the right number
self.assertEqual(len(list(refunded_certs)), 2)
self.assertTrue(CertificateItem.objects.get(user=self.first_refund_user, course_id=self.course_key))
self.assertTrue(CertificateItem.objects.get(user=self.second_refund_user, course_id=self.course_key))
def test_refund_report_purchased_csv(self):
"""
Tests that a generated purchase report CSV is as we expect
"""
report = initialize_report("refund_report", self.now - self.FIVE_MINS, self.now + self.FIVE_MINS)
csv_file = StringIO()
report.write_csv(csv_file)
csv = csv_file.getvalue()
csv_file.close()
# Using excel mode csv, which automatically ends lines with \r\n, so need to convert to \n
self.assertEqual(
csv.replace('\r\n', '\n').strip() if six.PY3 else csv.replace('\r\n', '\n').strip().decode('utf-8'),
self.CORRECT_REFUND_REPORT_CSV.strip()
)
@pytest.mark.skip(reason="Fails in django 2.1 and above and the app is deprecated, hence skipping it")
def test_basic_cert_status_csv(self):
report = initialize_report("certificate_status", self.now - self.FIVE_MINS, self.now + self.FIVE_MINS, 'A', 'Z')
csv_file = StringIO()
report.write_csv(csv_file)
csv = csv_file.getvalue()
self.assertEqual(csv.replace('\r\n', '\n').strip(), self.CORRECT_CERT_STATUS_CSV.strip())
@pytest.mark.skip(reason="Fails in django 2.1 and above and the app is deprecated, hence skipping it")
def test_basic_uni_revenue_share_csv(self):
report = initialize_report("university_revenue_share", self.now - self.FIVE_MINS, self.now + self.FIVE_MINS, 'A', 'Z')
csv_file = StringIO()
report.write_csv(csv_file)
csv = csv_file.getvalue()
self.assertEqual(csv.replace('\r\n', '\n').strip(), self.CORRECT_UNI_REVENUE_SHARE_CSV.strip())
class ItemizedPurchaseReportTest(ModuleStoreTestCase):
"""
Tests for the models used to generate itemized purchase reports
"""
FIVE_MINS = datetime.timedelta(minutes=5)
TEST_ANNOTATION = u'Ba\xfc\u5305'
def setUp(self):
super(ItemizedPurchaseReportTest, self).setUp()
self |
Hao-Liu/avocado | selftests/functional/test_utils.py | Python | gpl-2.0 | 4,216 | 0.001186 | import os
import sys
import time
import tempfile
import shutil
if sys.version_info[:2] == (2, 6):
import unittest2 as unittest
else:
import unittest
from avocado.utils import process
# A fake `vmstat` replacement script: prints a plausible-looking, randomly
# generated vmstat row at a fixed interval until it receives SIGINT/SIGTERM.
# (Corrupted tokens "bo | =" and "ti | me.sleep" restored.)
FAKE_VMSTAT_CONTENTS = """#!/usr/bin/python
import time
import random
import signal
import sys
class FakeVMStat(object):
    def __init__(self, interval):
        self.interval = interval
        self._sysrand = random.SystemRandom()
        def interrupt_handler(signum, frame):
            sys.exit(0)
        signal.signal(signal.SIGINT, interrupt_handler)
        signal.signal(signal.SIGTERM, interrupt_handler)
    def get_r(self):
        return self._sysrand.randint(0, 2)
    def get_b(self):
        return 0
    def get_swpd(self):
        return 0
    def get_free(self):
        return self._sysrand.randint(1500000, 1600000)
    def get_buff(self):
        return self._sysrand.randint(290000, 300000)
    def get_cache(self):
        return self._sysrand.randint(2900000, 3000000)
    def get_si(self):
        return 0
    def get_so(self):
        return 0
    def get_bi(self):
        return self._sysrand.randint(0, 50)
    def get_bo(self):
        return self._sysrand.randint(0, 500)
    def get_in(self):
        return self._sysrand.randint(200, 3000)
    def get_cs(self):
        return self._sysrand.randint(1000, 4000)
    def get_us(self):
        return self._sysrand.randint(0, 40)
    def get_sy(self):
        return self._sysrand.randint(1, 5)
    def get_id(self):
        return self._sysrand.randint(50, 100)
    def get_wa(self):
        return 0
    def get_st(self):
        return 0
    def start(self):
        print("procs -----------memory---------- ---swap-- -----io---- -system-- ------cpu-----")
        print(" r b swpd free buff cache si so bi bo in cs us sy id wa st")
        while True:
            r = self.get_r()
            b = self.get_b()
            swpd = self.get_swpd()
            free = self.get_free()
            buff = self.get_buff()
            cache = self.get_cache()
            si = self.get_si()
            so = self.get_so()
            bi = self.get_bi()
            bo = self.get_bo()
            m_in = self.get_in()
            cs = self.get_cs()
            us = self.get_us()
            sy = self.get_sy()
            m_id = self.get_id()
            wa = self.get_wa()
            st = self.get_st()
            print("%2d %2d %2d %7d %6d %7d %1d %1d %2d %3d %4d %2d %2d %1d %3d %1d %1d" %
                  (r, b, swpd, free, buff, cache, si, so, bi, bo, m_in, cs,
                   us, sy, m_id, wa, st))
            time.sleep(self.interval)
if __name__ == '__main__':
    vmstat = FakeVMStat(interval=float(sys.argv[1]))
    vmstat.start()
"""
FAKE_UPTIME_CONTENTS = """#!/usr/bin/python
if __name__ == '__main__':
print("17:56:34 up 8:06, 7 users, load average: 0.26, 0.20, 0.21")
"""
class ProcessTest(unittest.TestCase):
    """Functional checks for avocado.utils.process using fake system tools."""

    def setUp(self):
        # Write the fake `vmstat` and `uptime` scripts into a private temp
        # directory and make them executable (0775, Python 2 octal literal).
        self.base_logdir = tempfile.mkdtemp(prefix='avocado_process_functional')
        self.fake_vmstat = os.path.join(self.base_logdir, 'vmstat')
        with open(self.fake_vmstat, 'w') as fake_vmstat_obj:
            fake_vmstat_obj.write(FAKE_VMSTAT_CONTENTS)
        os.chmod(self.fake_vmstat, 0775)
        self.fake_uptime = os.path.join(self.base_logdir, 'uptime')
        with open(self.fake_uptime, 'w') as fake_uptime_obj:
            fake_uptime_obj.write(FAKE_UPTIME_CONTENTS)
        os.chmod(self.fake_uptime, 0775)

    def test_process_start(self):
        # Start the fake vmstat with a 1s interval, let it produce output for
        # a few seconds, then terminate it and inspect the captured stdout.
        proc = process.SubProcess('%s 1' % self.fake_vmstat)
        proc.start()
        time.sleep(3)
        proc.terminate()
        proc.wait()
        stdout = proc.get_stdout()
        self.assertIn('memory', stdout, 'result: %s' % stdout)
        self.assertRegexpMatches(stdout, '[0-9]+')

    def test_process_run(self):
        # A plain run of the fake uptime should exit 0 and print its line.
        proc = process.SubProcess(self.fake_uptime)
        result = proc.run()
        self.assertEqual(result.exit_status, 0, 'result: %s' % result)
        self.assertIn('load average', result.stdout)

    def tearDown(self):
        shutil.rmtree(self.base_logdir)
# Allow running this test module directly from the command line.
if __name__ == '__main__':
    unittest.main()
|
AugustG98/NWT-Bot | NWT-Bot/books.py | Python | mit | 5,132 | 0.049493 | bookprefix = {
'Genesis' : '1',
'genesis' : '1',
'Gen' : '1',
'gen' : '1',
'Exodus' : '2',
'exodus' : '2',
'Exo' : '2',
'exo' : '2',
'Ex' : '2',
'ex' : '2',
'Leviticus' : '3',
'leviticus' : '3',
'Lev' : '3',
'lev' : '3',
'Numbers' : '4',
'numbers' : '4',
'Numb' : '4',
'numb' : '4',
'Num' : '4',
'num' : '4',
'Deuteronomy' : '5',
'deuteronomy' : '5',
'Deut' : '5',
'deut' : '5',
'Joshua' : '6',
'joshua' : '6',
'Josh' : '6',
'josh' : '6',
'Judges' : '7' ,
'judges' : '7' ,
'Judg' : '7',
'judg' : '7',
'Ruth' : '8',
'ruth' : '8',
'1Samuel' : '9',
'1samuel' : '9',
'1Sam' : '9',
'1sam' : '9',
'2Samuel' : '10',
'2samuel' : '10',
'2Sam' : '10',
'2sam' : '10',
'1Kings' : '11',
'1kings' : '11',
'1King' : '11',
'1king' : '11',
'1Ki' : '11',
'1ki' : '11',
'2Kings' : '12',
'2kings' : '12',
'2King' : '12',
'2king' : '12',
'2Ki' : '12',
'2ki' : '12',
'1Chronicles' : '13',
'1chronicles' : '13',
'1Chron' : '13',
'1chron' : '13',
'2Chronicles' : '14',
'2chronicles' : '14',
'2Chron' : '14',
'2chron' : '14',
'Ezra' : '15',
'ezra' : '15',
'Nehemiah' : '16',
'nehemiah' : '16',
'Neh' : '16',
'neh' : '16',
'Esther' : '17',
'esther' : '17',
'Job' : '18',
'job' : '18',
'Psalms' : '19',
'psalms' : '19',
'Psalm' : '19',
'psalm' : '19',
'Ps' : '19',
'ps' : '19',
'Proverbs' : '20',
'proverbs' : '20',
'Proverb' : '20',
'proverb' : '20',
'Prov' : '20',
'prov' : '20',
'Ecclesiastes' : '21',
'ecclesiastes' : '21',
'Eccl' : '21',
'eccl' : '21',
'SongofSolomon' : '22',
'songofSolomon' : '22',
'songofsolomon' : '22',
'SongofSol' : '22',
'songofSol' : '22',
'songofsol' : '22',
'Isaiah' : '23',
'isaiah' : '23',
'Isa' : '23',
'isa' : '23',
'Jeremiah' : '24',
'jeremiah' : '24',
'Jer' : '24',
'jer' : '24',
'Lamentations' : '25',
'lamentations' : '25',
'Lam' : '25',
'lam' : '25',
'Ezekiel' : '26',
'ezekiel' : '26',
'Ez' : '26',
'ez' : '26',
'Daniel' : '27',
'daniel' : '27',
'Dan' : '27',
'dan' : '27',
'Hosea' : '28',
'hosea' : '28',
'Hos' : '28',
'hos' : '28',
'Joel' : '29',
'joel' : '29',
'Amos' : '30',
'amos' : '30',
'Obadiah' : '31',
'obadiah' : '31',
'Obad' : '31',
'obad' : '31',
'Jonah' : '32',
'jonah' : '32',
'Micah' : '33',
'micah' : '33',
'Mic' : '33',
'mic' : '33',
'Nahum' : '34' ,
'nahum' : '34' ,
'Nah' : '34',
'nah' : '34',
'Habakkuk' : '35',
'habakkuk' : '35',
'Hab' : '35',
'hab' : '35',
'Zephaniah' : '36',
'zephaniah' : '36',
'Zeph' : '36',
'zeph' : '36',
'Haggai' : '37',
'haggai' : '37',
'Hag' : '37',
'hag' : '37',
'Zechariah' : '38',
'zechariah' : '38',
'Zech' : '38',
'zech' : '38',
'Malachi' : '39',
'malachi' : '39',
'Mal' : '39',
'mal' : '39',
'Matthew' : '40',
'matthew' : '40',
'Matt' : '40',
'matt' : '40',
'Mark' : '41',
'mark' : '41',
'Luke' : '42',
'luke' : '42',
'John' : '43',
'john' : '43',
'Acts' : '44',
'acts' : '44',
'Act' : '44',
'act' : '44',
'Romans' : '45',
'romans' : '45',
'Rom' : '45',
'rom' : '45',
'1Corinthians' : '46',
'1corinthians' : '46',
'1Cor' : '46',
'1cor' : '46',
'2Corinthians' : '47',
'2corinthians' : '47',
'2Cor' : '47',
'2cor' : '47',
'Galatians' : '48',
'galatians' : '48',
'Gal' : '48',
'gal' : '48',
'Ephesians' : '49',
'ephesians' : '49',
'Eph' : '49',
'eph' : '49',
'Philippians' : '50',
'philippians' : '50',
'Phil' : '50',
'phil' : '50',
'Colossians' : '51',
'colossians' : '51',
'Col' : '51',
'col' : '51',
'1Thessalonians' : '52',
'1thessalonians' : '52',
'1Thess' : '52',
'1thess' : '52',
'2Thessalonians' : '53',
'2thessalonians' : '53',
'2Thess' : '53',
'2thess' : '53',
'1Timothy' : '54',
'1timothy' : '54',
'1 | Tim' : '54',
'1tim' : '54',
'2Timothy' : '55',
'2timothy' : '55',
'2Tim' : '55',
'2tim' : '55',
'Titus' : '56',
'titus' : '56',
'Philemon' : '57',
'philemon' : '57',
'Philem' : '57',
' | philem' : '57',
'Hebrews' : '58',
'hebrews' : '58',
'Heb' : '58',
'heb' : '58',
'James' : '59',
'james' : '59',
'Jas' : '59',
'jas' : '59',
'1Peter' : '60',
'1peter' : '60',
'1Pet' : '60',
'1pet' : '60',
'2Peter' : '61',
'2peter' : '61',
'2Pet' : '61',
'2pet' : '61',
'1John' : '62',
'1john' : '62',
'2John' : '63',
'2john' : '63',
'3John' : '64',
'3john' : '64',
'Jude' : '65',
'jude' : '65',
'Revelation' : '66',
'revelation' : '66',
'Rev' : '66',
'rev' : '66'
}
|
humdings/zipline | tests/utils/test_cache.py | Python | apache-2.0 | 2,166 | 0 | from unittest import TestCase
from pandas import Timestamp, Timedelta
from zipline.utils.cache import CachedObject, Expired, ExpiringCache
class CachedObjectTestCase(TestCase):
def test_cached_object(self):
expiry = Timestamp('2014')
before = expiry - Timedelta('1 minute')
after = expiry + Timedelta('1 minute')
obj = CachedObject(1, expiry)
self.assertEqual(obj.unwrap(before), 1)
self.assertEqual(obj.unwrap(expiry), 1) # Unwrap on expiry is allowed.
with self.assertRaises(Expired) as e:
obj.unwrap(after)
self.assertEqual(e.exception.args, (expiry,))
def test_expired(self):
always_expired = CachedObject.expired()
for dt in Timestamp.min, Timestamp.now(), Timestamp.max:
with self.assertRaises(Expired):
always_expired.unwrap(dt)
class ExpiringCacheTestCase(TestCase):
def test_expiring_cache(self):
expiry_1 = Timestamp('2014')
before_1 = expiry_1 - Timedelta('1 minute')
after_1 = expiry_1 + Timedelta('1 minute')
expiry_2 = Timestamp('2015')
after_2 = expiry_1 + Timedelta('1 minute')
expiry_3 = Timestamp('2016')
cache = ExpiringCache()
c | ache.set('foo', 1, expiry_1)
cache.set('bar', 2, expiry_2)
self.assertEqual(cache.get('fo | o', before_1), 1)
# Unwrap on expiry is allowed.
self.assertEqual(cache.get('foo', expiry_1), 1)
with self.assertRaises(KeyError) as e:
self.assertEqual(cache.get('foo', after_1))
self.assertEqual(e.exception.args, ('foo',))
# Should raise same KeyError after deletion.
with self.assertRaises(KeyError) as e:
self.assertEqual(cache.get('foo', before_1))
self.assertEqual(e.exception.args, ('foo',))
# Second value should still exist.
self.assertEqual(cache.get('bar', after_2), 2)
# Should raise similar KeyError on non-existent key.
with self.assertRaises(KeyError) as e:
self.assertEqual(cache.get('baz', expiry_3))
self.assertEqual(e.exception.args, ('baz',))
|
warmersun/lessonplan2 | warmersun_lessonplans_site/lessonplan/migrations/0003_homepage.py | Python | gpl-2.0 | 816 | 0.002451 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import wagtail.wagtailcore.fields
class Migration(migrations.Migration):
dependencies = [
('wagtailcore', '0001_squashed_0016_change_page_url_path_to_text_field'),
('lessonplan', '0002_auto_20150818_0717'),
]
operations = [
migrations.CreateModel(
name='HomePage',
fields=[
| ('page_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='wagtailcore.Pa | ge')),
('body', wagtail.wagtailcore.fields.RichTextField(blank=True)),
],
options={
'abstract': False,
},
bases=('wagtailcore.page',),
),
]
|
thom-at-redhat/cfme_tests | fixtures/log.py | Python | gpl-2.0 | 4,066 | 0.003443 | import collections
import pytest
from utils import log
#: A dict of tests, and their state at various test phases
test_tracking = collections.defaultdict(dict)
# | Expose the cfme logger as a fixture for convenience
@pytest.fixture(scope='session')
def logger():
return log.logger
@pytest.mark.hookwrapper
def pytest_runtest_setup(item):
path, lineno, domaininfo = item.location
logger().info(log.format_marker(_format_nodeid(item.nodeid), mark="-"),
extra={'source_file': path, 'source_lineno': lineno})
yield
def pytest_collection_modifyitems(session, config, items):
logger( | ).info(log.format_marker('Starting new test run', mark="="))
expression = config.getvalue('keyword') or False
expr_string = ', will filter with "%s"' % expression if expression else ''
logger().info('Collected %i items%s' % (len(items), expr_string))
@pytest.mark.hookwrapper
def pytest_runtest_logreport(report):
# e.g. test_tracking['test_name']['setup'] = 'passed'
# test_tracking['test_name']['call'] = 'skipped'
# test_tracking['test_name']['teardown'] = 'failed'
yield
test_tracking[_format_nodeid(report.nodeid, False)][report.when] = report.outcome
if report.when == 'teardown':
path, lineno, domaininfo = report.location
logger().info(log.format_marker('%s result: %s' % (_format_nodeid(report.nodeid),
_test_status(_format_nodeid(report.nodeid, False)))),
extra={'source_file': path, 'source_lineno': lineno})
if report.outcome == "skipped":
# Usualy longrepr's a tuple, other times it isn't... :(
try:
longrepr = report.longrepr[-1]
except AttributeError:
longrepr = str(report.longrepr)
logger().info(log.format_marker(longrepr))
def pytest_exception_interact(node, call, report):
# Despite the name, call.excinfo is a py.code.ExceptionInfo object. Its traceback property
# is similarly a py.code.TracebackEntry. The following lines, including "entry.lineno+1" are
# based on the code there, which does unintuitive things with a traceback's line number.
# This is the same code that powers py.test's output, so we gain py.test's magical ability
# to get useful AssertionError output by doing it this way, which makes the voodoo worth it.
entry = call.excinfo.traceback.getcrashentry()
logger().error(call.excinfo.getrepr(),
extra={'source_file': entry.path, 'source_lineno': entry.lineno + 1})
def pytest_sessionfinish(session, exitstatus):
c = collections.Counter()
for test in test_tracking:
c[_test_status(test)] += 1
# Prepend a total to the summary list
results = ['total: %d' % sum(c.values())] + map(lambda n: '%s: %d' % (n[0], n[1]), c.items())
# Then join it with commas
summary = ', '.join(results)
logger().info(log.format_marker('Finished test run', mark='='))
logger().info(log.format_marker(str(summary), mark='='))
def _test_status(test_name):
test_phase = test_tracking[test_name]
# Test failure in setup or teardown is an error, which pytest doesn't report internally
if 'failed' in (test_phase.get('setup', 'failed'), test_phase.get('teardown', 'failed')):
return 'error'
# A test can also be skipped
elif 'skipped' in test_phase.get('setup', 'skipped'):
return 'skipped'
# Otherwise, report the call phase outcome (passed, skipped, or failed)
else:
return test_phase['call']
def _format_nodeid(nodeid, strip_filename=True):
# Remove test class instances and filenames, replace with a dot to impersonate a method call
nodeid = nodeid.replace('::()::', '.')
# Trim double-colons to single
nodeid = nodeid.replace('::', ':')
# Strip filename (everything before and including the first colon)
if strip_filename:
try:
return nodeid.split(':', 1)[1]
except IndexError:
# No colon to split on, return the whole nodeid
return nodeid
else:
return nodeid
|
yunity/yunity-core | karrot/utils/serializers.py | Python | agpl-3.0 | 97 | 0 | from rest_framework import serializers |
class EmptySerializer(serializers.Serializer):
| pass
|
tariqdaouda/pyGeno | pyGeno/tools/parsers/CSVTools.py | Python | apache-2.0 | 10,715 | 0.052543 | import os, types, collections
class EmptyLine(Exception) :
"""Raised when an empty or comment line is found (dealt with internally)"""
def __init__(self, lineNumber) :
message = "Empty line: #%d" % lineNumber
Exception.__init__(self, message)
self.message = message
def __str__(self) :
return self.message
def removeDuplicates(inFileName, outFileName) :
"""removes duplicated lines from a 'inFileName' CSV file, the results are witten in 'outFileName'"""
f = open(inFileName)
legend = f.readline()
data = ''
h = {}
h[legend] = 0
lines = f.readlines()
for l in lines :
if l not in h :
h[l] = 0
data += l
f.flush()
f.close()
f = open(outFileName, 'w')
f.write(legend+data)
f.flush()
f.close()
def catCSVs(folder, ouputFileName, removeDups = False) :
"""Concatenates all csv in 'folder' and wites the results in 'ouputFileName'. My not work on non Unix systems"""
strCmd = r"""cat %s/*.csv > %s""" %(folder, ouputFileName)
os.system(strCmd)
if removeDups :
removeDuplicates(ouputFileName, ouputFileName)
def joinCSVs(csvFilePaths, column, ouputFileName, separator = ',') :
"""csvFilePaths should be an iterable. Joins all CSVs according to the values in the column 'column'. Write the results in a new file 'ouputFileName' """
res = ''
legend = []
csvs = []
for f in csvFilePaths :
c = CSVFile()
c.parse(f)
csvs.append(c)
legend.append(separator.join(list(c.legend.keys())))
legend = separator.join(legend)
lines = []
for i in range(len(csvs[0])) :
val = csvs[0].get(i, column)
line = separator.join(csvs[0][i])
for c in csvs[1:] :
for j in range(len(c)) :
if val == c.get(j, column) :
line += separator + separator.join(c[j])
lines.append( line )
res = legend + '\n' + '\n'.join(lines)
f = open(ouputFileName, 'w')
f.write(res)
f.flush()
f.close()
return res
class CSVEntry(object) :
"""A single entry in a CSV file"""
def __init__(self, csvFile, lineNumber = None) :
self.csvFile = csvFile
self.data = []
if lineNumber != None :
self.lineNumber = lineNumber
tmpL = csvFile.lines[lineNumber].replace('\r', '\n').replace('\n', '')
if len(tmpL) == 0 or tmpL[0] in ["#", "\r", "\n", csvFile.lineSeparator] :
raise EmptyLine(lineNumber)
tmpData = tmpL.split(csvFile.separator)
# tmpDatum = []
i = 0
while i < len(tmpData) :
# for d in tmpData :
d = tmpData[i]
sd = d.strip()
if len(sd) > 0 and sd[0] == csvFile.stringSeparator :
more = []
for i in range(i, len(tmpData)) :
more.append(tmpData[i])
i+=1
if more[-1][-1] == csvFile.stringSeparator :
break
self.data.append(",".join(more)[1:-1])
# if len(tmpDatum) > 0 or (len(sd) > 0 and sd[0] == csvFile.stringSeparator) :
# tmpDatum.append(sd)
# if len(sd) > 0 and sd[-1] == csvFile.stringSeparator :
# self.data.append(csvFile.separator.join(tmpDatum))
# tmpDatum = []
else :
self.data.append(sd)
i += 1
else :
self.lineNumber = len(csvFile)
for i in range(len(self.csvFile.legend)) :
self.data.append('')
def commit(self) :
"""commits the line so it is added to a file stream"""
self.csvFile.commitLine(self)
def __iter__(self) :
self.currentField = -1
return self
def __next__(self) :
self.currentField += 1
if self.currentField >= len(self.csvFile.legend) :
raise StopIteration
k = list(self.csvFile.legend.keys())[self.currentField]
v = self.data[self.currentField]
return k, v
def __getitem__(self, key) :
"""Returns the value of field 'key'"""
try :
indice = self.csvFile.legend[key.lower()]
except KeyError :
raise KeyError("CSV File has no column: '%s'" % key)
return self.data[indice]
def __setitem__(self, key, value) :
"""Sets the value of field 'key' to 'value' """
try :
field = self.csvFile.legend[key.lower()]
except KeyError :
self.csvFile.addField(key)
field = self.csvFile.legend[key.lower()]
self.data.append(str(value))
else :
try:
self.data[field] = str(value)
except Exception as e:
for i in range(field-len(self.data)+1) :
self.data.append("")
self.data[field] = str(value)
def toStr(self) :
return self.csvFile.separator.join(self.data)
def __repr__(self) :
r = {}
for k, v in self.csvFile.legend.items() :
r[k] = self.data[v]
return "<line %d: %s>" %(self.lineNumber, str(r))
def __str__(self) :
return repr(self)
class CSVFile(object) :
"""
Represents a whole CSV file::
#reading
f = CSVFile()
f.parse('hop.csv')
for line in f :
print(line['ref'])
#writing, legend can either be a list of a dict {field : column number}
f = CSVFile(legend = ['name', 'email'])
l = f.newLine()
l['name'] = 'toto'
l['email'] = "hop@gmail.com"
for field, value in l :
print(field, value)
f.save('myCSV.csv')
"""
def __init__(self, legend = [], separator = ',', lineSeparator = '\n') :
self.legend = collections.OrderedDict()
for i in range(len(legend)) :
if legend[i].lower() in self.legend :
raise ValueError("%s is already in the legend" % legend[i].lower())
self.legend[legend[i].lower()] = i
self.strLegend = separator.join(legend)
self.filename = ""
self.lines = []
self.separator = separator
self.lineSeparator = lineSeparator
self.currentPos = -1
self.streamFile = None
self.writeRate = None
self.streamBuffer = None
self.keepInMemory = True
def addField(self, field) :
"""add a filed to the legend"""
if field.lower() in self.legend :
raise ValueError("%s is already in the legend" % field.lower())
self.legend[field.lower()] = len(self.legend)
if len(self.strLegend) > 0 :
self.strLegend += self.separator + field
else :
self.strLegend += field
def parse(self, filePath, skipLines=0, separator = ',', stringSepar | ator = '"', lineSeparator = '\n') :
"""Loads a CSV file"""
self.filename = filePath
f = open(filePath)
if lineSeparator == '\n' :
lines = f.readlin | es()
else :
lines = f.read().split(lineSeparator)
f.flush()
f.close()
lines = lines[skipLines:]
self.lines = []
self.comments = []
for l in lines :
if len(l) != 0 and l[0] != "#" :
self.lines.append(l)
elif l[0] == "#" :
self.comments.append(l)
self.separator = separator
self.lineSeparator = lineSeparator
self.stringSeparator = stringSeparator
self.legend = collections.OrderedDict()
i = 0
for c in self.lines[0].lower().replace(stringSeparator, '').split(separator) :
legendElement = c.strip()
if legendElement not in self.legend :
self.legend[legendElement] = i
i+=1
self.strLegend = self.lines[0].replace('\r', '\n').replace('\n', '')
self.lines = self.lines[1:]
# sk = skipLines+1
# for l in self.lines :
# if l[0] == "#" :
# sk += 1
# else :
# break
# self.header = self.lines[:sk]
# self.lines = self.lines[sk:]
def streamToFile(self, filename, keepInMemory = False, writeRate = 1) :
"""Starts a stream to a file. Every line must be committed (l.commit()) to be appended in to the file.
If keepInMemory is set to True, the parser will keep a version of the whole CSV in memory, writeRate is the number
of lines that must be committed before an automatic save is triggered.
"""
if len(self.legend) < 1 :
raise ValueError("There's no legend defined")
try :
os.remove(filename)
except :
pass
self.streamFile = open(filename, "a")
self.writeRate = writeRate
self.streamBuffer = []
self.keepInMemory = keepInMemory
self.streamFile.write(self.strLegend + "\n")
def commitLine(self, line) :
"""Commits a line making it ready to be streamed to a file and saves the current buffer if needed. If no stream is active, raises a ValueError"""
if self.streamBuffer is None :
raise ValueError("Commit lines is only for when you are streaming to a file")
self.streamBuffer.append(line)
if len(self.streamBuffer) % self.writeRate == 0 :
for i in range(len(self.streamBuffer)) :
self.streamBuffer[i] = str(self.streamBuffer[i])
self.streamFile.write("%s\n" % ('\n'.join(self.streamBuf |
egabancho/invenio-ext | invenio_ext/template/config.py | Python | gpl-2.0 | 947 | 0 | # -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2013, 2014, 2015 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if | not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 3 | 30, Boston, MA 02111-1307, USA.
"""Jinja2 configuration."""
from __future__ import unicode_literals
JINJA2_EXTENSIONS = [
'jinja2.ext.do',
]
"""List of automatically loaded extensions."""
|
statmuse/bagelbot | check_attendance.py | Python | mit | 6,699 | 0.003583 | #!/usr/bin/env python
"""
Bagelbot script for checking for attendance for an upcoming bagelbot meeting.
"""
import logging
import sys
import time
from datetime import datetime, timedelta
from config import ATTENDANCE_TIME_LIMIT
from utils import YES, NO, initialize, nostdout, download_shelve_from_s3, upload_shelve_to_s3
def check_attendance(store, sc, users=None):
"""Pings all slack users with the email address stored in config.py.
It asks if they are available for today's meeting, and waits for a pre-determined amount of time.
If all users respond, or if the time limit is reached, the script exits
and writes today's upcoming meeting to the store.
Args:
store (instance): A persistent, dictionary-like object used to keep information about past/future meetings
sc (SlackClient): An instance of SlackClient
users (list): A list of users to ping for role call (overrides store['everyone'])
"""
start = datetime.now()
todays_meeting = {"date": start.date(), "available": [], "out": []}
if not users:
users = store["everyone"]
user_len = len(users)
messages_sent = {}
if sc.rtm_connect():
for user in users:
logging.info("Pinging %s...", user)
message = sc.api_call(
"chat.postMessage",
channel="@" + user,
as_user=True,
text="Will you be available for today's ({:%m/%d/%Y}) :coffee: and :bagel: meeting? (yes/no)".format(
todays_meeting["date"]
),
)
message["user"] = user
messages_sent[message["channel"]] = message
logging.info("Waiting for responses...")
while True:
try:
events = sc.rtm_read()
for event in events:
logging.debug(event)
if (
event["type"] == "message"
and event["channel"] in messages_sent
and float(event["ts"]) > float(messages_sent[event["channel"]]["ts"])
):
lower_txt = event["text"].lower().strip()
user = messages_sent[event["channel"]]["user"]
logging.info(
"%s responded with '%s'", user, event["text"].encode("ascii", "ignore")
)
user_responded = False
if lower_txt in YES:
user_responded = True
todays_meeting["available"].append(user)
sc.api_call(
"chat.postMessage",
channel=event["channel"],
as_user=True,
text="Your presence has been acknowledged! Thank you! :tada:",
)
elif lower_txt in NO:
user_responded = True
todays_meeting["out"].append(user)
sc.api_call(
"chat.postMessage",
channel=event["channel"],
as_user=True,
text="Your absence has been acknowledged! You will be missed! :cry:",
)
if user_responded:
# User has responded to bagelbot, don't listen to this channel anymore.
messages_sent.pop(event["channel"])
except:
logging.exception("Something went wrong reading Slack RTM Events.")
all_accounted_for = (
len(todays_meeting["available"]) + len(todays_meeting["out"]) == user_len
)
if (
datetime.now() > (start + timedelta(seconds=ATTENDANCE_TIME_LIMIT))
or all_accounted_for
):
if not all_accounted_for:
# Move any remaining users over to 'out' at the end of the time limit - assuming they aren't available
todays_meeting["out"] += [
u
for u in users
if u not in todays_meeting["available"] and u not in todays_meeting["out"]
]
logging.info(
"Finished! These people aren't available today: %s",
", ".join(todays_meeting["out"]),
)
# Store this upcoming meeting under a separate key for use by generate_meeting.py upon actual meeting generation.
store["upcoming"] = todays_meeting
break
else:
time.sleep(1)
else:
logging.info("Connection Failed, invalid token?")
def main(args):
"""
Initialize the shelf, possibly sync to s3, then check attendance, close
the shelf and maybe sync the shelf again.
Args:
args (ArgumentParser args): Parsed arguments that impact how the check_attandance runs
"""
if args.s3_sync:
download_shelve_from_s3()
if args.debug:
logging.basicConfig(stream=sys.stdout, level=logging.DEBUG, format="%(message)s")
else:
logging.basicConfig(stream=sys.stdout, level=logging.INFO, format="%(message)s")
store, sc = initialize(update_everyone=True)
try:
check_attendance(store, sc, users=args.users)
finally:
store.close()
if args.s3_sync:
upload_shelve_to_s3()
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser(
description="Check to see if any Slack members will be missing today's meeting."
)
parser.add_argument(
"--users",
"-u",
dest="users",
metavar="P",
nargs="+",
required=False,
default=[],
help="list of people to check in with (usernames only)",
)
parser.add_argument(
"--from-cron", "-c", action="store_true", help="Silence all logging statements (stdout)."
)
parser.add_argument(
"--debug", "-d", action="store_true", help="Log all events bagelbot can see."
)
parser.add_argument(
"--s3-sync",
"-s",
action="store_true",
help="Synchronize SHELVE_FILE with AWS S3 before and after checking attendance." | ,
)
parsed_args = parser.parse_args()
if parsed_args.from_cron:
with nostdout():
main(parsed_args)
else:
| main(parsed_args)
|
vishnubob/pyscad | tests/test_radial_resolution.py | Python | mit | 692 | 0.001445 | from boiler import *
class TestRadialResolution(unittest.TestCase):
def test_cylinder_radial_resolution_scad(self):
c = Cylinder(h=10, r=20, fn=10, fa=1)
answer = "cylinder(r=20.0, h=10.0, center=false, $fn=10.0);"
code_compare(c.render_scad(), answer)
c.fn = 0
answer = "cylinder(r=20.0, h=10.0, center=false, $fa=1.0 | );"
code_compare(c.render_scad(), answer)
c.fs = 3
answer = "cylinder(r=20.0, h=10.0, center=false, $fa=1.0, $fs=3.0);"
code_compare(c.render_scad(), answer)
c.fs = 2
c.fa = 2
answer = "cylinder(r=20.0, h=1 | 0.0, center=false);"
code_compare(c.render_scad(), answer)
|
shuhaowu/wrtfreezer | setup.py | Python | agpl-3.0 | 408 | 0.002451 | #!/usr/bin/env python
from distutils.core import setup
with open('requirements.txt') as f:
requirements = f.read().splitlines()
setup(
name="wrtfreezer",
version="0.1",
description="A simple utility to mass build OpenWRT images.",
author="Shuh | ao Wu",
license="AGPL",
| url="https://github.com/shuhaowu/wrtfreezer",
packages=["wrtfreezer"],
scripts=["wrtbuild"],
requires=requirements,
)
|
hiteshgarg14/Django-Social-Website | bookmarks/account/urls.py | Python | mit | 1,562 | 0.014085 | from django.conf.urls import url
from . import views
from django.contrib.auth import views as auth_views
from django.contrib.auth.decorators import login_required
urlpatterns = [
# previous login view
# url(r'^login/$', views.user_login, name='login'),
# login / logout urls
url(r'^login/$', auth_views.login,name='login'),
url(r'^logout/$', auth_views.logout,name='logout'),
url(r'^logout-then-login/$', auth_views.logout_then_login,name='logout_then_login'),
url(r'^$', views.dashboard, name='dashboard'),
# change password urls
url(r'^password-change/$',auth_views.password_change,name='password_change'),
url(r'^password-change/done/$',auth_views.password_change_done,name='password_change_done'),
# restore password urls
url(r'^password-reset/$',auth_views.password_reset,name='password_reset'),
url(r'^password-reset/done/$',auth_views.password_reset_done,name='password_re | set_done'),
url(r'^password-reset/confirm/(?P<uidb64>[-\w]+)/(?P<token>[-\w]+)/$',\
auth_views.password_reset_confirm,name='password_reset_confirm'),
url(r'^password-reset/complete/$',auth_views.password_reset_complete,name='password_reset_complete'),
# register
url(r'^register/$', views.register, name='register'),
#edit user profile
url(r'^edit/$', views.edit, na | me='edit'),
url(r'^users/$', views.user_list, name='user_list'),
url(r'^users/follow/$', views.user_follow, name='user_follow'),
url(r'^users/(?P<username>[-\w]+)/$', views.user_detail, name='user_detail'),
]
|
condad/google-objects | setup.py | Python | apache-2.0 | 1,680 | 0 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import io
import os
import sys
from setuptools import setup
from setuptools import find_packages
with io.open('README.md', 'rt', encoding='utf8') as f:
README = f.read()
if sys.argv[-1] == 'test':
os.system('python -sm unittest discover tests "*_te | st.py"')
sys.exit(0)
if sys.argv[-1] == 'publish':
os.system('python setup.py sdist')
os.system('twine upload dist/*')
sys.exit(0)
VERSION = '0.0.7'
REQUIRES = ['google-api-python-client>=1.5.3', 'pandas>=0.22.0', 'fire>=0.1.3']
GI | THUB_URL = 'https://github.com/condad/google-objects'
setup(
name='google_objects',
packages=find_packages(),
version=VERSION,
description="A simple OO wrapper around Google's python API client",
long_description=README,
long_description_content_type='text/markdown',
author='Connor Sullivan',
author_email='sully4792@gmail.com',
install_requires=REQUIRES,
url=GITHUB_URL,
download_url='https://github.com/condad/google-objects/tarball/' + VERSION,
keywords=['google api', 'google sheets', 'google drive', 'google slides'],
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Software Development :: Libraries :: Python Modules',
],
entry_points={
'console_scripts': [
'sheets-cli = google_objects.cli:main',
],
},
)
|
grcanosa/my-telegram-bot | src/mybot/handler/piropos.py | Python | gpl-3.0 | 833 | 0.040816 | #!/usr/bin/python3
import emoji
import random;
from .phraselist import PhraseList;
#from ..data.teletokens import CID;
from telegram import Bot,Update;
class PiropoList(PhraseList):
def __init__(self,cmdget | = "",
cmdadd="",
filename ="",
updater=None,
userR = None,
priority = 50):
super().__init__(cmdget=cmdget,cmdadd=cmdadd,filename=filename,updater=updater,userR=userR,priority=priority);
def get_max_cmd_response(self,update):
text= update.message.from_user.first_name.split()[0];
text +=", no seas presumid@, d | eja de pedir piropos";
#return "BQADBAADKgAD15TmAAFDS0IqiyCZgwI","audio"
#return "AwADBAADJwAD15TmAAG3Lbh5kdhR6QI","voice"
return text,"message";
|
mkouhei/hatena2rest | setup.py | Python | gpl-3.0 | 2,308 | 0.004333 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Copyright (C) 2012 Kouhei Maeda <mkouhei@palmtb.net>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed | in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import os
import sys
from setuptools import setup, find_packages
sys.path.insert(0, 'src')
import hatena2rest
classifiers = [
"Development | Status :: 3 - Alpha",
"Intended Audience :: Information Technology",
"License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)",
"Programming Language :: Python",
"Topic :: Internet :: WWW/HTTP :: Site Management",
"Topic :: Text Processing :: Markup",
"Topic :: Text Processing :: Markup :: XML",
]
long_description = \
open(os.path.join("docs","README.rst")).read() + \
open(os.path.join("docs","HISTORY.rst")).read() + \
open(os.path.join("docs","TODO.rst")).read()
requires = ['setuptools', 'sphinx', 'tinkerer']
setup(name='hatena2rest',
version=hatena2rest.__version__,
description='converting Hatena diary to reST format',
long_description=long_description,
author='Kouhei Maeda',
author_email='mkouhei@palmtb.net',
url='https://github.com/mkouhei/hatena2rest',
license=' GNU General Public License version 3',
classifiers=classifiers,
packages=find_packages('src'),
package_dir={'': 'src'},
data_files = [],
install_requires=requires,
extras_require=dict(
test=[
'nose',
'pep8',
'unittest',
],
),
test_suite='nose.collector',
tests_require=['nose','pep8','unittest'],
entry_points="""
[console_scripts]
htn2rst = hatena2rest.command:main
""",
)
|
huanqi/leetcode-python | implement_strstr/solution.py | Python | bsd-2-clause | 483 | 0 | class Solution(object):
def strStr(self, haystack, needle):
"""
:type haystack: str
:type needle: str
:rtype: int
"""
n = len(haystack)
m = len(needle)
for i in range(n + 1 - m):
matched = True
for k in range(m):
if haystack[i + k] != needle[k]:
matched = False
| break
if | matched:
return i
return -1
|
errror/SlackRTMBot | slott/plugins/ping/__init__.py | Python | lgpl-3.0 | 63 | 0.015873 | #!/usr/bin/env py | thon3
print('ping/__init__.py')
import ping
| |
gilleshenrard/ikoab_elise | api/views.py | Python | mit | 2,172 | 0.00046 | from rest_framework.decorators import api_view
from django.shortcuts import get_object_or_404
from rest_framework.response import Response
from rest_framework import status
from .models import Person
from .serializers import PersonSerializer
@api_view(['GET', 'DELETE', 'PUT'])
def get_delete_update_person(request, fstname):
person = get_object_or_404(Person, firstname=fstname)
# get details of a single person
if request.method == 'GET':
serializer = PersonSerializer(person)
return Response(serializer.data)
# delete a single person
elif request.method == 'DELETE':
person.delete()
return Response(status=status.HTTP_204_NO_CONTENT)
# update details of a single person
elif request.method == 'PUT':
serializer = PersonSerializer(person, data=request.data)
if serializer.is_valid():
serializer.save()
return Response(serializer.data, status=status.HTTP_204_NO_CONTENT)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@api_view(['GET', 'POST'])
def get_post_people(request):
# get all people
if request.method == 'GET':
people = Person.objects.all()
serializer = PersonSerializer(people, many=True)
return Response(serializer.data)
# insert a new record for a person
elif request.method == 'POST':
data = {
'firstname': request.data.get('firstname'),
'lastname': request.data.get('lastname'),
'country': request.data.get('country'),
'email': request.data.get('email'),
'phone': request.data.get('phone'),
'occupation_field': request.data.get('occupation_field'),
'occupation': request.data.get('occupation'),
'birthdate': request.data.get('birthdate'),
'description': request.data.get('description')
}
serializer = PersonSerializer(data=data)
if serializer.is_valid():
| serializer.save()
return Response(serializer.data, status=status.HTTP_201_CREATED)
re | turn Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) |
e-mission/e-mission-server | emission/analysis/modelling/tour_model/get_users.py | Python | bsd-3-clause | 1,172 | 0.007679 | import emission.analysis.modelling.tour_model.data_preprocessing as preprocess
# to determine if the user is valid:
# valid user should have >= 10 trips for further analysis and the proportion of filter_trips is >=50%
def valid_user(filter_trips,trip | s):
valid = False
if len(filter_trips) >= 10 and len(filter_trips) / len(trips) >= 0.5:
valid = True
return valid
# - user_ls: a list of strings representing short user names, such as [user1, user2, user3...]
# - v | alid_user_ls: a subset of `user_ls` for valid users, so also string representation of user names
# - all_users: a collection of all user ids, in terms of user id objects
def get_user_ls(all_users,radius):
user_ls = []
valid_user_ls = []
for i in range(len(all_users)):
curr_user = 'user' + str(i + 1)
user = all_users[i]
trips = preprocess.read_data(user)
filter_trips = preprocess.filter_data(trips,radius)
if valid_user(filter_trips,trips):
valid_user_ls.append(curr_user)
user_ls.append(curr_user)
else:
user_ls.append(curr_user)
continue
return user_ls,valid_user_ls
|
sinhrks/chainer | tests/cupy_tests/sorting_tests/test_search.py | Python | mit | 4,156 | 0 | import unittest
from cupy import testing
@testing.gpu
class TestSearch(unittest.TestCase):
    """Check that CuPy's argmax/argmin agree with NumPy across dtypes.

    Each test returns the result under both backends (``xp`` is injected
    as numpy or cupy by the decorators) and the harness compares them.
    """

    _multiprocess_can_split_ = True

    @testing.for_all_dtypes()
    @testing.numpy_cupy_allclose()
    def test_argmax_all(self, xp, dtype):
        a = testing.shaped_random((2, 3), xp, dtype)
        return a.argmax()

    @testing.for_all_dtypes()
    @testing.numpy_cupy_allclose()
    def test_external_argmax_all(self, xp, dtype):
        a = testing.shaped_random((2, 3), xp, dtype)
        return xp.argmax(a)

    @testing.for_all_dtypes()
    @testing.numpy_cupy_allclose()
    def test_argmax_axis_large(self, xp, dtype):
        a = testing.shaped_random((3, 1000), xp, dtype)
        return a.argmax(axis=0)

    @testing.for_all_dtypes()
    @testing.numpy_cupy_allclose()
    def test_external_argmax_axis_large(self, xp, dtype):
        a = testing.shaped_random((3, 1000), xp, dtype)
        return xp.argmax(a, axis=0)

    @testing.for_all_dtypes()
    @testing.numpy_cupy_allclose()
    def test_argmax_axis0(self, xp, dtype):
        a = testing.shaped_random((2, 3, 4), xp, dtype)
        return a.argmax(axis=0)

    @testing.for_all_dtypes()
    @testing.numpy_cupy_allclose()  # repaired: decorator name was garbled
    def test_argmax_axis1(self, xp, dtype):
        a = testing.shaped_random((2, 3, 4), xp, dtype)
        return a.argmax(axis=1)

    @testing.for_all_dtypes()
    @testing.numpy_cupy_allclose()  # repaired: decorator name was garbled
    def test_argmax_axis2(self, xp, dtype):
        a = testing.shaped_random((2, 3, 4), xp, dtype)
        return a.argmax(axis=2)

    @testing.for_all_dtypes()
    @testing.numpy_cupy_allclose()
    def test_argmin_all(self, xp, dtype):
        a = testing.shaped_random((2, 3), xp, dtype)
        return a.argmin()

    @testing.for_all_dtypes()
    @testing.numpy_cupy_allclose()
    def test_external_argmin_all(self, xp, dtype):
        a = testing.shaped_random((2, 3), xp, dtype)
        return xp.argmin(a)

    @testing.for_all_dtypes()
    @testing.numpy_cupy_allclose()
    def test_argmin_axis_large(self, xp, dtype):
        a = testing.shaped_random((3, 1000), xp, dtype)
        return a.argmin(axis=0)

    @testing.for_all_dtypes()
    @testing.numpy_cupy_allclose()
    def test_external_argmin_axis_large(self, xp, dtype):
        a = testing.shaped_random((3, 1000), xp, dtype)
        return xp.argmin(a, axis=0)

    @testing.for_all_dtypes()
    @testing.numpy_cupy_allclose()
    def test_argmin_axis0(self, xp, dtype):
        a = testing.shaped_random((2, 3, 4), xp, dtype)
        return a.argmin(axis=0)

    @testing.for_all_dtypes()
    @testing.numpy_cupy_allclose()
    def test_argmin_axis1(self, xp, dtype):
        a = testing.shaped_random((2, 3, 4), xp, dtype)
        return a.argmin(axis=1)

    @testing.for_all_dtypes()
    @testing.numpy_cupy_allclose()
    def test_argmin_axis2(self, xp, dtype):
        a = testing.shaped_random((2, 3, 4), xp, dtype)
        return a.argmin(axis=2)
@testing.parameterize(
    {'cond_shape': (2, 3, 4), 'x_shape': (2, 3, 4), 'y_shape': (2, 3, 4)},
    {'cond_shape': (4,), 'x_shape': (2, 3, 4), 'y_shape': (2, 3, 4)},
    {'cond_shape': (2, 3, 4), 'x_shape': (2, 3, 4), 'y_shape': (3, 4)},
    {'cond_shape': (3, 4), 'x_shape': (2, 3, 4), 'y_shape': (4,)},
)
class TestWhere(unittest.TestCase):
    """``xp.where`` must broadcast and agree between NumPy and CuPy."""

    @testing.for_all_dtypes_combination(
        names=['cond_type', 'x_type', 'y_type'])
    @testing.numpy_cupy_allclose()
    def test_where(self, xp, cond_type, x_type, y_type):
        # shaped_random almost never yields zeros, so multiply by a boolean
        # mask to obtain a condition array that is actually sparse.
        mask = testing.shaped_random(self.cond_shape, xp, xp.bool_)
        cond = testing.shaped_random(self.cond_shape, xp, cond_type) * mask
        x_arr = testing.shaped_random(self.x_shape, xp, x_type)
        y_arr = testing.shaped_random(self.y_shape, xp, y_type)
        return xp.where(cond, x_arr, y_arr)
class TestWhereError(unittest.TestCase):
    """``xp.where`` with exactly two arguments must raise in both backends."""

    @testing.numpy_cupy_raises()
    def test_one_argument(self, xp):
        # where() accepts either one or three arguments; two is an error.
        condition = testing.shaped_random((3, 4), xp, dtype=xp.bool_)
        values = testing.shaped_random((2, 3, 4), xp, xp.int32)
        xp.where(condition, values)
|
rigetticomputing/pyquil | pyquil/parser.py | Python | apache-2.0 | 1,511 | 0 | ##############################################################################
# Copyright 2016-2018 Rigetti Computing
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##############################################################################
"""
Module for parsing Quil programs from text into PyQuil objects
"""
from typing import List
from pyquil._parser.parser import run_parser
from pyquil.quil import Program
from pyquil.quilbase import AbstractInstruction
def parse_program(quil: str) -> Program:
    """
    Parse a raw Quil program and return a PyQuil program.

    :param str quil: a single or multiline Quil program
    :return: PyQuil Program object
    """
    # parse() yields the individual instruction objects; Program wraps them.
    # (The original call was corrupted to "parse(q | uil)".)
    return Program(parse(quil))
def parse(quil: str) -> List[AbstractInstruction]:
    """
    Parse a raw Quil program and return a corresponding list of PyQuil objects.

    :param str quil: a single or multiline Quil program
    :return: list of instructions
    """
    # Thin wrapper over the generated parser; returning directly removes
    # the needless temporary the original used.
    return run_parser(quil)
|
greggyNapalm/lunaport_client | lunaport_client/tests/test_server_resource.py | Python | apache-2.0 | 1,964 | 0.00611 | #import sys
#sys.path.append('../')
import responses
import requests
#from lunaport_client import LunaportClinetv1
#import lunaport_client as lc
#print lc.__version__
#import .. lunaport_client
#from ..lunaport_client import LunaportClinet
#from lunaport_client import LunaportClinet as lunac
#lunac = lunaport_client.LunaportClinetv1()
from ..lunaport_client.http_client import LunaportClinet
@responses.activate
def test_my_api():
    """Mock a 404 JSON response and check requests surfaces it intact."""
    url = 'http://twitter.com/api/1/foobar'
    body = '{"error": "not found"}'
    responses.add(responses.GET, url, body=body, status=404,
                  content_type='application/json')

    resp = requests.get(url)

    assert resp.json() == {"error": "not found"}
    assert len(responses.calls) == 1
    call = responses.calls[0]
    assert call.request.url == url
    assert call.response.content == body
@responses.activate
def test_my_XXX():
    """Same mocked-404 scenario as test_my_api (duplicate kept for parity).

    The mocked URL string was corrupted ('http:/ | /twitter...') and is
    restored here.
    """
    responses.add(responses.GET, 'http://twitter.com/api/1/foobar',
                  body='{"error": "not found"}', status=404,
                  content_type='application/json')

    resp = requests.get('http://twitter.com/api/1/foobar')

    assert resp.json() == {"error": "not found"}
    assert len(responses.calls) == 1
    assert responses.calls[0].request.url == 'http://twitter.com/api/1/foobar'
    assert responses.calls[0].response.content == '{"error": "not found"}'
@responses.activate
def test_my_YYY():
    """Same mocked-404 scenario as test_my_api (duplicate kept for parity).

    The 'def' keyword was corrupted ('de | f') in the original and is
    restored here.
    """
    responses.add(responses.GET, 'http://twitter.com/api/1/foobar',
                  body='{"error": "not found"}', status=404,
                  content_type='application/json')

    resp = requests.get('http://twitter.com/api/1/foobar')

    assert resp.json() == {"error": "not found"}
    assert len(responses.calls) == 1
    assert responses.calls[0].request.url == 'http://twitter.com/api/1/foobar'
    assert responses.calls[0].response.content == '{"error": "not found"}'
|
Microvellum/Fluid-Designer | win64-vc/2.78/Python/bin/2.78/scripts/addons/rigify/rigs/pitchipoy/tentacle.py | Python | gpl-3.0 | 16,078 | 0.020152 | import bpy
from ...utils import copy_bone
from ...utils import strip_org, make_deformer_name, connected_children_names
from ...utils import make_mechanism_name, put_bone, create_sphere_widget
from ...utils import create_widget, create_circle_widget
from ...utils import MetarigError
from rna_ | prop_ui import rna_i | dprop_ui_prop_get
script = """
controls = [%s]
master_name = '%s'
if is_selected( controls ):
layout.prop( pose_bones[ master_name ], '["%s"]', slider = True )
layout.prop( pose_bones[ master_name ], '["%s"]', slider = True )
"""
class Rig:
def __init__(self, obj, bone_name, params):
self.obj = obj
self.org_bones = [bone_name] + connected_children_names(obj, bone_name)
self.params = params
if params.tweak_extra_layers:
self.tweak_layers = list( params.tweak_layers )
else:
self.tweak_layers = None
if len(self.org_bones) <= 1:
raise MetarigError(
"RIGIFY ERROR: invalid rig structure" % (strip_org(bone_name))
)
def make_mch( self ):
bpy.ops.object.mode_set(mode ='EDIT')
eb = self.obj.data.edit_bones
org_bones = self.org_bones
mch_parent = self.obj.data.bones[ org_bones[0] ].parent
mch_parent_name = mch_parent.name # Storing the mch parent's name
if not mch_parent:
mch_parent = self.obj.data.edit_bones[ org_bones[0] ]
mch_bone = copy_bone(
self.obj,
mch_parent_name,
make_mechanism_name( strip_org( org_bones[0] ) )
)
else:
mch_bone = copy_bone(
self.obj,
mch_parent_name,
make_mechanism_name( strip_org( org_bones[0] ) )
)
put_bone( self.obj, mch_bone, eb[ mch_parent_name ].tail )
eb[ mch_bone ].length /= 4 # reduce length to fourth of original
return mch_bone
    def make_master( self ):
        """Create the master control bone (a copy of the root ORG bone)
        and give it a square widget; returns the new bone's name."""
        bpy.ops.object.mode_set(mode ='EDIT')
        org_bones = self.org_bones
        master_bone = copy_bone(
            self.obj,
            org_bones[0],
            "master_" + strip_org( org_bones[0] )
        )
        # Make widgets (widget assignment requires object mode)
        bpy.ops.object.mode_set(mode ='OBJECT')
        # NOTE(review): create_square_widget is not among this file's imports
        # from ...utils -- this call looks like it would raise NameError at
        # runtime; confirm where it is defined or import it explicitly.
        create_square_widget( self.obj, master_bone )
        return master_bone
def make_controls( self ):
bpy.ops.object.mode_set(mode ='EDIT')
org_bones = self.org_bones
ctrl_chain = []
for i in range( len( org_bones ) ):
name = org_bones[i]
ctrl_bone = copy_bone(
self.obj,
name,
strip_org(name)
)
ctrl_chain.append( ctrl_bone )
# Make widgets
bpy.ops.object.mode_set(mode ='OBJECT')
for ctrl in ctrl_chain:
create_circle_widget(self.obj, ctrl, radius=0.3, head_tail=0.5)
return ctrl_chain
    def make_tweaks( self ):
        """Create one tweak bone per ORG bone plus one extra at the chain
        tip, set their widgets/locks/layers, and return their names.

        The loop runs len(org_bones) + 1 times: iteration i == len(org_bones)
        duplicates the last ORG bone and repositions the copy at its tail so
        the tip of the tentacle gets its own tweak control.
        """
        bpy.ops.object.mode_set(mode ='EDIT')
        eb = self.obj.data.edit_bones
        org_bones = self.org_bones
        tweak_chain = []
        for i in range( len( org_bones ) + 1 ):
            if i == len( org_bones ):
                # Make final tweak at the tip of the tentacle
                name = org_bones[i-1]
            else:
                name = org_bones[i]
            tweak_bone = copy_bone(
                self.obj,
                name,
                "tweak_" + strip_org(name)
            )
            tweak_e = eb[ tweak_bone ]
            tweak_e.length /= 2 # Set size to half
            if i == len( org_bones ):
                # Position final tweak at the tip
                put_bone( self.obj, tweak_bone, eb[ org_bones[-1]].tail )
            tweak_chain.append( tweak_bone )
        # Make widgets (widget assignment requires object mode)
        bpy.ops.object.mode_set(mode = 'OBJECT')
        for tweak in tweak_chain:
            create_sphere_widget( self.obj, tweak )
            tweak_pb = self.obj.pose.bones[ tweak ]
            # Set locks: mid-chain tweaks keep Y-rotation and Y-scale free;
            # the tip tweak is fully locked (translation only).
            if tweak_chain.index( tweak ) != len( tweak_chain ) - 1:
                tweak_pb.lock_rotation = (True, False, True)
                tweak_pb.lock_scale   = (False, True, False)
            else:
                tweak_pb.lock_rotation_w = True
                tweak_pb.lock_rotation   = (True, True, True)
                tweak_pb.lock_scale      = (True, True, True)
            # Set up tweak bone layers (user-chosen extra layers, if any)
            if self.tweak_layers:
                tweak_pb.bone.layers = self.tweak_layers
        return tweak_chain
def make_deform( self ):
bpy.ops.object.mode_set(mode ='EDIT')
org_bones = self.org_bones
def_chain = []
for i in range( len( org_bones ) ):
name = org_bones[i]
def_bone = copy_bone(
self.obj,
name,
make_deformer_name(strip_org(name))
)
def_chain.append( def_bone )
return def_chain
def parent_bones( self, all_bones ):
bpy.ops.object.mode_set(mode ='EDIT')
org_bones = self.org_bones
eb = self.obj.data.edit_bones
""" for category in all_bones:
if isinstance( all_bones[category], list ):
for bone in all_bones[category]:
print( "Bone: " + bone )
eb[bone].parent = None
else:
eb[ all_bones[category] ].parent = None
"""
# mch bone remains parentless and will be parented to root by rigify
# Parent master bone
# eb[ all_bones['master'] ].parent = eb[ all_bones['mch'] ]
# Parent control bones
# ctrls_n_parent = [ all_bones['master'] ] + all_bones['control']
for bone in ctrls_n_parent[1:]:
previous_index = ctrls_n_parent.index( bone ) - 1
eb[ bone ].parent = eb[ ctrls_n_parent[previous_index] ]
# Parent tweak bones
tweaks = all_bones['tweak']
for tweak in all_bones['tweak']:
parent = ''
if tweaks.index( tweak ) == len( tweaks ) - 1:
parent = all_bones['control'][ -1 ]
else:
parent = all_bones['control'][ tweaks.index( tweak ) ]
eb[ tweak ].parent = eb[ parent ]
# Parent deform bones
for bone in all_bones['deform'][1:]:
previous_index = all_bones['deform'].index( bone ) - 1
eb[ bone ].parent = eb[ all_bones['deform'][previous_index] ]
eb[ bone ].use_connect = True
# Parent org bones ( to tweaks by default, or to the controls )
for org, tweak in zip( org_bones, all_bones['tweak'] ):
eb[ org ].parent = eb[ tweak ]
def make_constraints( self, all_bones ):
bpy.ops.object.mode_set(mode ='OBJECT')
org_bones = self.org_bones
pb = self.obj.pose.bones
## MCH bone constraints
if pb[ org_bones[0] ].parent:
mch_pb = pb[ all_bones['mch'] ]
con = mch_pb.constraints.new('COPY_LOCATION')
con.target = self.obj
con.subtarget = pb[ org_bones[0] ].parent.name
con.head_tail = 1.0
con = mch_pb.constraints.new('COPY_ROTATION')
con.target = self.obj
con.subtarget = pb[ org_bones[0] ].parent.name
con = mch_pb.constraints.new('COPY_SCALE')
con.target = self.obj
con.subtarget = pb[ org_bones[0] ].parent.name
"""
# Setting the MCH prop
master_pb = pb[ all_bones['master'] ]
prop_name_r = "rotation_follow"
prop_name_s = "scale_follow"
prop_names = [ prop_name_r, prop_name_s ]
|
bl4ckdu5t/registron | tests/interactive/test_ipython.py | Python | mit | 441 | 0.004535 | #--------------------------------------------------------- | --------------------
# Copyright (c) 2013, PyInstaller Development Team.
#
# Distributed under the terms of the GNU General Public License with exception
# for distributing bootloader.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
from IPython import e | mbed
embed()
|
online-behaviour/machine-learning | getTweetText.py | Python | apache-2.0 | 1,768 | 0.010181 | #!/usr/bin/python3 -W all
"""
getTweetText.py: extract tweet text from json file
usage: getTweetText.py < file
20170418 erikt(at)xs4all.nl
"""
import csv
import json
import re
import sys
# command name for error messages
COMMAND = sys.argv[0]
patternNewline = re.compile("\n")
# open csv output
with sys.stdout as csvfile:
outFile = csv.writer(csvfile,delimiter=",",quotechar='"')
# repeat for each input line
for line in sys.stdin:
# convert the line to a json dictionary
jsonLine = json.loads(line)
# test for presence of required fields
if not "id_str" in jsonLine: sys.exit(COMMAND+" missing id_str field")
if not "text" in jsonLine: sys.exit(COMMAND+" missing text field")
if not "user" in jsonLine: sys.exit(COMMAND+" missing user field")
if not "screen_name" in jsonLine["user"]:
sys.exit(COMMAND+" missing screen_name field")
if not "created_at" in jsonLine["user"]:
sys.exit(COMMAND+" missing created_at field")
if not "in_reply_to_status_id_str" in jsonLine:
sys.exit(COMMAND+" missing in_reply_to_status_id_str field")
# print the text in csv format
thisId = jsonLine["id_str"]
replyId = jsonLine["in_reply_to_status_id_str"]
if replyId == None and "retweeted_status" in jsonLine and \
| "in_reply_to_status_id_str" in jsonLine["retweeted_status"]:
replyId = jsonLine["retweeted_status"]["in_reply_to_status_id_str"]
screenName = jsonLine["user"]["screen_ | name"]
date = jsonLine["created_at"]
text = jsonLine["text"]
text = patternNewline.sub(" ",text)
outFile.writerow([thisId,replyId,date,screenName,text])
csvfile.close()
|
Symmetry-Innovations-Pty-Ltd/Python-2.7-for-QNX6.5.0-x86 | usr/pkg/lib/python2.7/distutils/command/bdist_dumb.py | Python | mit | 5,127 | 0.00195 | """distutils.command.bdist_dumb
Implements the Distutils 'bdist_dumb' command (create a "dumb" built
distribution -- i.e., just an archive to be unpacked under $prefix or
$exec_prefix)."""
__revision__ = "$Id$"
import os
from sysconfig import get_python_version
from distutils.util import get_platform
from distutils.core import Command
from distutils.dir_util import remove_tree, ensure_relative
from distutils.errors import DistutilsPlatformError
from distutils import log
class bdist_dumb (Command):
description = 'create a "dumb" built distribution'
user_options = [('bdist-dir=', 'd',
"temporary directory for creating the distribution"),
('plat-name=', 'p',
"platform name to embed in generated filenames "
"(default: %s)" % get_platform()),
('format=', 'f',
"archive format to create (tar, ztar, gztar, zip)"),
('keep-temp', 'k',
"keep the pseudo-installation tree around after " +
"creating the distribution archive"),
('dist-dir=', 'd',
"directory to put final built distributions in"),
('skip-build', None,
"skip rebuilding everything (for testing/debugging)"),
('relative', None,
"build the archive using relative paths"
"(default: false)"),
('owner=', 'u',
"Owner name used when creating a tar file"
" [default: current user]"),
('group=', 'g',
"Group name used when creating a tar file"
" [default: current group]"),
]
boolean_options = ['keep-temp', 'skip-build', 'relative']
default_format = { 'posix': 'gztar',
'nt': 'zip',
'os2': 'zip' }
def initialize_options (self):
self.bdist_dir = None
self.plat_name = None
self.format = None
self.keep_temp = 0
self.dist_dir = None
self.skip_build = 0
self.relative = 0
self.owner = None
self.group = None
def finalize_options(self):
if self.bdist_dir is None:
bdist_base = self.get_finalized_command('bdist').bdist_base
self.bdist_dir = os.path.join(bdist_base, 'dumb')
if self.format is None:
try:
self.format = self.default_format[os.name]
except KeyError:
raise DistutilsPlatformError, \
("don't know how to create dumb built distributions " +
"on platform %s") % os.name
self.set_undefined_options('bdist',
('dist_dir', 'dist_dir'),
('plat_name', 'plat_name'))
def run(self):
if not self.skip_build:
self.run_command('build')
install = self.reinitialize_command('install', reinit_subcommands=1)
install.root = self.bdist_dir
install.skip_build = self.skip_build
install.warn_dir = 0
log.info("installing to %s" % self.bdist_dir)
self.run_command('install')
# And make an archive relative to the root of the
# pseudo-installation tree.
archive_basename = "%s.%s" % (self.distribution.get_fullname(),
self.plat_name)
# OS/2 objects to any ":" characters in a filename (such as when
# a timestamp is used in a version) so change them to hyphens.
if os.name == "os2":
archive_basename = archive_basename.replace(":", "-")
pseudoinstall_root = os.path.join(self.dist_dir, archive_basename)
if not self.relative:
archive_root = self.bdist_dir
else:
if (self.distribution.has_ext_modules() and
(install.install_base != install.install_platbase)):
raise DistutilsPlatformError, \
("can't make a dumb built distribution where "
"base and platbase are different (%s, %s)"
% (repr(install.i | nstall_base),
repr(install.install_platbase)))
else:
archive_root = os.path.join(self.bdist_dir,
ensure_relative(install.install_base))
# Make the archive
filename = self.make_archiv | e(pseudoinstall_root,
self.format, root_dir=archive_root,
owner=self.owner, group=self.group)
if self.distribution.has_ext_modules():
pyversion = get_python_version()
else:
pyversion = 'any'
self.distribution.dist_files.append(('bdist_dumb', pyversion,
filename))
if not self.keep_temp:
remove_tree(self.bdist_dir, dry_run=self.dry_run)
|
plotly/plotly.py | packages/python/plotly/plotly/validators/scatterternary/_selectedpoints.py | Python | mit | 435 | 0.002299 | import _plotly_utils.basevalidators
class SelectedpointsValidator(_plotly_utils.basevalidators.AnyValidator):
    """Validator for the scatterternary trace's ``selectedpoints`` attribute.

    Repaired two corrupted tokens from the original: the ``def __init__``
    keyword and the ``**kwargs`` forwarding argument.
    """

    def __init__(
        self, plotly_name="selectedpoints", parent_name="scatterternary", **kwargs
    ):
        super(SelectedpointsValidator, self).__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            edit_type=kwargs.pop("edit_type", "calc"),
            **kwargs
        )
|
nkgilley/home-assistant | tests/components/juicenet/__init__.py | Python | apache-2.0 | 40 | 0 | """Te | sts for | the JuiceNet component."""
|
hacktoberfest17/programming | hello_world/python/hello_world_py3.py | Python | gpl-3.0 | 23 | 0.043478 | pr | int ("Hello World | !")
|
errbotio/errbot | errbot/templates/initdir/example.py | Python | gpl-3.0 | 667 | 0.001499 | from errbot import BotPlugin, botcmd
class Example(BotPlugin):
    """
    This is a very basic plugin to try out your new installation and get you started.
    Feel free to tweak me to experiment with Errbot.
    You can find me in your init directory in the subdirectory plugins.
    """
    @botcmd # flags a command
    def tryme(self, msg, args): # a command callable with !tryme
        """
        Execute to check if Errbot responds to command.
        Feel free to tweak me to experiment with Errbot.
        You can find me in your init directory in the subdirectory plugins.
        """
        return "It *works*!" # This string format is markdown.
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.