| repo_name (string, 5–100 chars) | path (string, 4–231 chars) | language (1 class) | license (15 classes) | size (int64, 6–947k) | score (float64, 0–0.34) | prefix (string, 0–8.16k) | middle (string, 3–512) | suffix (string, 0–8.17k) |
|---|---|---|---|---|---|---|---|---|
evilsocket/arminject | trace_pid.py | Python | bsd-3-clause | 2,396 | 0.005426
# Copyright (c) 2015, Simone Margaritelli <evilsocket at gmail dot com>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of ARM Inject nor the names of its contributors may be used
# to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
from pyadb.adb import ADB
import sys
if len(sys.argv) != 2:
print "Usage: python %s <pid>" % sys.argv[0]
quit()
pid = int(sys.argv[1])
try:
adb = ADB()
print "@ Pushing files to /data/local/tmp ..."
adb.sh( "rm -rf /data/local/tmp/injector /data/local/tmp/libhook.so" )
adb.push( "libs/armeabi-v7a/injector", "/data/local/tmp/injector" )
adb.push( "libs/armeabi-v7a/libhook.so", "/data/local/tmp/libhook.so" )
adb.sh( "chmod 777 /data/local/tmp/injector" )
# we need to set selinux to permissive in order to make ptrace work
adb.set_selinux_level( 0 )
adb.clear_log()
print "@ Injection into PID %d starting ..." % pid
adb.sudo( "/data/local/tmp/injector %d /data/local/tmp/libhook.so" % pid )
adb.logcat("LIBHOOK")
except KeyboardInterrupt:
pass
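# A hedged usage sketch (run from the arminject tree with a device attached
# over adb; the PID is whatever process you want to instrument):
#   python trace_pid.py 1234
# This pushes injector and libhook.so to /data/local/tmp, injects the library
# into PID 1234 and then tails the "LIBHOOK" logcat tag.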
uclouvain/osis_louvain | base/migrations/0198_auto_20171130_1602.py | Python | agpl-3.0 | 1,184 | 0.002534
# -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2017-11-30 15:02
from __future__ import unicode_literals
from django.db import migrations, utils
from django.db import transaction
def copy_previous_pgrm_to_current_lunit(apps, model):
base = apps.get_app_config('base')
ProgramManager = base.get_model('programmanager')
OfferYear = base.get_model('offeryear')
previous_pgrm = ProgramManager.objects.filter(offer_year__academic_year__year=2016)\
.select_related('offer_year__offer')
for pgrm in previous_pgrm:
new_offer_year = OfferYear.objects.filter(offer=pgrm.offer_year.offer, academic_year__year=2017).first()
if new_offer_year:
try:
with transaction.atomic():
pgrm.pk = None
pgrm.offer_year = new_offer_year
pgrm.save()
except utils.IntegrityError:
print("Duplicated.")
class Migration(migrations.Migration):
dependencies = [
('base', '0197_auto_20171130_0823'),
]
operations = [
migrations.RunPython(copy_previous_pgrm_to_current_lunit),
]
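# Note: migrations.RunPython also accepts a reverse callable; a hedged sketch,
# not part of this migration:
#   migrations.RunPython(copy_previous_pgrm_to_current_lunit,
#                        reverse_code=migrations.RunPython.noop)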
kogotko/carburetor | openstack_dashboard/dashboards/project/security_groups/panel.py | Python | apache-2.0 | 783 | 0
# Copyright 2017 Cisco Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.utils.translation import ugettext_lazy as _
import horizon
class SecurityGroups(horizon.Panel):
name = _("Security Groups")
slug = 'security_groups'
algorythmic/bash-completion | test/t/test_pkgconf.py | Python | gpl-2.0 | 548 | 0
import os
import pytest
@pytest.mark.bashcomp(cmd="pkgconf")
class TestPkgconf:
@pytest.mark.complete("pkgconf ")
def test_1(self, completion):
assert completion
@pytest.mark.complete("pkgconf -", require_cmd=True)
def test_2(self, completion):
assert completion
@pytest.mark.complete(
"pkgconf %s/bash-comp
|
letion.pc --va
|
riable="
% os.getenv("ABS_TOP_BUILDDIR", "../.."),
require_cmd=True,
)
def test_variable(self, completion):
assert "completionsdir" in completion
gppezzi/easybuild-framework | easybuild/tools/build_log.py | Python | gpl-2.0 | 13,380 | 0.00299
# #
# Copyright 2009-2019 Ghent University
#
# This file is part of EasyBuild,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://www.vscentrum.be),
# Flemish Research Foundation (FWO) (http://www.fwo.be/en)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# https://github.com/easybuilders/easybuild
#
# EasyBuild is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# EasyBuild is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
# #
"""
EasyBuild logger and log utilities, including our own EasybuildError class.
:author: Stijn De Weirdt (Ghent University)
:author: Dries Verdegem (Ghent University)
:author: Kenneth Hoste (Ghent University)
:author: Pieter De Baets (Ghent University)
:author: Jens Timmerman (Ghent University)
"""
import logging
import os
import re
import sys
import tempfile
from copy import copy
from datetime import datetime
from easybuild.base import fancylogger
from easybuild.base.exceptions import LoggedException
from easybuild.tools.version import VERSION, this_is_easybuild
# EasyBuild message prefix
EB_MSG_PREFIX = "=="
# the version seen by log.deprecated
CURRENT_VERSION = VERSION
# allow some experimental code
EXPERIMENTAL = False
DEPRECATED_DOC_URL = 'http://easybuild.readthedocs.org/en/latest/Deprecated-functionality.html'
DRY_RUN_BUILD_DIR = None
DRY_RUN_SOFTWARE_INSTALL_DIR = None
DRY_RUN_MODULES_INSTALL_DIR = None
DEVEL_LOG_LEVEL = logging.DEBUG - 1
logging.addLevelName(DEVEL_LOG_LEVEL, 'DEVEL')
class EasyBuildError(LoggedException):
"""
EasyBuildError is thrown when EasyBuild runs into something horribly wrong.
"""
LOC_INFO_TOP_PKG_NAMES = ['easybuild', 'vsc']
LOC_INFO_LEVEL = 1
# always include location where error was raised from, even under 'python -O'
INCLUDE_LOCATION = True
def __init__(self, msg, *args):
"""Constructor: initialise EasyBuildError instance."""
if args:
msg = msg % args
LoggedException.__init__(self, msg)
self.msg = msg
def __str__(self):
"""Return string representation of this EasyBuildError instance."""
return repr(self.msg)
def raise_easybuilderror(msg, *args):
"""Raise EasyBuildError with given message, formatted by provided string arguments."""
raise EasyBuildError(msg, *args)
class EasyBuildLog(fancylogger.FancyLogger):
"""
The EasyBuild logger, with its own error and exception functions.
"""
RAISE_EXCEPTION_CLASS = EasyBuildError
def caller_info(self):
"""Return string with caller info."""
# findCaller returns a 3-tuple in Python 2, a 4-tuple in Python 3 (stack info as extra element)
(filepath, line, function_name) = self.findCaller()[:3]
filepath_dirs = filepath.split(os.path.sep)
for dirName in copy(filepath_dirs):
if dirName != "easybuild":
filepath_dirs.remove(dirName)
else:
break
if not filepath_dirs:
filepath_dirs = ['?']
return "(at %s:%s in %s)" % (os.path.join(*filepath_dirs), line, function_name)
def experimental(self, msg, *args, **kwargs):
"""Handle experimental functionality if EXPERIMENTAL is True, otherwise log error"""
common_msg = "Experimental functionality. Behaviour might change/be removed later"
if EXPERIMENTAL:
msg = common_msg + ': ' + msg
self.warning(msg, *args, **kwargs)
else:
msg = common_msg + " (use --experimental option to enable): " + msg
raise EasyBuildError(msg, *args)
def deprecated(self, msg, ver, max_ver=None, more_info=None, silent=False, *args, **kwargs):
"""
Print deprecation warning or raise an exception, depending on specified version(s)
:param: msg: deprecation message
:param ver: if max_ver is None: threshold for EasyBuild version to determine warning vs exception
else: version to check against max_ver to determine warning vs exception
:param max_ver: version threshold for warning vs exception (compared to 'ver')
:param more_info: additional message with instructions where to get more information
:param silent: stay silent (don't *print* deprecation warnings, only log them)
"""
# provide log_callback function that both logs a warning and prints to stderr
def log_callback_warning_and_print(msg):
"""Log warning message, and also print it to stderr."""
self.warning(msg)
print_warning(msg, silent=silent)
kwargs['log_callback'] = log_callback_warning_and_print
# always raise an EasyBuildError, nothing else
kwargs['exception'] = EasyBuildError
if max_ver is None:
if more_info:
msg += more_info
else:
msg += "; see %s for more information" % DEPRECATED_DOC_URL
fancylogger.FancyLogger.deprecated(self, msg, str(CURRENT_VERSION), ver, *args, **kwargs)
else:
fancylogger.FancyLogger.deprecated(self, msg, ver, max_ver, *args, **kwargs)
def nosupport(self, msg, ver):
"""Print error message for no longer supported behaviour, and raise an EasyBuildError."""
nosupport_msg = "NO LONGER SUPPORTED since v%s: %s; see %s for more information"
raise EasyBuildError(nosupport_msg, ver, msg, DEPRECATED_DOC_URL)
def error(self, msg, *args, **kwargs):
"""Print error message and raise an EasyBuildError."""
ebmsg = "EasyBuild crashed with an error %s: " % self.caller_info()
fancylogger.FancyLogger.error(self, ebmsg + msg, *args, **kwargs)
def devel(self, msg, *args, **kwargs):
"""Print development log message"""
self.log(DEVEL_LOG_LEVEL, msg, *args, **kwargs)
def exception(self, msg, *args):
"""Print exception message and raise EasyBuildError."""
# don't raise the exception from within error
ebmsg = "EasyBuild encountered an exception %s: " % self.caller_info()
fancylogger.FancyLogger.exception(self, ebmsg + msg, *args)
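# A hedged usage sketch of the logging API above (illustrative only):
#   log = fancylogger.getLogger('example')      # an EasyBuildLog instance
#   log.deprecated("old behaviour", '9.0')      # warns while CURRENT_VERSION < 9.0,
#                                               # raises EasyBuildError from 9.0 on
#   log.nosupport("ancient behaviour", '4.0')   # always raises EasyBuildError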
# set format for logger
LOGGING_FORMAT = EB_MSG_PREFIX + ' %(asctime)s %(filename)s:%(lineno)s %(levelname)s %(message)s'
fancylogger.setLogFormat(LOGGING_FORMAT)
# set the default LoggerClass to EasyBuildLog
fancylogger.logging.setLoggerClass(EasyBuildLog)
# you can't easily set another LoggerClass before fancylogger calls getLogger on import
_init_fancylog = fancylogger.getLogger(fname=False)
del _init_fancylog.manager.loggerDict[_init_fancylog.name]
# we need to make sure there is a handler
fancylogger.logToFile(filename=os.devnull, max_bytes=0)
# EasyBuildLog
_init_easybuildlog = fancylogger.getLogger(fname=False)
def init_logging(logfile, logtostdout=False, silent=False, colorize=fancylogger.Colorize.AUTO, tmp_logdir=None):
"""Initialize logging."""
if logtostdout:
fancylogger.logToScreen(enable=True, stdout=True, colorize=colorize)
else:
if logfile is None:
# if logdir is specified but doesn't exist yet, create it first
if tmp_logdir and not os.path.exists(tmp_logdir):
try:
os.makedirs(tmp_logdir)
except (IOError, OSError) as err:
raise EasyBuildError("Failed to create temporary log directory %s: %s", tmp_logdir, err)
# mkstemp returns (fd,filename), fd is from os.open, not regular open!
fd, logfile = tempfile.mkstemp(suffix='.log', prefix='e
mr-ping/tornado | tornado/httpclient.py | Python | apache-2.0 | 27,522 | 0.000145
"""Blocking and non-blocking HTTP client interfaces.
This module defines a common interface shared by two implementations,
``simple_httpclient`` and ``curl_httpclient``. Applications may either
instantiate their chosen implementation class directly or use the
`AsyncHTTPClient` class from this module, which selects an implementation
that can be overridden with the `AsyncHTTPClient.configure` method.
The default implementation is ``simple_httpclient``, and this is expected
to be suitable for most users' needs. However, some applications may wish
to switch to ``curl_httpclient`` for reasons such as the following:
* ``curl_httpclient`` has some features not found in ``simple_httpclient``,
including support for HTTP proxies and the ability to use a specified
network interface.
* ``curl_httpclient`` is more likely to be compatible with sites that are
not-quite-compliant with the HTTP spec, or sites that use little-exercised
features of HTTP.
* ``curl_httpclient`` is faster.
* ``curl_httpclient`` was the default prior to Tornado 2.0.
Note that if you are using ``curl_httpclient``, it is highly
recommended that you use a recent version of ``libcurl`` and
``pycurl``. Currently the minimum supported version of libcurl is
7.22.0, and the minimum version of pycurl is 7.18.2. It is highly
recommended that your ``libcurl`` installation is built with
asynchronous DNS resolver (threaded or c-ares), otherwise you may
encounter various problems with request timeouts (for more
information, see
http://curl.haxx.se/libcurl/c/curl_easy_setopt.html#CURLOPTCONNECTTIMEOUTMS
and comments in curl_httpclient.py).
To select ``curl_httpclient``, call `AsyncHTTPClient.configure` at startup::
AsyncHTTPClient.configure("tornado.curl_httpclient.CurlAsyncHTTPClient")
"""
from __future__ import absolute_import, division, print_function, with_statement
import functools
import time
import weakref
from tornado.concurrent import TracebackFuture
from tornado.escape import utf8, native_str
from tornado import httputil, stack_context
from tornado.ioloop import IOLoop
from tornado.util import Configurable
class HTTPClient(object):
"""A blocking HTTP client.
This interface is provided for convenience and testing; most applications
that are running an IOLoop will want to use `AsyncHTTPClient` instead.
Typical usage looks like this::
http_client = httpclient.HTTPClient()
try:
response = http_client.fetch("http://www.google.com/")
print(response.body)
except httpclient.HTTPError as e:
# HTTPError is raised for non-200 responses; the response
# can be found in e.response.
print("Error: " + str(e))
except Exception as e:
# Other errors are possible, such as IOError.
print("Error: " + str(e))
http_client.close()
"""
def __init__(self, async_client_class=None, **kwargs):
self._io_loop = IOLoop(make_current=False)
if async_client_class is None:
async_client_class = AsyncHTTPClient
self._async_client = async_client_class(self._io_loop, **kwargs)
self._closed = False
def __del__(self):
self.close()
def close(self):
"""Closes the HTTPClient, freeing any resources used."""
if not self._closed:
self._async_client.close()
self._io_loop.close()
self._closed = True
def fetch(self, request, **kwargs):
"""Executes a request, returning an `HTTPResponse`.
The request may be either a string URL or an `HTTPRequest` object.
If it is a string, we construct an `HTTPRequest` using any additional
kwargs: ``HTTPRequest(request, **kwargs)``
If an error occurs during the fetch, we raise an `HTTPError` unless
the ``raise_error`` keyword argument is set to False.
"""
response = self._io_loop.run_sync(functools.partial(
self._async_client.fetch, request, **kwargs))
return response
class AsyncHTTPClient(Configurable):
"""An non-blocking HTTP client.
Example usage::
def handle_response(response):
if response.error:
print("Error: %s" % response.error)
else:
print(response.body)
http_client = AsyncHTTPClient()
http_client.fetch("http://www.google.com/", handle_response)
The constructor for this class is magic in several respects: It
actually creates an instance of an implementation-specific
subclass, and instances are reused as a kind of pseudo-singleton
(one per `.IOLoop`). The keyword argument ``force_instance=True``
can be used to suppress this singleton behavior. Unless
``force_instance=True`` is used, no arguments other than
``io_loop`` should be passed to the `AsyncHTTPClient` constructor.
The implementation subclass as well as arguments to its
constructor can be set with the static method `configure()`
All `AsyncHTTPClient` implementations support a ``defaults``
keyword argument, which can be used to set default values for
`HTTPRequest` attributes. For example::
AsyncHTTPClient.configure(
None, defaults=dict(user_agent="MyUserAgent"))
# or with force_instance:
client = AsyncHTTPClient(force_instance=True,
defaults=dict(user_agent="MyUserAgent"))
.. versionchanged:: 4.1
The ``io_loop`` argument is deprecated.
"""
@classmethod
def configurable_base(cls):
return AsyncHTTPClient
@classmethod
def configurable_default(cls):
from tornado.simple_httpclient import SimpleAsyncHTTPClient
return SimpleAsyncHTTPClient
@classmethod
def _async_clients(cls):
attr_name = '_async_client_dict_' + cls.__name__
if not hasattr(cls, attr_name):
setattr(cls, attr_name, weakref.WeakKeyDictionary())
return getattr(cls, attr_name)
def __new__(cls, io_loop=None, force_instance=False, **kwargs):
io_loop = io_loop or IOLoop.current()
if force_instance:
instance_cache = None
else:
instance_cache = cls._async_clients()
if instance_cache is not None and io_loop in instance_cache:
return instance_cache[io_loop]
instance = super(AsyncHTTPClient, cls).__new__(cls, io_loop=io_loop,
**kwargs)
# Make sure the instance knows which cache to remove itself from.
# It can't simply call _async_clients() because we may be in
# __new__(AsyncHTTPClient) but instance.__class__ may be
# SimpleAsyncHTTPClient.
instance._instance_cache = instance_cache
if instance_cache is not None:
instance_cache[instance.io_loop] = instance
return instance
def initialize(self, io_loop, defaults=None):
self.io_loop = io_loop
self.defaults = dict(HTTPRequest._DEFAULTS)
if defaults is not None:
self.defaults.update(defaults)
self._closed = False
def close(self):
"""Destroys this HTTP client, freeing any file descriptors used.
This method is **not needed in normal use** due to the way
that `AsyncHTTPClient` objects are transparently reused.
``close()`` is generally only necessary when either the
`.IOLoop` is also being closed, or the ``force_instance=True``
argument was used when creating the `AsyncHTTPClient`.
No other methods may be called on the `AsyncHTTPClient` after
``close()``.
"""
if self._closed:
return
self._closed = True
if self._instance_cache is not None:
if self._instance_cache.get(self.io_loop) is not self:
raise RuntimeError("inconsistent AsyncHTTPClient cache")
del self._instance_cache[self.io_loop]
def fetch(self, request, callback=None, raise_error=True, **kwargs):
"""Executes a request, asynchronously returning an `HTTPResponse`.
The request may be either a
linkitspa/l10n-italy | l10n_it_withholding_tax_causali/__manifest__.py | Python | agpl-3.0 | 677 | 0
# -*- coding: utf-8 -*-
# Copyright 2018 Lorenzo Battistini - Agile Business Group
# License LGPL-3.0 or later (https://www.gnu.org/licenses/lgpl).
{
"name": "Causali pagamento per ritenute d'acconto",
"version": "10.0.1.0.0",
"development_status": "Beta",
"category": "Hidden",
"website
|
": "https://github.com/OCA/l10n-italy",
"author": "Agile Business Group, Odoo Community Association (OCA)",
"license": "LGPL-3",
"application": False,
"installable": True,
"depends": [
"l10n_it_wi
|
thholding_tax",
"l10n_it_causali_pagamento",
],
"data": [
"views/withholding_tax.xml",
],
'auto_install': True,
}
srvg/ansible | test/units/galaxy/test_collection.py | Python | gpl-3.0 | 38,071 | 0.002789
# -*- coding: utf-8 -*-
# Copyright: (c) 2019, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import json
import os
import pytest
import re
import tarfile
import uuid
from hashlib import sha256
from io import BytesIO
from units.compat.mock import MagicMock, mock_open, patch
import ansible.constants as C
from ansible import context
from ansible.cli.galaxy import GalaxyCLI, SERVER_DEF
from ansible.errors import AnsibleError
from ansible.galaxy import api, collection, token
from ansible.module_utils._text import to_bytes, to_native, to_text
from ansible.module_utils.six.moves import builtins
from ansible.utils import context_objects as co
from ansible.utils.display import Display
from ansible.utils.hashing import secure_hash_s
@pytest.fixture(autouse='function')
def reset_cli_args():
co.GlobalCLIArgs._Singleton__instance = None
yield
co.GlobalCLIArgs._Singleton__instance = None
@pytest.fixture()
def collection_input(tmp_path_factory):
''' Creates a collection skeleton directory for build tests '''
test_dir = to_text(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections Input'))
namespace = 'ansible_namespace'
collection = 'collection'
skeleton = os.path.join(os.path.dirname(os.path.split(__file__)[0]), 'cli', 'test_data', 'collection_skeleton')
galaxy_args = ['ansible-galaxy', 'collection', 'init', '%s.%s' % (namespace, collection),
'-c', '--init-path', test_dir, '--collection-skeleton', skeleton]
GalaxyCLI(args=galaxy_args).run()
collection_dir = os.path.join(test_dir, namespace, collection)
output_dir = to_text(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections Output'))
return collection_dir, output_dir
@pytest.fixture()
def collection_artifact(monkeypatch, tmp_path_factory):
''' Creates a temp collection artifact and mocked open_url instance for publishing tests '''
mock_open = MagicMock()
monkeypatch.setattr(collection.concrete_artifact_manager, 'open_url', mock_open)
mock_uuid = MagicMock()
mock_uuid.return_value.hex = 'uuid'
monkeypatch.setattr(uuid, 'uuid4', mock_uuid)
tmp_path = tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections')
input_file = to_text(tmp_path / 'collection.tar.gz')
with tarfile.open(input_file, 'w:gz') as tfile:
b_io = BytesIO(b"\x00\x01\x02\x03")
tar_info = tarfile.TarInfo('test')
tar_info.size = 4
tar_info.mode = 0o0644
tfile.addfile(tarinfo=tar_info, fileobj=b_io)
return input_file, mock_open
@pytest.fixture()
def galaxy_yml_dir(request, tmp_path_factory):
b_test_dir = to_bytes(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections'))
b_galaxy_yml = os.path.join(b_test_dir, b'galaxy.yml')
with open(b_galaxy_yml, 'wb') as galaxy_obj:
galaxy_obj.write(to_bytes(request.param))
yield b_test_dir
@pytest.fixture()
def tmp_tarfile(tmp_path_factory, manifest_info):
''' Creates a temporary tar file for _extract_tar_file tests '''
filename = u'ÅÑŚÌβŁÈ'
temp_dir = to_bytes(tmp_path_factory.mktemp('test-%s Collections' % to_native(filename)))
tar_file = os.path.join(temp_dir, to_bytes('%s.tar.gz' % filename))
data = os.urandom(8)
with tarfile.open(tar_file, 'w:gz') as tfile:
b_io = BytesIO(data)
tar_info = tarfile.TarInfo(filename)
tar_info.size = len(data)
tar_info.mode = 0o0644
tfile.addfile(tarinfo=tar_info, fileobj=b_io)
b_data = to_bytes(json.dumps(manifest_info, indent=True), errors='surrogate_or_strict')
b_io = BytesIO(b_data)
tar_info = tarfile.TarInfo('MANIFEST.json')
tar_info.size = len(b_data)
tar_info.mode = 0o0644
tfile.addfile(tarinfo=tar_info, fileobj=b_io)
sha256_hash = sha256()
sha256_hash.update(data)
with tarfile.open(tar_file, 'r') as tfile:
yield temp_dir, tfile, filename, sha256_hash.hexdigest()
@pytest.fixture()
def galaxy_server():
context.CLIARGS._store = {'ignore_certs': False}
galaxy_api = api.GalaxyAPI(None, 'test_server', 'https://galaxy.ansible.com',
token=token.GalaxyToken(token='key'))
return galaxy_api
@pytest.fixture()
def manifest_template():
def get_manifest_info(namespace='ansible_namespace', name='collection', version='0.1.0'):
return {
"collection_info": {
"namespace": namespace,
"name": name,
"version": version,
"authors": [
"shertel"
],
"readme": "README.md",
"tags": [
"test",
"collection"
],
"description": "Test",
"license": [
"MIT"
],
"license_file": None,
"dependencies": {},
"repositor
|
y": "https://github.com/{0}/{1}".format(namespace, name),
"documentation": None,
"homepage": None,
"issues": None
},
"file_manifest_file": {
"name": "FILES.json",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "files_manifest_checksum",
"format": 1
},
"format": 1
}
return get_manifest_info
@pytest.fixture()
def manifest_info(manifest_template):
return manifest_template()
@pytest.fixture()
def files_manifest_info():
return {
"files": [
{
"name": ".",
"ftype": "dir",
"chksum_type": None,
"chksum_sha256": None,
"format": 1
},
{
"name": "README.md",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "individual_file_checksum",
"format": 1
}
],
"format": 1}
@pytest.fixture()
def manifest(manifest_info):
b_data = to_bytes(json.dumps(manifest_info))
with patch.object(builtins, 'open', mock_open(read_data=b_data)) as m:
with open('MANIFEST.json', mode='rb') as fake_file:
yield fake_file, sha256(b_data).hexdigest()
@pytest.fixture()
def server_config(monkeypatch):
monkeypatch.setattr(C, 'GALAXY_SERVER_LIST', ['server1', 'server2', 'server3'])
default_options = dict((k, None) for k, v in SERVER_DEF)
server1 = dict(default_options)
server1.update({'url': 'https://galaxy.ansible.com/api/', 'validate_certs': False})
server2 = dict(default_options)
server2.update({'url': 'https://galaxy.ansible.com/api/', 'validate_certs': True})
server3 = dict(default_options)
server3.update({'url': 'https://galaxy.ansible.com/api/'})
return server1, server2, server3
@pytest.mark.parametrize('global_ignore_certs', [True, False])
def test_validate_certs(global_ignore_certs, server_config, monkeypatch):
get_plugin_options = MagicMock(side_effect=server_config)
monkeypatch.setattr(C.config, 'get_plugin_options', get_plugin_options)
cli_args = [
'ansible-galaxy',
'collection',
'install',
'namespace.collection:1.0.0',
]
if global_ignore_certs:
cli_args.append('--ignore-certs')
galaxy_cli = GalaxyCLI(args=cli_args)
mock_execute_install = MagicMock()
monkeypatch.setattr(galaxy_cli, '_execute_install_collection', mock_execute_install)
galaxy_cli.run()
assert galaxy_cli.api_servers[0].validate_certs is False
assert galaxy_cli.api_servers[1].validate_certs is True
assert galaxy_cli.api_servers[2].validate_certs is not global_ignore_certs
def test_build_collection_no_galaxy_yaml():
fake_path = u'/fake/ÅÑŚÌβŁÈ/path'
expected = to_native("The collection galaxy.yml path '%s/galaxy.yml' does not exist." % fake_path)
with pytest.raises(AnsibleError,
radio-ho0/dotfiles | _vim/plugin/TagHighlight/module/utilities.py | Python | gpl-2.0 | 5,265 | 0.003609
#!/usr/bin/env python
# Tag Highlighter:
# Author: A. S. Budden <abudden _at_ gmail _dot_ com>
# Copyright: Copyright (C) 2009-2011 A. S. Budden
# Permission is hereby granted to use and distribute this code,
# with or without modifications, provided that this copyright
# notice is copied with it. Like anything else that's free,
# the TagHighlight plugin is provided *as is* and comes with no
# warranty of any kind, either expressed or implied. By using
# this plugin, you agree that in no event will the copyright
# holder be liable for any damages resulting from the use
# of this software.
# ---------------------------------------------------------------------
from __future__ import print_function
import time
import re
# Used for timing a function; from http://www.daniweb.com/code/snippet368.html
# decorator: put @print_timing before a function to time it.
def print_timing(func):
def wrapper(*arg):
t1 = time.time()
res = func(*arg)
t2 = time.time()
print('{name} took {time:0.3f} ms'.format(name=func.__name__, time=(t2-t1)*1000.0))
return res
return wrapper
class TagHighlightOptionDict(dict):
"""Customised version of a dictionary that allows access by attribute."""
def __getattr__(self, name):
return self[name]
def __getitem__(self, name):
if name not in self:
from .options import AllOptions
for option in AllOptions.keys():
if option == name:
return AllOptions[option]['Default']
return super(TagHighlightOptionDict, self).__getitem__(name)
def __setattr__(self, name, value):
self[name] = value
class SetDict(dict):
"""Customised version of a dictionary that auto-creates non-existent keys as sets."""
def __getitem__(self, key):
if key not in self:
self[key] = set()
return super(SetDict, self).__getitem__(key)
def __setitem__(self, key, value):
if isinstance(value, set):
super(SetDict, self).__setitem__(key, value)
else:
super(SetDict, self).__setitem__(key, set([value]))
class DictDict(dict):
"""Customised version of a dictionary that auto-creates non-existent keys as SetDicts."""
def __getitem__(self, key):
if key not in self:
self[key] = SetDict()
return super(DictDict, self).__getitem__(key)
def __setitem__(self, key, value):
if isinstance(value, SetDict):
super(DictDict, self).__setitem__(key, value)
else:
raise NotImplementedError
def GenerateValidKeywordRange(iskeyword):
# Generally obeys Vim's iskeyword setting, but
# only allows characters in ascii range
ValidKeywordSets = iskeyword.split(',')
rangeMatcher = re.compile(r'^(?P<from>(?:\d+|\S))-(?P<to>(?:\d+|\S))$')
# the false-range matcher catches negated ranges such as '^48-57' (leading '^')
falseRangeMatcher = re.compile(r'^\^(?P<from>(?:\d+|\S))-(?P<to>(?:\d+|\S))$')
validList = []
for valid in ValidKeywordSets:
m = rangeMatcher.match(valid)
fm = falseRangeMatcher.match(valid)
if valid == '@':
for ch in [chr(i) for i in range(0,128)]:
if ch.isalpha():
validList.append(ch)
elif m is not None:
# We have a range of ascii values
if m.group('from').isdigit():
rangeFrom = int(m.group('from'))
else:
rangeFrom = ord(m.group('from'))
if m.group('to').isdigit():
rangeTo = int(m.group('to'))
else:
rangeTo = ord(m.group('to'))
validRange = list(range(rangeFrom, rangeTo+1))
# Restrict to ASCII
validRange = [i for i in validRange if i < 128]
for ch in [chr(i) for i in validRange]:
validList.append(ch)
elif fm is not None:
# We have a range of ascii values: remove them!
if fm.group('from').isdigit():
rangeFrom = int(fm.group('from'))
else:
rangeFrom = ord(fm.group('from'))
if fm.group('to').isdigit():
rangeTo = int(fm.group('to'))
else:
rangeTo = ord(fm.group('to'))
validRange = range(rangeFrom, rangeTo+1)
for ch in [chr(i) for i in validRange]:
for i in range(validList.count(ch)):
validList.remove(ch)
elif len(valid) == 1:
# Just a char
if ord(valid) < 128:
validList.append(valid)
else:
raise ValueError('Unrecognised iskeyword part: ' + valid)
return validList
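# A hedged sketch of the parser above (Vim-style iskeyword values; '@' means
# "all alphabetic characters", numeric entries are ASCII codes):
#   GenerateValidKeywordRange('@,48-57,_')   # letters, the digits 0-9 and '_'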
def IsValidKeyword(keyword, iskeyword):
for char in keyword:
if char not in iskeyword:
return False
return True
if __name__ == "__main__":
import pprint
test_obj = SetDict()
# Should be able to add an item to the list
pprint.pprint(test_obj)
test_obj['MyIndex'].add('Hello')
test_obj['SetList'] = ['This', 'Is', 'A', 'List']
test_obj['SetString'] = 'This is a string'
# These should all be lists:
pprint.pprint(test_obj)
creasyw/IMTAphy | framework/library/PyConfig/openwns/probebus.py | Python | gpl-2.0 | 9,523 | 0.006826
###############################################################################
# This file is part of openWNS (open Wireless Network Simulator)
# _____________________________________________________________________________
#
# Copyright (C) 2004-2007
# Chair of Communication Networks (ComNets)
# Kopernikusstr. 16, D-52074 Aachen, Germany
# phone: ++49-241-80-27910,
# fax: ++49-241-80-22242
# email: info@openwns.org
# www: http://www.openwns.org
# _____________________________________________________________________________
#
# openWNS is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License version 2 as published by the
# Free Software Foundation;
#
# openWNS is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
###############################################################################
import openwns
import openwns.logger
from openwns.pyconfig import attrsetter
import openwns.interface
class NeedsFilename(openwns.interface.Interface):
@openwns.interface.abstractmethod
def setFilename(self, filename):
pass
class MeasurementSource(object):
def __init__(self):
object.__init__(self)
self.observers = []
def addObserver(self, probeBus):
self.observers.append(probeBus)
return probeBus
class ProbeBus(MeasurementSource):
def __init__(self):
MeasurementSource.__init__(self)
def observe(self, probeBus):
probeBus.addObserver(self)
return probeBus
class ProbeBusRegistry(object):
def __init__(self):
super(ProbeBusRegistry, self).__init__()
self.measurementSources = {}
self.logger = openwns.logger.Logger("WNS", "ProbeBusRegistry", True)
def getMeasurementSource(self, probeBusID):
if probeBusID not in self.measurementSources:
self.measurementSources[probeBusID] = MeasurementSource()
return self.measurementSources[probeBusID]
def removeMeasurementSource(self, probeBusID):
self.measurementSources.pop(probeBusID)
def getMeasurementSources(self):
return self.measurementSources
class PassThroughProbeBus(ProbeBus):
""" The PassThroughProbeBus always accepts and always forwards. """
nameInFactory = "PassThroughProbeBus"
def __init__(self):
ProbeBus.__init__(self)
class SettlingTimeGuardProbeBus(ProbeBus):
""" The SettlingTimeGuardProbeBus only accepts if the global settling time (transient phase)
has elapsed"""
nameInFactory = "SettlingTimeGuardProbeBus"
def __init__(self, settlingTime):
ProbeBus.__init__(self)
self.settlingTime = settlingTime
class LoggingProbeBus(ProbeBus):
""" The LoggingProbeBus always accepts and logs the message to the logging subsystem.
"""
nameInFactory = "LoggingProbeBus"
def __init__(self, probeName='', parentLogger=None):
ProbeBus.__init__(self)
if len(probeName) > 0:
probeName = '.' + probeName
self.logger = openwns.logger.Logger("WNS", "LoggingProbeBus"+probeName, True, parentLogger)
class PythonProbeBus(ProbeBus):
""" Use the PythonProbeBus to do all your probing work in python. Specify what to do
in accepts, onMeasurement, output from within your configuration file."""
nameInFactory = "PythonProbeBus"
def _dummyOnMeasurement(timestamp, value, reg):
pass
def _dummyOutput():
pass
def __init__(self, acceptsFunction, onMeasurementFunction = _dummyOnMeasurement, outputFunction = _dummyOutput):
ProbeBus.__init__(self)
self.accepts = acceptsFunction
self.onMeasurement = onMeasurementFunction
self.output = outputFunction
self.reportErrors = True
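# A hedged configuration sketch (the callable signature is an assumption, not
# part of this file):
#   def acceptsAll(timestamp, registry):
#       return True
#   pb = PythonProbeBus(acceptsFunction=acceptsAll)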
class TimeWindowProbeBus(ProbeBus):
""" Only accepts
|
for a certain time window given by start and end time"""
nameInFactory = "TimeWindowProbeBus"
def __init__(self, start, end):
ProbeBus.__init__(self)
self.start = start
self.end = end
class TimeSeriesProbeBus(ProbeBus):
""" The LogEval ProbeBus always accepts and logs the values into a file.
"""
nameInFactory = "TimeSeriesProbeBus"
outputFilename = None
format = None
timePrecision = None
valuePrecision = None
name = None
description = None
contextKeys = None
def __init__(self, outputFilename, format, timePrecision, valuePrecision, name, desc, contextKeys):
ProbeBus.__init__(self)
self.outputFilename = outputFilename
self.format = format
self.timePrecision = timePrecision
self.valuePrecision = valuePrecision
self.name = name
self.description = desc
self.contextKeys = contextKeys
class ContextFilterProbeBus(ProbeBus):
nameInFactory = "ContextFilterProbeBus"
idName = None
idValues = None
def __init__(self, _idName, _idValues, _outputName = None):
ProbeBus.__init__(self)
self.idName = _idName
self.idValues = _idValues
class ConstantContextProvider(object):
__plugin__ = "wns.ProbeBus.ConstantContextProvider"
""" Name in the static factory """
key = None
""" The name of the context """
value = None
""" A constant integer value """
def __init__(self, key, value):
super(ConstantContextProvider, self).__init__()
self.key = key
self.value = value
class StatEvalProbeBus(ProbeBus):
nameInFactory = "StatEvalProbeBus"
statEval = None
appendFlag = None
def __init__(self, outputFilename, statEvalConfig):
ProbeBus.__init__(self)
self.outputFilename = outputFilename
self.statEval = statEvalConfig
if (statEvalConfig.appendFlag == None):
self.appendFlag = False
else:
self.appendFlag = statEvalConfig.appendFlag
class TabPar:
"""
Helper Class to configure the TableProbeBus.
Configure one of these for each dimension of your table.
Parameters:
idName: the name in the IDregistry/Context under which the
value for this axis should be searched
minimum: min value of the axis
maximum: max value of the axis
resolution: number of equidistant intervals into which the
range from min to max will be divided. Note that
the maximum value will be counted into the last interval
"""
idName = None
minimum = None
maximum = None
resolution = None
def __init__(self, idName, minimum, maximum, resolution):
self.idName = idName
self.minimum = minimum
self.maximum = maximum
self.resolution = resolution
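# A hedged sketch of one table axis (parameter names per the docstring above):
#   delayAxis = TabPar(idName='delay', minimum=0.0, maximum=1.0, resolution=10)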
class TableProbeBus(ProbeBus):
"""
The TableProbeBus consumes measurement values and sorts them
into n-dimensional tables of statistical evaluation objects.
Parameters:
axisParams: list of TabPar objecst, one for each dimension of the desired table
outputFilename: base name of the output files produced by the TableProbeBus
evals: list of strings with the requested statistics, possible values are:
'mean', 'variance', 'relativeVariance', 'coeffOfVariation', 'M2', 'M3', 'Z3',
'skewness', 'deviation', 'relativeDeviation', 'trials', 'min', 'max'
formats: list of strings with the requested output formats, possible values are:
'HumanReadable', 'PythonReadable', 'MatlabReadable', 'Matlab
henry-ngo/VIP | vip_hci/stats/cube_stats.py | Python | mit | 9,662 | 0.009004
#! /usr/bin/env python
"""
Module for stats of a fits-cube.
"""
from __future__ import division
__author__ = 'C. Gomez @ ULg'
__all__ = ['cube_stats_aperture',
'cube_stats_annulus']
import numpy as np
import scipy.stats
from matplotlib import pyplot as plt
from ..var import get_annulus, get_circle, get_annulus_cube
def cube_stats_aperture(arr, radius, xy=None, plot=False, full_output=False):
"""Calculates statistics in an aperture on a 2D or 3D array and plots the
variation of the mean, median and standard deviation as a function of time.
Parameters
----------
arr : array_like
Input array.
radius : int
Radius.
xy : tuple of ints
Coordinates of the center of the aperture.
plot : None,1,2, optional
If 1 or True it plots the mean, std_dev and max. Also the histogram.
If 2 it also plots the linear correlation between the median and the
std_dev.
full_output : {False,True}, optional
If true it returns mean, std_dev, median and maxi; if false just the mean.
Returns
-------
If full_output is true it returns mean, std_dev, median and maxi. If false
only the mean.
"""
if arr.ndim == 2:
if xy is not None:
x, y = xy
circle = get_circle(arr, radius, output_values=True, cy=y, cx=x)
else:
circle = get_circle(arr, radius, output_values=True)
maxi = circle.max()
mean = circle.mean()
std_dev = circle.std()
median = np.median(circle)
if arr.ndim == 3:
n = arr.shape[0]
mean = np.empty(n)
std_dev = np.empty(n)
median = np.empty(n)
maxi = np.empty(n)
values_circle = []
for i in range(n):
if xy is not None:
x, y = xy
circle = get_circle(arr[i], radius, output_values=True, cy=y, cx=x)
else:
circle = get_circle(arr[i], radius, output_values=True)
values_circle.append(circle)
maxi[i] = circle.max()
mean[i] = circle.mean()
std_dev[i] = circle.std()
median[i] = np.median(circle)
values_circle = np.array(values_circle).flatten()
if plot==1 or plot==2:
plt.figure('Image crop (first slice)', figsize=(12,3))
if xy is not None:
x, y = xy
temp = get_circle(arr[0], radius, cy=y, cx=x)
else:
temp = get_circle(arr[0], radius)
ax1 = plt.subplot(1, 2, 1)
ax1.imshow(arr[0], origin = 'lower', interpolation="nearest",
cmap = plt.get_cmap('gray'), alpha=0.8)
ax1.imshow(temp, origin = 'lower', interpolation="nearest",
cmap = plt.get_cmap('CMRmap'), alpha=0.6)
plt.axis('on')
ax2 = plt.subplot(1, 2, 2)
ax2.hist(values_circle, bins=int(np.sqrt(values_circle.shape[0])),
alpha=0.5, histtype='stepfilled', label='Histogram')
ax2.legend()
ax2.tick_params(axis='x', labelsize=8)
fig = plt.figure('Stats in annulus', figsize=(12, 6))
fig.subplots_adjust(hspace=0.15)
ax1 = plt.subplot(3, 1, 1)
std_of_means = np.std(mean)
median_of_means = np.median(mean)
lab = 'mean (median={:.1f}, stddev={:.1f})'.format(median_of_means,
std_of_means)
ax1.axhline(median_of_means, alpha=0.5, color='gray', lw=2, ls='--')
ax1.plot(mean, '.-', label=lab, lw = 0.8, alpha=0.6, marker='o',
color='b')
ax1.legend(loc='best', fancybox=True).get_frame().set_alpha(0.5)
ax1.grid(True)
plt.setp(ax1.get_xticklabels(), visible=False)
ax2 = plt.subplot(3, 1, 2, sharex=ax1)
ax2.plot(std_dev, '.-', label='std_dev', lw = 0.8, alpha=0.6,
marker='o', color='r')
ax2.legend(loc='best', fancybox=True).get_frame().set_alpha(0.5)
ax2.grid(True)
plt.setp(ax2.get_xticklabels(), visible=False)
ax3 = plt.subplot(3, 1, 3, sharex=ax1)
ax3.plot(maxi, '.-', label='max', lw=0.8, alpha=0.6, marker='o',
color='g')
ax3.legend(loc='best', fancybox=True).get_frame().set_alpha(0.5)
ax3.grid(True)
if plot==2:
plt.figure('Std_dev - mean in annulus', figsize=(4, 4))
plt.scatter(std_dev, mean, alpha=0.6)
m, b = np.polyfit(std_dev, mean, 1)
corr, _ = scipy.stats.pearsonr(mean, std_dev)
plt.plot(std_dev, m*std_dev + b, '-', label=corr, alpha=0.6)
plt.xlabel('Mean')
plt.ylabel('Standard deviation')
plt.legend()
if full_output:
return mean, std_dev, median, maxi
else:
return mean
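# A hedged usage sketch (assumes `cube` is a 3D frame stack):
#   means = cube_stats_aperture(cube, radius=4, xy=(50, 50))
#   mean, std_dev, median, maxi = cube_stats_aperture(cube, 4, full_output=True)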
def cube_stats_annulus(array, inner_radius, size, plot=None, full_out=False):
"""Calculates statistics in a centered annulus on a 2D or 3D array and
plots the variation of the mean, median and standard deviation as a
function of time.
Parameters
----------
array : array_like
Input array.
inner_radius : int
Annulus inner radius.
size : int
How many pixels in radial direction contains the annulus.
plot : None,1,2, optional
If 1 or True it plots the mean, std_dev and max. Also the histogram.
If 2 it also plots the linear correlation between the median and the
std_dev.
full_out : {False,True}, optional
If true it returns mean, std_dev and median; if false just the mean.
Returns
-------
If full_out is true it returns mean, std_dev and median; if false
only the mean.
"""
if array.ndim==2:
arr = array.copy()
annulus = get_annulus(arr, inner_radius, size, output_values=True)
mean = annulus.mean()
std_dev = annulus.std()
median = np.median(annulus)
maxi = annulus.max()
if array.ndim==3:
n = array.shape[0]
mean = np.empty(n)
std_dev = np.empty(n)
median = np.empty(n)
maxi = np.empty(n)
for i in range(n):
arr = array[i].copy()
annulus = get_annulus(arr, inner_radius, size, output_values=True)
mean[i] = annulus.mean()
std_dev[i] = annulus.std()
median[i] = np.median(annulus)
maxi[i] = annulus.max()
if plot==1 or plot==2:
plt.figure('Image crop (first slice)', figsize=(12,3))
temp = get_annulus_cube(array, inner_radius, size)
ax1 = plt.subplot(1, 2, 1)
ax1.imshow(array[0], origin = 'lower', interpolation="nearest",
cmap = plt.get_cmap('gray'), alpha=0.8)
ax1.imshow(temp[0], origin = 'lower', interpolation="nearest",
cmap = plt.get_cmap('CMRmap'), alpha=0.6)
plt.axis('on')
ax2 = plt.subplot(1, 2, 2)
values = temp[np.where(temp>0)]
ax2.hist(values.ravel(), bins=int(np.sqrt(values.shape[0])),
alpha=0.5, histtype='stepfilled', label='Histogram')
ax2.legend()
ax2.tick_params(axis='x', labelsize=8)
fig = plt.figure('Stats in annulus', figsize=(12, 6))
fig.subplots_adjust(hspace=0.15)
ax1 = plt.subplot(3, 1, 1)
std_of_means = np.std(mean)
median_of_means = np.median(mean)
lab = 'mean (median={:.1f}, stddev={:.1f})'.format(median_of_means,
std_of_means)
ax1.axhline(median_of_means, alpha=0.5, color='gray', lw=2, ls=
attm2x/m2x-demo-heroku-python | stockreport.py | Python | mit | 1,141 | 0.000876
#!/usr/bin/env python
from requests.exceptions import HTTPError
from datetime import datetime
import ystockquote
from m2x.client import M2XClient
def post_stock_price(symbol, apikey, devicename):
'''
Retrieve the stock price for the given ticker symbol ("T" for AT&T) and
post it in the correct M2X data stream.
'''
client = M2XClient(key=apikey)
# Find the correct device if it exists, if not create it.
try:
device = [d for d in client.devices(q=devicename) if d.name == devicename][0]
except IndexError:
device = client.create_device(name=devicename,
description="Stockreport Example Device",
visibility="private")
# Get the stream if it exists, if not create the stream.
try:
stream = device.stream(symbol)
except HTTPError:
stream = device.create_stream(symbol)
device.update_stream(symbol, unit={'label': 'Dollars', 'symbol': '$'})
postime = datetime.now()
stock_price = ystockquote.get_price(symbol).encode('utf-8')
stream.add_value(stock_price, postime)
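# A hedged usage sketch (key and device name are placeholders):
#   post_stock_price('T', '<YOUR-M2X-API-KEY>', 'Stockreport')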
alvations/sacremoses | sacremoses/tokenize.py | Python | mit | 31,370 | 0.000893
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import re
from six import text_type
from sacremoses.corpus import Perluniprops
from sacremoses.corpus import NonbreakingPrefixes
from sacremoses.util import is_cjk
from sacremoses.indic import VIRAMAS, NUKTAS
perluniprops = Perluniprops()
nonbreaking_prefixes = NonbreakingPrefixes()
class MosesTokenizer(object):
"""
This is a Python port of the Moses Tokenizer from
https://github.com/moses-smt/mosesdecoder/blob/master/scripts/tokenizer/tokenizer.perl
"""
# Perl Unicode Properties character sets.
IsN = text_type("".join(perluniprops.chars("IsN")))
IsAlnum = text_type(
"".join(perluniprops.chars("IsAlnum")) + "".join(VIRAMAS) + "".join(NUKTAS)
)
IsSc = text_type("".join(perluniprops.chars("IsSc")))
IsSo = text_type("".join(perluniprops.chars("IsSo")))
IsAlpha = text_type(
"".join(perluniprops.chars("IsAlpha")) + "".join(VIRAMAS) + "".join(NUKTAS)
)
IsLower = text_type("".join(perluniprops.chars("IsLower")))
# Remove ASCII junk.
DEDUPLICATE_SPACE = r"\s+", r" "
ASCII_JUNK = r"[\000-\037]", r""
# Neurotic Perl heading space, multi-space and trailing space chomp.
# These regexes are kept for reference purposes and shouldn't be used!!
MID_STRIP = r" +", r" " # Use DEDUPLICATE_SPACE instead.
LEFT_STRIP = r"^ ", r"" # Uses text.lstrip() instead.
RIGHT_STRIP = r" $", r"" # Uses text.rstrip() instead.
# Pad all "other" special characters not in IsAlnum.
PAD_NOT_ISALNUM = r"([^{}\s\.'\`\,\-])".format(IsAlnum), r" \1 "
# Splits all hyphens (regardless of circumstances), e.g.
# 'foo-bar' -> 'foo @-@ bar'
AGGRESSIVE_HYPHEN_SPLIT = (
r"([{alphanum}])\-(?=[{alphanum}])".format(alphanum=IsAlnum),
r"\1 @-@ ",
)
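# A hedged sketch of how these (pattern, replacement) pairs are applied, using
# re.sub (illustrative only):
#   pattern, repl = AGGRESSIVE_HYPHEN_SPLIT
#   re.sub(pattern, repl, u"foo-bar")   # -> u"foo @-@ bar"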
# Make multi-dots stay together.
REPLACE_DOT_WITH_LITERALSTRING_1 = r"\.([\.]+)", " DOTMULTI\1"
REPLACE_DOT_WITH_LITERALSTRING_2 = r"DOTMULTI\.([^\.])", "DOTDOTMULTI \1"
REPLACE_DOT_WITH_LITERALSTRING_3 = r"DOTMULTI\.", "DOTDOTMULTI"
# Separate out "," except if within numbers (5,300)
# e.g. A,B,C,D,E > A , B,C , D,E
# First application uses up B so rule can't see B,C
# two-step version here may create extra spaces but these are removed later
# will also space digit,letter or letter,digit forms (redundant with next section)
COMMA_SEPARATE_1 = r"([^{}])[,]".format(IsN), r"\1 , "
COMMA_SEPARATE_2 = r"[,]([^{}])".format(IsN), r" , \1"
COMMA_SEPARATE_3 = r"([{}])[,]$".format(IsN), r"\1 , "
# Attempt to get correct directional quotes.
DIRECTIONAL_QUOTE_1 = r"^``", r"`` "
DIRECTIONAL_QUOTE_2 = r'^"', r"`` "
DIRECTIONAL_QUOTE_3 = r"^`([^`])", r"` \1"
DIRECTIONAL_QUOTE_4 = r"^'", r"` "
DIRECTIONAL_QUOTE_5 = r'([ ([{<])"', r"\1 `` "
DIRECTIONAL_QUOTE_6 = r"([ ([{<])``", r"\1 `` "
DIRECTIONAL_QUOTE_7 = r"([ ([{<])`([^`])", r"\1 ` \2"
DIRECTIONAL_QUOTE_8 = r"([ ([{<])'", r"\1 ` "
# Replace ... with _ELLIPSIS_
REPLACE_ELLIPSIS = r"\.\.\.", r" _ELLIPSIS_ "
# Restore _ELLIPSIS_ with ...
RESTORE_ELLIPSIS = r"_ELLIPSIS_", r"\.\.\."
# Pad , with tailing space except if within numbers, e.g. 5,300
COMMA_1 = r"([^{numbers}])[,]([^{numbers}])".format(numbers=IsN), r"\1 , \2"
COMMA_2 = r"([{numbers}])[,]([^{numbers}])".format(numbers=IsN), r"\1 , \2"
COMMA_3 = r"([^{numbers}])[,]([{numbers}])".format(numbers=IsN), r"\1 , \2"
# Pad unicode symbols with spaces.
SYMBOLS = r"([;:@#\$%&{}{}])".format(IsSc, IsSo), r" \1 "
# Separate out intra-token slashes. PTB tokenization doesn't do this, so
# the tokens should be merged prior to parsing with a PTB-trained parser.
# e.g. "and/or" -> "and @/@ or"
INTRATOKEN_SLASHES = (
r"([{alphanum}])\/([{alphanum}])".format(alphanum=IsAlnum),
r"$1 \@\/\@ $2",
)
# Splits final period at end of string.
FINAL_PERIOD = r"""([^.])([.])([\]\)}>"']*) ?$""", r"\1 \2\3"
# Pad all question marks and exclamation marks with spaces.
PAD_QUESTION_EXCLAMATION_MARK = r"([?!])", r" \1 "
# Handles parentheses, brackets and converts them to PTB symbols.
PAD_PARENTHESIS = r"([\]\[\(\){}<>])", r" \1 "
CONVERT_PARENTHESIS_1 = r"\(", "-LRB-"
CONVERT_PARENTHESIS_2 = r"\)", "-RRB-"
CONVERT_PARENTHESIS_3 = r"\[", "-LSB-"
CONVERT_PARENTHESIS_4 = r"\]", "-RSB-"
CONVERT_PARENTHESIS_5 = r"\{", "-LCB-"
CONVERT_PARENTHESIS_6 = r"\}", "-RCB-"
# Pads double dashes with spaces.
PAD_DOUBLE_DASHES = r"--", " -- "
# Adds spaces to start and end of string to simplify further regexps.
PAD_START_OF_STR = r"^", " "
PAD_END_OF_STR = r"$", " "
# Converts double quotes to two single quotes and pad with spaces.
CONVERT_DOUBLE_TO_SINGLE_QUOTES = r'"', " '' "
# Handles single quote in possessives or close-single-quote.
HANDLES_SINGLE_QUOTES = r"([^'])' ", r"\1 ' "
# Pad apostrophe in possessive or close-single-quote.
APOSTROPHE = r"([^'])'", r"\1 ' "
# Prepend space on contraction apostrophe.
CONTRACTION_1 = r"'([sSmMdD]) ", r" '\1 "
CONTRACTION_2 = r"'ll ", r" 'll "
CONTRACTION_3 = r"'re ", r" 're "
CONTRACTION_4 = r"'ve ", r" 've "
CONTRACTION_5 = r"n't ", r" n't "
CONTRACTION_6 = r"'LL ", r" 'LL "
CONTRACTION_7 = r"'RE ", r" 'RE "
CONTRACTION_8 = r"'VE ", r" 'VE "
CONTRACTION_9 = r"N'T ", r" N'T "
# Informal Contractions.
CONTRACTION_10 = r" ([Cc])annot ", r" \1an not "
CONTRACTION_11 = r" ([Dd])'ye ", r" \1' ye "
CONTRACTION_12 = r" ([Gg])imme ", r" \1im me "
CONTRACTION_13 = r" ([Gg])onna ", r" \1on na "
CONTRACTION_14 = r" ([Gg])otta ", r" \1ot ta "
CONTRACTION_15 = r" ([Ll])emme ", r" \1em me "
CONTRACTION_16 = r" ([Mm])ore'n ", r" \1ore 'n "
CONTRACTION_17 = r" '([Tt])is ", r" '\1 is "
CONTRACTION_18 = r" '([Tt])was ", r" '\1 was "
CONTRACTION_19 = r" ([Ww])anna ", r" \1an na "
# Clean out extra spaces
CLEAN_EXTRA_SPACE_1 = r" *", r" "
CLEAN_EXTRA_SPACE_2 = r"^ *", r""
CLEAN_EXTRA_SPACE_3 = r" *$", r""
# Neurotic Perl regexes to escape special characters.
ESCAPE_AMPERSAND = r"&", r"&amp;"
ESCAPE_PIPE = r"\|", r"&#124;"
ESCAPE_LEFT_ANGLE_BRACKET = r"<", r"&lt;"
ESCAPE_RIGHT_ANGLE_BRACKET = r">", r"&gt;"
ESCAPE_SINGLE_QUOTE = r"\'", r"&apos;"
ESCAPE_DOUBLE_QUOTE = r"\"", r"&quot;"
ESCAPE_LEFT_SQUARE_BRACKET = r"\[", r"&#91;"
ESCAPE_RIGHT_SQUARE_BRACKET = r"]", r"&#93;"
EN_SPECIFIC_1 = r"([^{alpha}])[']([^{alpha}])".format(alpha=IsAlpha), r"\1 ' \2"
EN_SPECIFIC_2 = (
r"([^{alpha}{isn}])[']([{alpha}])".format(alpha=IsAlpha, isn=IsN),
r"\1 ' \2",
)
EN_SPECIFIC_3 = r"([{alpha}])[']([^{alpha}])".format(alpha=IsAlpha), r"\1 ' \2"
EN_SPECIFIC_4 = r"([{alpha}])[']([{alpha}])".format(alpha=IsAlpha), r"\1 '\2"
EN_SPECIFIC_5 = r"([{isn}])[']([s])".format(isn=IsN), r"\1 '\2"
ENGLISH_SPECIFIC_APOSTROPHE = [
EN_SPECIFIC_1,
EN_SPECIFIC_2,
EN_SPECIFIC_3,
EN_SPECIFIC_4,
EN_SPECIFIC_5,
]
FR_IT_SPECIFIC_1 = r"([^{alpha}])[']([^{alpha}])".format(alpha=IsAlpha), r"\1 ' \2"
FR_IT_SPECIFIC_2 = r"([^{alpha}])[']([{alpha}])".format(alpha=IsAlpha), r"\1 ' \2"
FR_IT_SPECIFIC_3 = r"([{alpha}])[']([^{alpha}])".format(alpha=IsAlpha), r"\1 ' \2"
FR_IT_SPECIFIC_4 = r"([{alpha}])[']([{alpha}])".format(alpha=IsAlpha), r"\1' \2"
FR_IT_SPECIFIC_APOSTROPHE = [
FR_IT_SPECIFIC_1,
FR_IT_SPECIFIC_2,
FR_IT_SPECIFIC_3,
FR_IT_SPECIFIC_4,
]
NON_SPECIFIC_APOSTROPHE = r"\'", " ' "
TRAILING_DOT_APOSTROPHE = r"\.' ?$", " . ' "
BASIC_PROTECTED_PATTERN_1 = r"<\/?\S+\/?>"
BASIC_PROTECTED_PATTERN_2 = r'<\S+( [a-zA-Z0-9]+\="?[^"]")+ ?\/?>'
BASIC_PROTECTED_PATTERN_3 = r"<\S+( [a-zA-Z0-9]+\='?[^']')+ ?\/?>"
BASIC_PROTECTED_PATTERN_4 = r"[\w\-\_\.]+\@([\w\-\_]+\.)+[a-zA-Z]{2,}"
BASIC_PROTECTED_PATTERN_5 = r"(http[s]?|ftp):\/\/[^:\/\s]+(\/\w+)*\/[\w\-\.]+"
MOSES_PENN_REGEXES_1 = [
DEDUPLICATE_SPACE,
ASCII_JUNK,
DIRECTIONAL
JNU-Include/CNN | lib/mnist_classifier_del.py | Python | mit | 2,492 | 0.005659
import matplotlib.pyplot as plt
import tensorflow as tf
from softmax_del import Softmax
from lib import mytool
'''
gildong = MnistClassifier()
gildong.learn(3, 100) # epoch, partial_size
gildong.evaluate() # for all test data
gildong.classify_random_image() # classify a randomly selected image
#gildong.show_errors()
'''
class MnistClassifier (Softmax):
db = None
learning_epoch = None #15
size_of_segment = None #100
def load_mnist(self):
return mytool.load_mnist()
def learn(self, epoch, partial):
self.learning_epoch = epoch
self.size_of_segment = partial
self.db = self.load_mnist()
super().learn(self.db, self.learning_epoch, self.size_of_segment)
def get_number_of_segment(self):
return int(self.db.train.num_examples / self.size_of_segment) #55,000 / 100
def get_next_segment(self):
return self.db.train.next_batch(self.size_of_segment)
def get_image(self, index):
# Get one and predict
image = self.db.test.images[index:index+1]
return image
def get_label(self, index):
label = self.db.test.labels[index:index+1]
return label
def get_class(self, index):
label = self.db.test.labels[index:index+1]
return self.sess.run(tf.argmax(label, 1))
def classify(self, an_image):
category = self.sess.run(tf.argmax(self.hypothesis, 1), feed_dict={self.X: an_image})
return category
def classify_random_image(self):
index = mytool.get_random_int(self.db.test.num_examples)
image = self.get_image(index)
label = self.get_class(index)
category = self.classify(image)
print('Label', label)
print('Classified', category)
self.show_image(image)
def show_image(self, image):
plt.imshow(image.reshape(28, 28), cmap='Greys', interpolation='nearest')
plt.show()
# Evaluate with the test data
def evaluate(self):
# Test model
is_correct = tf.equal(tf.argmax(self.hypothesis, 1), tf.argmax(self.Y, 1))
# Calculate accuracy
accuracy = tf.reduce_mean(tf.cast(is_correct, tf.float32))
# Test the model using test sets
result = accuracy.eval(session=self.sess, feed_dict={self.X: self.db.test.images, self.Y: self.db.test.labels})
#result = self.sess.run(accuracy, feed_dict={self.X: db.test.images, self.Y: db.test.labels})
print("Recognition rate :", result)
Aidturith/python-training | utils/random_pass/random_password.py | Python | mit | 4,858 | 0.005569
# -*- coding: utf-8 -*-
# tools, 5 June 2017
import string
import math
import random
import argparse
from argparse import RawTextHelpFormatter
CONSO_MAJ = u"C"
CONSO_MIN = u"c"
VOWEL_MAJ = u"V"
VOWEL_MIN = u"v"
LETTER_MAJ = u"L"
LETTER_MIN = u"l"
NUMBER = u"n"
PONCTION = u"P"
PONCTION_SMA = u"p"
VALID_CHARSET = {
CONSO_MAJ: u"BCDFGHJKLMNPQRSTVWXZ",
CONSO_MIN: u"BCDFGHJKLMNPQRSTVWXZ".lower(),
VOWEL_MAJ: u"AEIOUY",
VOWEL_MIN: u"AEIOUY".lower(),
LETTER_MAJ: string.ascii_uppercase,
LETTER_MIN: string.ascii_lowercase,
PONCTION: string.punctuation,
PONCTION_SMA: u"!\"#$%&'*+,-.:;=?@_",
NUMBER: string.digits,
}
class RandomPassword():
def _pattern_validation(self, pattern):
for char in pattern:
if char not in VALID_CHARSET.keys():
raise LookupError()
def _get_charset_len(self, pattern):
charset_len = 0
if CONSO_MIN in pattern and LETTER_MIN not in pattern:
charset_len += len(VALID_CHARSET[CONSO_MIN])
if CONSO_MAJ in pattern and LETTER_MAJ not in pattern:
charset_len += len(VALID_CHARSET[CONSO_MAJ])
if VOWEL_MIN in pattern and LETTER_MIN not in pattern:
charset_len += len(VALID_CHARSET[VOWEL_MIN])
if VOWEL_MAJ in pattern and LETTER_MAJ not in pattern:
charset_len += len(VALID_CHARSET[VOWEL_MAJ])
if LETTER_MIN in pattern:
charset_len += len(VALID_CHARSET[LETTER_MIN])
if LETTER_MAJ in pattern:
charset_len += len(VALID_CHARSET[LETTER_MAJ])
if NUMBER in pattern:
charset_len += len(VALID_CHARSET[NUMBER])
if PONCTION in pattern:
charset_len += len(VALID_CHARSET[PONCTION])
if PONCTION_SMA in pattern:
charset_len += len(VALID_CHARSET[PONCTION_SMA])
return charset_len
def _get_password(self, pattern):
try:
self._pattern_validation(pattern)
except LookupError:
print(u"Le format n'est pas valide!")
password = u''
for char in pattern:
for charset_key in VALID_CHARSET.keys():
if char == charset_key:
password += random.SystemRandom().choice(VALID_CHARSET[charset_key])
return password
def get_passwords(self, pattern, iterations):
passwords = []
for n in range(0, iterations):
passwords.append(self._get_password(pattern))
return passwords
def get_entropy(self, pattern):
try:
self._pattern_validation(pattern)
except LookupError:
print(u"Le format n'est pas valide!")
charset_len = self._get_charset_len(pattern)
pattern_len = len(pattern)
entropy = math.log(math.pow(charset_len, pattern_len), 2)
return round(entropy)
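# Worked example (illustrative, derived from the charsets above): for the
# pattern u'Cvcvnnnn' the charset length is 20 (C) + 6 (v) + 20 (c) + 10 (n) = 56,
# so the entropy is log2(56**8) = 8 * log2(56), roughly 46 bits.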
def write_list(self, filepath, liste):
with open(filepath, u'w') as f:
for entry in liste: f.write(u'%s\n' % entry)
parser = argparse.ArgumentParser(description=u"Génération de mots de passe aléatoires.",
formatter_class=RawTextHelpFormatter)
parser.add_argument(u'pattern', help=u"format du mot de passe: \n \
%s pour les CONSONNES \n \
%s pour les consonnes \n \
%s pour les VOYELLES \n \
%s pour les voyelles \n \
%s pour les LETTRES \n \
%s pour les lettres \n \
%s pour les nombres \n \
%s pour la ponctuation étendue \n \
%s pour la ponctuation"
% (CONSO_MAJ, CONSO_MIN, VOWEL_MAJ, VOWEL_MIN,
LETTER_MAJ, LETTER_MIN, NUMBER, PONCTION, PONCTION_SMA))
parser.add_argument(u'-n', u'--number', type=int, default=1, help=u"nombre de mdp à générer")
parser.add_argument(u'-o', u'--output', type=argparse.FileType('w'), help=u"fichier de sortie")
parser.add_argument(u'-e', u'--entropy', action=u"store_true", help=u"affiche l'entropie des mdp générés en bits")
args = parser.parse_args() #[u'Cvcvnnnn', u'-en', u'35', u'-o', u'pass.txt']
rand_p = RandomPassword()
# ENTROPY
if(args.entropy):
print(u"L'entropie est de %s bits." % rand_p.get_entropy(args.pattern))
# LIST PASSWORDS
passwords = rand_p.get_passwords(args.pattern, iterations=args.number)
print(u"\nListe des mots de passe:")
for entry in passwords: print(u" - %s" % entry)
# WRITE PASSWORDS
if(args.output):
with args.output as f:
for entry in passwords: f.write(u'%s\n' % entry)
|
kubeflow/kfp-tekton-backend
|
sdk/python/kfp/components_tests/test_structure_model_base.py
|
Python
|
apache-2.0
| 9,140
| 0.003392
|
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
import unittest
from pathlib import Path
from typing import List, Dict, Union, Optional
from ..components.modelbase import ModelBase
class TestModel1(ModelBase):
_serialized_names = {
'prop_1': 'prop1',
'prop_2': 'prop 2',
'prop_3': '@@',
}
def __init__(self,
prop_0: str,
prop_1: Optional[str] = None,
prop_2: Union[int, str, bool] = '',
prop_3: 'TestModel1' = None,
prop_4: Optional[Dict[str, 'TestModel1']] = None,
prop_5: Optional[Union['TestModel1', List['TestModel1']]] = None,
):
#print(locals())
super().__init__(locals())
class StructureModelBaseTestCase(unittest.TestCase):
def test_handle_type_check_for_simple_builtin(self):
self.assertEqual(TestModel1(prop_0='value 0').prop_0, 'value 0')
with self.assertRaises(TypeError):
TestModel1(prop_0=1)
with self.assertRaises(TypeError):
TestModel1(prop_0=None)
with self.assertRaises(TypeError):
TestModel1(prop_0=TestModel1(prop_0='value 0'))
def test_handle_type_check_for_optional_builtin(self):
self.assertEqual(TestModel1(prop_0='', prop_1='value 1').prop_1, 'value 1')
self.assertEqual(TestModel1(prop_0='', prop_1=None).prop_1, None)
with self.assertRaises(TypeError):
TestModel1(prop_0='', prop_1=1)
with self.assertRaises(TypeError):
TestModel1(prop_0='', prop_1=TestModel1(prop_0='', prop_1='value 1'))
def test_handle_type_check_for_union_builtin(self):
self.assertEqual(TestModel1(prop_0='', prop_2='value 2').prop_2, 'value 2')
self.assertEqual(TestModel1(prop_0='', prop_2=22).prop_2, 22)
self.assertEqual(TestModel1(prop_0='', prop_2=True).prop_2, True)
with self.assertRaises(TypeError):
TestModel1(prop_0='', prop_2=None)
with self.assertRaises(TypeError):
TestModel1(prop_0='', prop_2=22.22)
with self.assertRaises(TypeError):
TestModel1(prop_0='', prop_2=TestModel1(prop_0='', prop_2='value 2'))
def test_handle_type_check_for_class(self):
val3 = TestModel1(prop_0='value 0')
self.assertEqual(TestModel1(prop_0='', prop_3=val3).prop_3, val3)
with self.assertRaises(TypeError):
TestModel1(prop_0='', prop_3=1)
with self.assertRaises(TypeError):
TestModel1(prop_0='', prop_3='value 3')
with self.assertRaises(TypeError):
TestModel1(prop_0='', prop_3=[val3])
def test_handle_type_check_for_dict_class(self):
val4 = TestModel1(prop_0='value 0')
self.assertEqual(TestModel1(prop_0='', prop_4={'key 4': val4}).prop_4['key 4'], val4)
with self.assertRaises(TypeError):
TestModel1(prop_0='', prop_4=1)
with self.assertRaises(TypeError):
TestModel1(prop_0='', prop_4='value 4')
with self.assertRaises(TypeError):
TestModel1(prop_0='', prop_4=[val4])
with self.assertRaises(TypeError):
TestModel1(prop_0='', prop_4={42: val4})
with self.assertRaises(TypeError):
TestModel1(prop_0='', prop_4={'key 4': [val4]})
def test_handle_type_check_for_union_dict_class(self):
val5 = TestModel1(prop_0='value 0')
self.assertEqual(TestModel1(prop_0='', prop_5=val5).prop_5, val5)
self.assertEqual(TestModel1(prop_0='', prop_5=[val5]).prop_5[0], val5)
self.assertEqual(TestModel1(prop_0='', prop_5=None).prop_5, None)
with self.assertRaises(TypeError):
TestModel1(prop_0='', prop_5=1)
with self.assertRaises(TypeError):
TestModel1(prop_0='', prop_5='value 5')
with self.assertRaises(TypeError):
TestModel1(prop_0='', prop_5={'key 5': 'value 5'})
with self.assertRaises(TypeError):
TestModel1(prop_0='', prop_5={42: val5})
with self.assertRaises(TypeError):
TestModel1(prop_0='', prop_5={'key 5': [val5]})
def test_handle_from_to_dict_for_simple_builtin(self):
struct0 = {'prop_0': 'value 0'}
obj0 = TestModel1.from_dict(struct0)
self.assertEqual(obj0.prop_0, 'value 0')
self.assertDictEqual(obj0.to_dict(), struct0)
with self.assertRaises(AttributeError): #TypeError:
TestModel1.from_dict(None)
with self.assertRaises(AttributeError): #TypeError:
TestModel1.from_dict('')
with self.assertRaises(TypeError):
TestModel1.from_dict({})
with self.assertRaises(TypeError):
TestModel1.from_dict({'prop0': 'value 0'})
def test_handle_from_to_dict_for_optional_builtin(self):
struct11 = {'prop_0': '', 'prop1': 'value 1'}
obj11 = TestModel1.from_dict(struct11)
self.assertEqual(obj11.prop_1, struct11['prop1'])
self.assertDictEqual(obj11.to_dict(), struct11)
struct12 = {'prop_0': '', 'prop1': None}
obj12 = TestModel1.from_dict(struct12)
self.assertEqual(obj12.prop_1, None)
self.assertDictEqual(obj12.to_dict(), {'prop_0': ''})
with self.assertRaises(TypeError):
TestModel1.from_dict({'prop_0': '', 'prop 1': ''})
with self.assertRaises(TypeError):
TestModel1.from_dict({'prop_0': '', 'prop1': 1})
def test_handle_from_to_dict_for_union_builtin(self):
struct21 = {'prop_0': '', 'prop 2': 'value 2'}
obj21 = TestModel1.from_dict(struct21)
self.assertEqual(obj21.prop_2, struct21['prop 2'])
self.assertDictEqual(obj21.to_dict(), struct21)
struct22 = {'prop_0': '', 'prop 2': 22}
obj22 = TestModel1.from_dict(struct22)
self.assertEqual(obj22.prop_2, struct22['prop 2'])
self.assertDictEqual(obj22.to_dict(), struct22)
struct23 = {'prop_0': '', 'prop 2': True}
obj23 = TestModel1.from_dict(struct23)
self.assertEqual(obj23.prop_2, struct23['prop 2'])
self.assertDictEqual(obj23.to_dict(), struct23)
with self.assertRaises(TypeError):
TestModel1.from_dict({'prop_0': 'ZZZ', 'prop 2': None})
with self.assertRaises(TypeError):
TestModel1.from_dict({'prop_0': '', 'prop 2': 22.22})
def test_handle_from_to_dict_for_class(self):
val3 = TestModel1(prop_0='value 0')
struct31 = {'prop_0': '', '@@': val3.to_dict()} #{'prop_0': '', '@@': TestModel1(prop_0='value 0')} is also valid for from_dict, but this cannot happen when parsing for real
obj31 = TestModel1.from_dict(struct31)
self.assertEqual(obj31.prop_3, val3)
self.assertDictEqual(obj31.to_dict(), struct31)
with self.assertRaises(TypeError):
TestModel1.from_dict({'prop_0': '', '@@': 'value 3'})
def test_handle_from_to_dict_for_dict_class(self):
val4 = TestModel1(prop_0='value 0')
struct41 = {'prop_0': '', 'prop_4': {'val 4': val4.to_dict()}}
obj41 = TestModel1.from_dict(struct41)
self.assertEqual(obj41.prop_4['val 4'], val4)
self.assertDictEqual(obj41.to_dict(), struct41)
with self.assertRaises(TypeError):
TestModel1.from_dict({'prop_0': '', 'prop_4': {44: val4.to_dict()}})
def test_handle_from_to_dict_for_union_dict_class(self):
val5 = TestModel1(prop_0='value 0')
struct51 = {'prop_0': '', 'prop_5': val5.to_dict()}
obj51 = TestModel1.from_dict(struct51)
self.assertEqual(obj51.prop_5, val5)
self.assertDictEqual(obj51.to_dict(), struct51)
|
zknyoyo/chartkick.py
|
examples/demo/charts/urls.py
|
Python
|
mit
| 128
| 0.007813
|
from django.conf.urls import patterns, url
urlpatterns = patterns('',
url(r'^$', 'charts.views.charts', name='charts'),
)
|
lucyzee/plivo_apps
|
outbound_call.py
|
Python
|
mit
| 871
| 0.014925
|
import plivo, plivoxml
auth_id = "MAODU4MTK1MDC0NTBMMM"
auth_token = "MWVkNWNlZWFlYjRmYmViNDBiZDAwNjA0NjA5OTQz"
p = plivo.RestAPI(auth_id, auth_token)
params = {
'to': '14153163136', # The phone number to which the call has to be placed
'from' : '1111111111', # The phone number to be used as the caller id
'answer_url' : "http://morning-ocean-4669.herokuapp.com/speak/", # The URL invoked by Plivo when the outbound call is answered
'answer_method' : "GET", # The method used to call the answer_url
# Example for Asynchronous request
#'callback_url' : "http://morning-ocean-4669.herokuapp.com/callback/", # The URL notified when the API response is available and to which the response is sent.
#'callback_method' : "GET" # The method used to notify the callback_url.
}
# Make an outbound call
response = p.make_call(params)
print str(response)
|
huaijiefeng/contact-sync-server
|
contactsync/urls.py
|
Python
|
apache-2.0
| 759
| 0.003953
|
from django.conf.urls import patterns, include, url
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
# admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'ssssss.views.home', name='home'),
# url(r'^ssssss/', include('ssssss.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
url(r'^$', 'contactsync.views.home', name='home'),
)
|
matus-chochlik/various
|
relfs/relfs/fuse/mount_root.py
|
Python
|
mit
| 2,188
| 0.003199
|
# coding=utf-8
#------------------------------------------------------------------------------#
import os
import time
import fuse
import errno
from .item import RelFuseItem
from .static_dir import StaticDirectory
#------------------------------------------------------------------------------#
class MountRoot(RelFuseItem):
# --------------------------------------------------------------------------
def __init__(self):
RelFuseItem.__init__(self)
self._mount_time = time.time()
self._relfs_dir = StaticDirectory()
self._repos_backstage = self._relfs_dir.add("repos", StaticDirectory())
self._repos = dict()
# --------------------------------------------------------------------------
def add_repo_root(self, name, item):
self._repos[name] = item
# --------------------------------------------------------------------------
def repos_backstage(self):
return self._repos_backstage
# --------------------------------------------------------------------------
def find_item(self, split_path):
if not split_path or split_path == ["."]:
return self
if split_path[0] == ".relfs":
return self._relfs_dir.find_item(split_path[1:])
try:
repo = self._repos[split_path[0]]
return repo.find_item(split_path[1:])
except KeyError:
pass
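# Example resolution (the repo name "myrepo" is hypothetical):
# find_item([]) or find_item(["."]) -> this root itself
# find_item([".relfs", "repos"]) -> resolved inside the static .relfs tree
# find_item(["myrepo", "x"]) -> delegated to self._repos["myrepo"].find_item(["x"])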
# --------------------------------------------------------------------------
def readdir(self, fh):
yield ".."
yield "."
yield ".relfs"
for name in self._repos:
yield name
# --------------------------------------------------------------------------
def _modify_time(self):
return self._mount_time
# --------------------------------------------------------------------------
def access(self, mode):
if mode & os.X_OK:
return 0
return RelFuseItem.access(self, mode)
# --------------------------------------------------------------------------
def _get_mode(self):
return 0o40550
#------------------------------------------------------------------------------#
|
FSource/Faeris
|
tool/binpy/libpy/files/Dir.py
|
Python
|
mit
| 703
| 0.069701
|
import os
def Dir_toStdName(path):
if not (path[-1] == "/" or path[-1] == "\\"):  # "//" could never equal a single character; a backslash was presumably intended
path=path+"/"
return path
def Dir_getFiles(path):
path=Dir_toStdName(path)
allfiles=[]
files=os.listdir(path)
for f in files:
abs_path = path + f
if os.path.isdir(abs_path):
sub_files=Dir_getFiles(abs_path)
sub_files=[ f+'/'+i for i in sub_files ]
allfiles.extend(sub_files)
else:
allfiles.append(f)
return allfiles
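# Example (hypothetical layout): for base/a.txt and base/sub/b.txt,
# Dir_getFiles("base") returns ['a.txt', 'sub/b.txt'] -- paths are relative
# to the scanned directory, in os.listdir() order.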
class Dir:
def __init__(self,dir_name):
self.m_dir=Dir_toStdName(dir_name)
def listDir(self):
return os.listdir(self.m_dir)
def listFiles(self):
return Dir_getFiles(self.m_dir)
if __name__ == "__main__":
d=Dir("../../")
print d.listFiles()
|
MOOCworkbench/MOOCworkbench
|
marketplace/migrations/0012_auto_20170604_1335.py
|
Python
|
mit
| 925
| 0.002162
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.1 on 2017-06-04 13:35
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('marketplace', '0011_auto_20170526_1215'),
]
operations = [
migrations.AlterField(
model_name='externalpackage',
name='project_page',
field=models.URLField(help_text='URL to the project page of the package, for example to the PyPi location'),
),
migrations.AlterField(
model_name='packageresource',
name='resource',
field=models.TextField(help_text='Markdown allowed'),
),
migrations.AlterField(
model_name='packageresource',
name='url',
field=models.URLField(blank=True, help_text='URL to resource (optional)', null=True),
),
]
|
Tecnativa/website
|
website_breadcrumb/models/website.py
|
Python
|
agpl-3.0
| 1,026
| 0
|
# -*- coding: utf-8 -*-
# © 2015 Grupo ESOC Ingeniería de Servicios, S.L.U. - Jairo Llopis
# License LGPL-3.0 or later (https://www.gnu.org/licenses/lgpl.html).
from openerp import api, models
class WebsiteMenu(models.Model):
_inherit = "website.menu"
@api.multi
def get_parents(self, revert=False, include_self=False):
"""List current menu's parents.
:param bool revert:
Indicates if the result must be reversed before returning.
Activating this will mean that the result will be ordered from
parent to child.
:param bool include_self:
Indicates if the current menu item must be included in the result.
:return list:
Menu items ordered from child to parent, unless ``revert=True``.
"""
result = list()
menu = self if include_self else self.parent_id
while menu:
result.append(menu)
menu = menu.parent_id
return reversed(result) if revert else result
|
vgrem/Office365-REST-Python-Client
|
examples/directory/delete_groups_batch.py
|
Python
|
mit
| 289
| 0.00346
|
from examples import acquire_token_by_username_password
from office365.graph_client import GraphClient
client = GraphClient(acquire_token_by_username_password)
groups = client.groups.get().top(1).execute_query()
for cur_grp in groups:
cur_grp.delete_object()
client.execute_batch()
|
johanvdw/Fiona
|
tests/test_drivers.py
|
Python
|
bsd-3-clause
| 760
| 0.001316
|
import logging
import os.path
import shutil
import sys
import tempfile
import fiona
def test_options(tmpdir=None):
"""Test that setting CPL_DEBUG=ON works"""
if tmpdir is None:
tempdir = tempfile.mkdtemp()
logfile = os.path.join(tempdir, 'example.log')
else:
logfile = str(tmpdir.join('example.log'))
logger = logging.getLogger('Fiona')
logger.setLevel(logging.DEBUG)
fh = logging.FileHandler(logfile)
fh.setLevel(logging.DEBUG)
logger.addHandler(fh)
with fiona.drivers(CPL_DEBUG=True):
c = fiona.open("docs/data/test_uk.shp")
c.close()
log = open(logfile).read()
assert "Option CPL_DEBUG" in log
if tempdir and tmpdir is None:
shutil.rmtree(tempdir)
|
sauloal/cnidaria
|
scripts/venv/lib/python2.7/site-packages/ete2/__init__.py
|
Python
|
mit
| 2,644
| 0.008699
|
# #START_LICENSE###########################################################
#
#
# This file is part of the Environment for Tree Exploration program
# (ETE). http://etetoolkit.org
#
# ETE is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ETE is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
# License for more details.
#
# You should have received a copy of the GNU General Public License
# along with ETE. If not, see <http://www.gnu.org/licenses/>.
#
#
# ABOUT THE ETE PACKAGE
# =====================
#
# ETE is distributed under the GPL copyleft license (2008-2015).
#
# If you make use of ETE in published work, please cite:
#
# Jaime Huerta-Cepas, Joaquin Dopazo and Toni Gabaldon.
# ETE: a python Environment for Tree Exploration. Jaime BMC
# Bioinformatics 2010,:24doi:10.1186/1471-2105-11-24
#
# Note that extra references to the specific methods implemented in
# the toolkit may be available in the documentation.
#
# More info at http://etetoolkit.org. Contact: huerta@embl.de
#
#
# #END_LICENSE#############################################################
# Note that the use of "from x import *" is safe here. Modules include
# the __all__ variable.
from warnings import warn
try:
import numpy
except ImportError, e:
numpy = None
#warn("Clustering module could not be loaded. Is numpy installed?")
#warn(e)
from ncbi_taxonomy import *
from coretype.tree import *
from coretype.seqgroup import *
from phylo.phylotree import *
from evol.evoltree import *
from webplugin.webapp import *
from phyloxml import Phyloxml, PhyloxmlTree
from nexml import Nexml, NexmlTree
from evol import EvolTree
from coretype.arraytable import *
from clustering.clustertree import *
try:
from phylomedb.phylomeDB3 import *
except ImportError, e:
pass
#warn("MySQLdb module could not be loaded")
#warn(e)
try:
from treeview.svg_colors import *
from treeview.main import *
from treeview.faces import *
from treeview import faces
from treeview import layouts
except ImportError, e:
#print e
pass
#warn("Treeview module could not be loaded")
#warn(e)
try:
from version import __version__, __installid__
except ImportError:
__version__ = 'dev'
__installid__ = None
|
mrustl/flopy
|
autotest/t004_test.py
|
Python
|
bsd-3-clause
| 19,118
| 0.004551
|
import os
import shutil
import numpy as np
import flopy
from flopy.utils.util_array import Util2d, Util3d, Transient2d
out_dir = "temp"
if os.path.exists(out_dir):
shutil.rmtree(out_dir)
os.mkdir(out_dir)
def test_transient2d():
ml = flopy.modflow.Modflow()
dis = flopy.modflow.ModflowDis(ml, nlay=10, nrow=10, ncol=10, nper=3)
t2d = Transient2d(ml, (10, 10), np.float32, 10., "fake")
a1 = t2d.array
assert a1.shape == (3, 1, 10, 10), a1.shape
t2d.cnstnt = 2.0
assert np.array_equal(t2d.array, np.zeros((3, 1, 10, 10)) + 20.0)
t2d[0] = 1.0
t2d[2] = 999
assert np.array_equal(t2d[0].array,np.ones((ml.nrow,ml.ncol)))
assert np.array_equal(t2d[2].array,np.ones((ml.nrow,ml.ncol))*999)
m4d = t2d.array
t2d2 = Transient2d.from_4d(ml,"rch",{"rech":m4d})
m4d2 = t2d2.array
assert np.array_equal(m4d,m4d2)
def test_util2d():
ml = flopy.modflow.Modflow()
u2d = Util2d(ml, (10, 10), np.float32, 10., "test")
a1 = u2d.array
a2 = np.ones((10, 10), dtype=np.float32) * 10.
assert np.array_equal(a1, a2)
# test external filenames - ascii and binary
fname_ascii = os.path.join(out_dir, 'test_a.dat')
fname_bin = os.path.join(out_dir, 'test_b.dat')
np.savetxt(fname_ascii,a1,fmt="%15.6E")
u2d.write_bin(a1.shape,fname_bin,a1,bintype="head")
dis = flopy.modflow.ModflowDis(ml,2,10,10)
lpf = flopy.modflow.ModflowLpf(ml,hk=[fname_ascii,fname_bin])
ml.lpf.hk[1].fmtin = "(BINARY)"
assert np.array_equal(lpf.hk[0].array,a1)
assert np.array_equal(lpf.hk[1].array,a1)
# test external filenames - ascii and binary with model_ws and external_path
ml = flopy.modflow.Modflow(model_ws="temp",external_path="ref")
u2d = Util2d(ml, (10, 10), np.float32, 10., "test")
fname_ascii = os.path.join(out_dir, 'test_a.dat')
fname_bin = os.path.join(out_dir, 'test_b.dat')
np.savetxt(fname_ascii,a1,fmt="%15.6E")
u2d.write_bin(a1.shape,fname_bin,a1,bintype="head")
dis = flopy.modflow.ModflowDis(ml,2,10,10)
lpf = flopy.modflow.ModflowLpf(ml,hk=[fname_ascii,fname_bin])
ml.lpf.hk[1].fmtin = "(BINARY)"
assert np.array_equal(lpf.hk[0].array,a1)
assert np.array_equal(lpf.hk[1].array,a1)
# bin read write test
fname = os.path.join(out_dir, 'test.bin')
u2d.write_bin((10, 10), fname, u2d.array)
a3 = u2d.load_bin((10, 10), fname, u2d.dtype)[1]
assert np.array_equal(a3, a1)
# ascii read write test
fname = os.path.join(out_dir, 'text.dat')
u2d.write_txt((10, 10), fname, u2d.array)
a4 = u2d.load_txt((10, 10), fname, u2d.dtype, "(FREE)")
assert np.array_equal(a1, a4)
# fixed format read/write with touching numbers - yuck!
data = np.arange(100).reshape(10, 10)
u2d_arange = Util2d(ml, (10, 10), np.float32, data, "test")
u2d_arange.write_txt((10, 10), fname, u2d_arange.array, python_format=[7, "{0:10.4E}"])
a4a = u2d.load_txt((10, 10), fname, np.float32, "(7E10.6)")
assert np.array_equal(u2d_arange.array, a4a)
# test view vs copy with .array
a5 = u2d.array
a5 += 1
assert not np.array_equal(a5, u2d.array)
# Util2d.__mul__() overload
new_2d = u2d * 2
assert np.array_equal(new_2d.array, u2d.array * 2)
# test the cnstnt application
u2d.cnstnt = 2.0
a6 = u2d.array
assert not np.array_equal(a1, a6)
u2d.write_txt((10, 10), fname, u2d.array)
a7 = u2d.load_txt((10, 10), fname, u2d.dtype, "(FREE)")
assert np.array_equal(u2d.array, a7)
return
def stress_util2d(ml, nlay, nrow, ncol):
dis = flopy.modflow.ModflowDis(ml, nlay=nlay, nrow=nrow, ncol=ncol)
hk = np.ones((nlay, nrow, ncol))
vk = np.ones((nlay, nrow, ncol)) + 1.0
# save hk up one dir from model_ws
fnames = []
for i, h in enumerate(hk):
fname = os.path.join(out_dir, "test_{0}.ref".format(i))
fnames.append(fname)
np.savetxt(fname, h, fmt="%15.6e", delimiter='')
vk[i] = i + 1.
lpf = flopy.modflow.ModflowLpf(ml, hk=fnames, vka=vk)
# util2d binary check
ml.lpf.vka[0].format.binary = True
# util3d cnstnt propagation test
ml.lpf.vka.cnstnt = 2.0
ml.write_input()
# check that binary is being respected - it can't get no respect!
vka_1 = ml.lpf.vka[0]
a = vka_1.array
vka_1_2 = vka_1 * 2.0
assert np.array_equal(a * 2.0,vka_1_2.array)
if ml.external_path is not None:
files = os.listdir(os.path.join(ml.model_ws, ml.external_path))
else:
files = os.listdir(ml.model_ws)
print("\n\nexternal files: " + ','.join(files) + '\n\n')
ml1 = flopy.modflow.Modflow.load(ml.namefile,
model_ws=ml.model_ws,
verbose=True, forgive=False)
print("testing load")
assert ml1.load_fail == False
# check that both binary and cnstnt are being respected through
# out the write and load process.
assert np.array_equal(ml1.lpf.vka.array, vk * 2.0)
assert np.array_equal(ml1.lpf.vka.array, ml.lpf.vka.array)
assert np.array_equal(ml1.lpf.hk.array, hk)
assert np.array_equal(ml1.lpf.hk.array, ml.lpf.hk.array)
print("change model_ws")
ml.model_ws = out_dir
ml.write_input()
if ml.external_path is not None:
files = os.listdir(os.path.join(ml.model_ws, ml.external_path))
else:
files = os.listdir(ml.model_ws)
print("\n\nexternal files: " + ','.join(files) + '\n\n')
ml1 = flopy.modflow.Modflow.load(ml.namefile,
model_ws=ml.model_ws,
verbose=True, forgive=False)
print("testing load")
assert ml1.load_fail == False
assert np.array_equal(ml1.lpf.vka.array, vk * 2.0)
assert np.array_equal(ml1.lpf.hk.array, hk)
# more binary testing
ml.lpf.vka[0]._array[0,0] *= 3.0
ml.write_input()
ml1 = flopy.modflow.Modflow.load(ml.namefile,
model_ws=ml.model_ws,
verbose=True, forgive=False)
assert np.array_equal(ml.lpf.vka.array,ml1.lpf.vka.array)
assert np.array_equal(ml.lpf.hk.array,ml1.lpf.hk.array)
def stress_util2d_for_joe_the_file_king(ml, nlay, nrow, ncol):
dis = flopy.modflow.ModflowDis(ml, nlay=nlay, nrow=nrow, ncol=ncol)
hk = np.ones((nlay, nrow, ncol))
vk = np.ones((nlay, nrow, ncol)) + 1.0
# save hk up one dir from model_ws
fnames = []
for i, h in enumerate(hk):
fname = os.path.join("test_{0}.ref".format(i))
fnames.append(fname)
np.savetxt(fname, h, fmt="%15.6e", delimiter='')
vk[i] = i + 1.
lpf = flopy.modflow.ModflowLpf(ml, hk=fnames, vka=vk)
ml.lpf.vka[0].format.binary = True
ml.lpf.vka.cnstnt = 2.0
ml.write_input()
assert np.array_equal(ml.lpf.hk.array,hk)
assert np.array_equal(ml.lpf.vka.array,vk * 2.0)
ml1 = flopy.modflow.Modflow.load(ml.namefile,
model_ws=ml.model_ws,
verbose=True, forgive=False)
print("testing load")
assert ml1.load_fail == False
assert np.array_equal(ml1.lpf.vka.array, vk * 2.0)
assert np.array_equal(ml1.lpf.hk.array, hk)
assert np.array_equal(ml1.lpf.vka.array, ml.lpf.vka.array)
assert np.array_equal(ml1.lpf.hk.array, ml.lpf.hk.array)
# more binary testing
ml.lpf.vka[0]._array[0,0] *= 3.0
ml.write_input()
ml1 = flopy.modflow.Modflow.load(ml.namefile,
model_ws=ml.model_ws,
verbose=True, forgive=False)
assert np.array_equal(ml.lpf.vka.array,ml1.lpf.vka.array)
assert np.array_equal(ml.lpf.hk.array,ml1.lpf.hk.array)
def test_util2d_external_free():
model_ws = os.path.join(out_dir, "extra_temp")
if os.path.exists(model_ws):
shutil.rmtree(model_ws)
os.mkdir(model_ws)
ml = flopy.modflow.Modflow(model_ws=model_ws)
stress_util2d(ml, 1, 1, 1)
stress_util2d(ml, 10, 1, 1)
stress_util2d(ml, 1, 10, 1)
stress_util2d(ml, 1, 1, 10)
stress_util2d(ml, 10, 10, 1)
stress_util2d(ml, 1, 10, 10)
stress_util2d(ml, 1
|
pledra/odoo-product-configurator
|
product_configurator_sale_mrp/models/sale.py
|
Python
|
agpl-3.0
| 193
| 0
|
from odoo import fields, models
class SaleOrderLine(models.Model):
_inherit = 'sale.order.line'
bom_id = fields.Many2one(
comodel_name='mrp.bom',
readonly=True
)
|
eXcomm/gratipay.com
|
gratipay/elsewhere/google.py
|
Python
|
cc0-1.0
| 1,648
| 0.001214
|
from __future__ import absolute_import, division, print_function, unicode_literals
from gratipay.elsewhere import PlatformOAuth2
from gratipay.elsewhere._extractors import any_key, key
from gratipay.elsewhere._paginators import query_param_paginator
class Google(PlatformOAuth2):
# Platform attributes
name = 'google'
display_name = 'Google'
account_url = 'https://plus.google.com/{user_id}'
optional_user_name = True
# Auth attributes
auth_url = 'https://accounts.google.com/o/oauth2/auth'
access_token_url = 'https://accounts.google.com/o/oauth2/token'
oauth_default_scope = ['https://www.googleapis.com/auth/userinfo.email',
'https://www.googleapis.com/auth/plus.login']
# API attributes
api_format = 'json'
api_paginator = query_param_paginator('pageToken',
next='nextPageToken',
page='items',
total='totalItems')
api_url = 'https://www.googleapis.com/plus/v1'
api_user_info_path = '/people/{user_id}'
api_user_self_info_path = '/people/me'
api_friends_path = '/people/{user_id}/people/visible'
api_friends_limited = True
# User info extractors
x_user_id = key('id')
x_display_name = key('displayName')
x_email = any_key(('emails', 0), clean=lambda d: d.get('value'))
x_avatar_url = key('image', clean=lambda d: d.get('url'))
def x_user_name(self, extracted, info, *default):
url = info.get('url', '')
return url[25:] if url.startswith('https://plus.google.com/+') else None
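# Example (hypothetical profile): for info['url'] ==
# 'https://plus.google.com/+SomeUser' this returns 'SomeUser';
# url[25:] strips the 25-character 'https://plus.google.com/+' prefix.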
|
aymeric-spiga/planetoplot
|
html/cgi-bin/maincgi.py
|
Python
|
gpl-2.0
| 1,620
| 0.025926
|
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
import cgi, cgitb
import sys
sys.path.insert(0, "../planetoplot/modules")
import ppplot
import ppclass
########################################
import numpy as np
xx = np.arange(25)
yy = 3.*xx
fig = ppplot.figuref(x=8,y=6)
pl = ppplot.plot1d()
pl.fig = fig # have to send to figure
pl.f = yy
pl.x = xx
pl.make()
######################################## more sophisticated example
## RETRIEVE DATA
#from ppclass import pp
#fifi = "/home/aspiga/soft/mcd_python/minimal_server/cgi-bin/wrfout_d01_2024-10-04_06z00z00_zabg"
#ff,xx,yy,zz,tt = pp(file=fifi,var="HGT",z=0,t=0).getfd()
#xx = pp(file=fifi,var="XLONG",z=0,t=0).getf()
#yy = pp(file=fifi,var="XLAT",z=0,t=0).getf()
#uu = pp(file=fifi,var="Um",z=0,t=0).getf()
#vv = pp(file=fifi,var="Vm",z=0,t=0).getf()
#
## PLOT
#pl = ppplot.plot2d()
#pl.fig = fig # have to send to figure
#pl.f = ff
#pl.x = xx
#pl.y = yy
#pl.vx = uu
#pl.vy = vv
#pl.legend = "yorgl"
#pl.marker = None
#pl.nyticks = 20
#pl.ylabel = "YAARGL"
#pl.proj = "laea"
#pl.make()
########################################
# create figure
ppplot.sendagg(fig,filename='webapp.png', dpi=150)
# for debugging in web browser
cgitb.enable()
## Create instance of FieldStorage
#form = cgi.FieldStorage()
##### NOW WRITE THE HTML PAGE TO USER
print "Content-type:text/html;charset=utf-8\n"
print #Apache needs a space after content-type
header="""<html><head><title>Mars Climate Database: The Web Interface</title></head><body>"""
print header
print "THIS IS A TEST!"
print "<img src='../webapp.png'><br />"
bottom = "</body></html>"
print bottom
|
mitsuhiko/sentry
|
tests/acceptance/test_auth.py
|
Python
|
bsd-3-clause
| 1,321
| 0.000757
|
from __future__ import absolute_import
from sentry.testutils import AcceptanceTestCase
class AuthTest(AcceptanceTestCase):
def enter_auth(self, username, password):
# disable captcha as it makes these tests flakey (and requires waiting
# on external resources)
with self.settings(RECAPTCHA_PUBLIC_KEY=None):
self.browser.get('/auth/login/')
self.browser.find_element_by_id('id_username').send_keys(username)
self.browser.find_element_by_id('id_password').send_keys(password)
self.browser.find_element_by_xpath("//button[contains(text(), 'Login')]").click()
def test_renders(self):
self.browser.get('/auth/login/')
self.browser.snapshot(name='login')
def test_no_credentials(self):
self.enter_auth('', '')
self.browser.snapshot(name='login fields required')
def test_invalid_credentials(self):
self.enter_auth('bad-username', 'bad-username')
self.browser.snapshot(name='login fields invalid')
def test_success(self):
email = 'dummy@example.com'
password = 'dummy'
user = self.create_user(email=email)
user.set_password(password)
user.save()
self.enter_auth(email, password)
self.browser.snapshot(name='login success')
|
ardi69/pyload-0.4.10
|
pyload/plugin/hoster/BayfilesCom.py
|
Python
|
gpl-3.0
| 468
| 0.014957
|
# -*- coding: utf-8 -*-
from pyload.plugin.internal.DeadHoster import DeadHoster
class BayfilesCom(DeadHoster):
__name = "BayfilesCom"
__type = "hoster"
__version = "0.09"
__pattern = r'https?://(?:www\.)?bayfiles\.(com|net)/file/(?P<ID>\w+/\w+/[^/]+)'
__config = [] #@TODO: Remove in 0.4.10
__description = """Bayfiles.com hoster plugin"""
__license = "GPLv3"
__authors = [("Walter Purcaro", "vuolter@gmail.com")]
|
fotinakis/sentry
|
src/sentry/utils/auth.py
|
Python
|
bsd-3-clause
| 6,403
| 0.000156
|
"""
sentry.utils.auth
~~~~~~~~~~~~~~~~~
:copyright: (c) 2010-2014 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import
import six
import logging
from django.conf import settings
from django.contrib.auth import login as _login
from django.contrib.auth.backends import ModelBackend
from django.core.urlresolvers import reverse, resolve
from time import time
from sentry.models import User, Authenticator
logger = logging.getLogger('sentry.auth')
_LOGIN_URL = None
class AuthUserPasswordExpired(Exception):
def __init__(self, user):
self.user = user
def _make_key_value(val):
return val.strip().split('=', 1)
def parse_auth_header(header):
try:
return dict(map(_make_key_value, header.split(' ', 1)[1].split(',')))
except Exception:
return {}
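# Example (hypothetical header value):
# parse_auth_header('Sentry sentry_key=abc123,sentry_version=7')
# -> {'sentry_key': 'abc123', 'sentry_version': '7'}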
def get_auth_providers():
return [
key for key, cfg_names
in six.iteritems(settings.AUTH_PROVIDERS)
if all(getattr(settings, c, None) for c in cfg_names)
]
def get_pending_2fa_user(request):
rv = request.session.get('_pending_2fa')
if rv is None:
return
user_id, created_at = rv
if created_at < time() - 60 * 5:
return None
try:
return User.objects.get(pk=user_id)
except User.DoesNotExist:
pass
def has_pending_2fa(request):
return request.session.get('_pending_2fa') is not None
def get_login_url(reset=False):
global _LOGIN_URL
if _LOGIN_URL is None or reset:
# if LOGIN_URL resolves force login_required to it instead of our own
# XXX: this must be done as late as possible to avoid idempotent requirements
try:
resolve(settings.LOGIN_URL)
except Exception:
_LOGIN_URL = settings.SENTRY_LOGIN_URL
else:
_LOGIN_URL = settings.LOGIN_URL
if _LOGIN_URL is None:
_LOGIN_URL = reverse('sentry-login')
return _LOGIN_URL
def initiate_login(request, next_url=None):
try:
del request.session['_after_2fa']
except KeyError:
pass
try:
del request.session['_pending_2fa']
except KeyError:
pass
if next_url:
request.session['_next'] = next_url
else:
try:
del request.session['_next']
except KeyError:
pass
def get_login_redirect(request, default=None):
if default is None:
default = get_login_url()
# If there is a pending 2fa authentication bound to the session then
# we need to go to the 2fa dialog.
if has_pending_2fa(request):
return reverse('sentry-2fa-dialog')
# If we have a different URL to go after the 2fa flow we want to go to
# that now here.
after_2fa = request.session.pop('_after_2fa', None)
if after_2fa is not None:
return after_2fa
login_url = request.session.pop('_next', None) or default
if login_url.startswith(('http://', 'https://')):
login_url = default
elif login_url.startswith(get_login_url()):
login_url = default
return login_url
def find_users(username, with_valid_password=True, is_active=None):
"""
Return a list of users that match a username,
falling back to email.
"""
qs = User.objects
if is_active is not None:
qs = qs.filter(is_active=is_active)
if with_valid_password:
qs = qs.exclude(password='!')
try:
# First, assume username is an iexact match for username
user = qs.get(username__iexact=username)
return [user]
except User.DoesNotExist:
# If not, we can take a stab at guessing it's an email address
if '@' in username:
# email isn't guaranteed unique
return list(qs.filter(email__iexact=username))
return []
def login(request, user, passed_2fa=False, after_2fa=None):
"""This logs a user in for the sesion and current request. If 2FA is
enabled this method will start the 2FA flow and return False, otherwise
it will return True. If `passed_2fa` is set to `True` then the 2FA flow
is set to be finalized (user passed the flow).
Optionally `after_2fa` can be set to a URL which will be used to override
the regular session redirect target directly after the 2fa flow.
"""
has_2fa = Authenticator.objects.user_has_2fa(user)
if has_2fa and not passed_2fa:
request.session['_pending_2fa'] = [user.id, time()]
if after_2fa is not None:
request.session['_after_2fa'] = after_2fa
return False
request.session.pop('_pending_2fa', None)
# Check for expired passwords here after we cleared the 2fa flow.
# While this means that users will have to pass 2fa before they can
# figure out that their passwords are expired this is still the more
# reasonable behavior.
#
# We also remember _after_2fa here so that we can continue the flow if
# someone does it in the same browser.
if user.is_password_expired:
raise AuthUserPasswordExpired(user)
# If there is no authentication backend, just attach the first
# one and hope it goes through. This apparently is a thing we
# have been doing for a long time, just moved it to a more
# reasonable place.
if not hasattr(user, 'backend'):
user.backend = settings.AUTHENTICATION_BACKENDS[0]
_login(request, user)
log_auth_success(request, user.username)
return True
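# Minimal usage sketch (hypothetical view code, not part of this module):
# if login(request, user):
#     return HttpResponseRedirect(get_login_redirect(request))
# # login() returned False: a 2FA flow was started for this session
# return HttpResponseRedirect(reverse('sentry-2fa-dialog'))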
def log_auth_success(request, username):
logger.info('user.auth.success', extra={
'ip_address': request.META['REMOTE_ADDR'],
'username': username,
})
def log_auth_failure(request, username=None):
logger.info('user.auth.fail', extra={
'ip_address': request.META['REMOTE_ADDR'],
'username': username,
})
class EmailAuthBackend(ModelBackend):
"""
Authenticate against django.contrib.auth.models.User.
Supports authenticating via an email address or a username.
"""
def authenticate(self, username=None, password=None):
users = find_users(username)
if users:
for user in users:
try:
if user.password and user.check_password(password):
return user
except ValueError:
continue
return None
|
mobo95/pyload
|
module/plugins/hooks/AndroidPhoneNotify.py
|
Python
|
gpl-3.0
| 2,646
| 0.016251
|
# -*- coding: utf-8 -*-
from time import time
from module.network.RequestFactory import getURL
from module.plugins.Hook import Hook
class AndroidPhoneNotify(Hook):
__name__ = "AndroidPhoneNotify"
__type__ = "hook"
__version__ = "0.04"
__config__ = [("apikey" , "str" , "API key" , "" ),
("notifycaptcha" , "bool", "Notify captcha request" , True ),
("notifypackage" , "bool", "Notify package finished" , True ),
("notifyprocessed", "bool", "Notify processed packages status" , True ),
("timeout" , "int" , "Timeout between captchas in seconds" , 5 ),
("force" , "bool", "Send notifications if client is connected", False)]
__description__ = """Send push notifications to your Android Phone using notifymyandroid.com"""
__license__ = "GPLv3"
__authors__ = [("Steven Kosyra", "steven.kosyra@gmail.com"),
("Walter Purcaro", "vuolter@gmail.com")]
event_list = ["allDownloadsProcessed"]
#@TODO: Remove in 0.4.10
def initPeriodical(self):
pass
def setup(self):
self.info = {} #@TODO: Remove in 0.4.10
self.last_notify = 0
def newCaptchaTask(self, task):
if not self.getConfig("notifycaptcha"):
return False
if time() - self.last_notify < self.getConf("timeout"):
return False
self.notify(_("Captcha"), _("New request waiting user input"))
def packageFinished(self, pypack):
if self.getConfig("notifypackage"):
self.notify(_("Package finished"), pypack.name)
def allDownloadsProcessed(self):
if not self.getConfig("notifyprocessed"):
return False
if any(True for pdata in self.core.api.getQueue() if pdata.linksdone < pdata.linkstotal):
self.notify(_("Package failed"), _("One or more packages was not completed successfully"))
else:
self.notify(_("All packages finished"))
def notify(self, event, msg=""):
apikey = self.getConfig("apikey")
if not apikey:
return False
if self.core.isClientConnected() and not self.getConfig("force"):
return False
getURL("http://www.notifymyandroid.com/publicapi/notify",
get={'apikey' : apikey,
'application': "pyLoad",
'event' : event,
'description': msg})
self.last_notify = time()
|
musicpax/funcy
|
funcy/py2.py
|
Python
|
bsd-3-clause
| 765
| 0.003922
|
import sys
from .calc import *
from .colls import *
from .tree import *
from .decorators import *
from .funcolls import *
from .funcs import *
from .seqs import *
from .types import *
from .strings import *
from .flow import *
from .objects import *
from .namespaces import namespace
from .debug import *
from .primitives import *
# Setup __all__
modules = ('calc', 'colls', 'tree', 'decorators', 'funcolls', 'funcs', 'seqs', 'types',
'strings', 'flow', 'objects', 'namespaces', 'debug', 'primitives')
__all__ = cat(sys.modules['funcy.' + m].__all__ for m in modules)
# Python 2 style zip() for Python 3
from .cross import PY3
if PY3:
_zip = zip
def zip(*seqs):
return list(_zip(*seqs))
__all__.append('zip')
else:
zip = zip
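# Example: with this shim, zip([1, 2], 'ab') == [(1, 'a'), (2, 'b')] on both
# Python 2 and Python 3, instead of a lazy iterator on Python 3.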
|
mekolat/manachat
|
plugins/manaboy.py
|
Python
|
gpl-2.0
| 11,836
| 0.004562
|
import time
import net.mapserv as mapserv
import net.charserv as charserv
import commands
import walkto
import logicmanager
import status
import plugins
from collections import deque
from net.inventory import get_item_index, get_storage_index
from utils import extends
from actor import find_nearest_being
from chat import send_whisper as whisper
__all__ = [ 'PLUGIN', 'init' ]
PLUGIN = {
'name': 'manaboy',
'requires': ('chatbot', 'npc', 'autofollow'),
'blocks': (),
}
npcdialog = {
'start_time': -1,
'program': [],
}
_times = {
'follow': 0,
'where' : 0,
'status' : 0,
'inventory' : 0,
'say' : 0,
'zeny' : 0,
'storage' : 0,
}
admins = ['Trav', 'Travolta', 'Komornyik']
allowed_drops = [535, 719, 513, 727, 729, 869]
npc_owner = ''
history = deque(maxlen=10)
storage_is_open = False
def set_npc_owner(nick):
global npc_owner
if plugins.npc.npc_id < 0:
npc_owner = nick
@extends('smsg_being_remove')
def bot_dies(data):
if data.id == charserv.server.account:
mapserv.cmsg_player_respawn()
@extends('smsg_npc_message')
@extends('smsg_npc_choice')
@extends('smsg_npc_close')
@extends('smsg_npc_next')
@extends('smsg_npc_int_input')
@extends('smsg_npc_str_input')
def npc_activity(data):
npcdialog['start_time'] = time.time()
@extends('smsg_npc_message')
def npc_message(data):
if not npc_owner:
return
npc = mapserv.beings_cache.findName(data.id)
m = '[npc] {} : {}'.format(npc, data.message)
whisper(npc_owner, m)
@extends('smsg_npc_choice')
def npc_choice(data):
if not npc_owner:
return
choices = filter(lambda s: len(s.strip()) > 0,
data.select.split(':'))
whisper(npc_owner, '[npc][select] (use !input <number> to select)')
for i, s in enumerate(choices):
whisper(npc_owner, ' {}) {}'.format(i + 1, s))
@extends('smsg_npc_int_input')
@extends('smsg_npc_str_input')
def npc_input(data):
if not npc_owner:
return
t = 'number'
if plugins.npc.input_type == 'str':
t = 'string'
whisper(npc_owner, '[npc][input] (use !input <{}>)'.format(t))
@extends('smsg_storage_status')
def storage_status(data):
print 'storage_status'
global storage_is_open
storage_is_open = True
_times['storage'] = time.time()
if npc_owner:
whisper(npc_owner, '[storage]')
@extends('smsg_storage_items')
@extends('smsg_storage_equip')
def storage_items(data):
if not npc_owner:
return
ls = status.invlists2(max_length=255, source='storage')
for l in ls:
whisper(npc_owner, l)
@extends('smsg_storage_close')
def storage_close(data):
print 'smsg_storage_close'
global storage_is_open
storage_is_open = False
_times['storage'] = 0
def cmd_where(nick, message, is_whisper, match):
if not is_whisper:
return
msg = status.player_position()
whisper(nick, msg)
def cmd_goto(nick, message, is_whisper, match):
if not is_whisper:
return
try:
x = int(match.group(1))
y = int(match.group(2))
except ValueError:
return
set_npc_owner(nick)
plugins.autofollow.follow = ''
mapserv.cmsg_player_change_dest(x, y)
def cmd_goclose(nick, message, is_whisper, match):
if not is_whisper:
return
x = mapserv.player_pos['x']
y = mapserv.player_pos['y']
if message.startswith('!left'):
x -= 1
elif message.startswith('!right'):
x += 1
elif message.startswith('!up'):
y -= 1
elif message.startswith('!down'):
y += 1
set_npc_owner(nick)
plugins.autofollow.follow = ''
mapserv.cmsg_player_change_dest(x, y)
def cmd_pickup(nick, message, is_whisper, match):
if not is_whisper:
return
commands.pickup()
def cmd_drop(nick, message, is_whisper, match):
if not is_whisper:
return
try:
amount = int(match.group(1))
item_id = int(match.group(2))
except ValueError:
return
if nick not in admins:
if item_id not in allowed_drops:
return
index = get_item_index(item_id)
if index > 0:
mapserv.cmsg_player_inventory_drop(index, amount)
def cmd_item_action(nick, message, is_whisper, match):
if not is_whisper:
return
try:
itemId = int(match.group(1))
except ValueError:
return
index = get_item_index(itemId)
if index <= 0:
return
if message.startswith('!equip'):
mapserv.cmsg_player_equip(index)
elif message.startswith('!unequip'):
mapserv.cmsg_player_unequip(index)
elif message.startswith('!use'):
mapserv.cmsg_player_inventory_use(index, itemId)
def cmd_emote(nick, message, is_whisper, match):
if not is_whisper:
return
try:
emote = int(match.group(1))
except ValueError:
return
mapserv.cmsg_player_emote(emote)
def cmd_attack(nick, message, is_whisper, match):
if not is_whisper:
return
target_s = match.group(1)
try:
target = mapserv.beings_cache[int(target_s)]
except (ValueError, KeyError):
target = find_nearest_being(name=target_s, ignored_ids=walkto.unreachable_ids)
if target is not None:
set_npc_owner(nick)
plugins.autofollow.follow = ''
walkto.walkto_and_action(target, 'attack')
def cmd_say(nick, message, is_whisper, match):
if not is_whisper:
return
msg = match.group(1)
whisper(nick, msg)
def cmd_sit(nick, message, is_whisper, match):
if not is_whisper:
return
plugins.autofollow.follow = ''
mapserv.cmsg_player_change_act(0, 2)
def cmd_turn(nick, message, is_whisper, match):
if not is_whisper:
return
commands.set_direction('', message[6:])
def cmd_follow(nick, message, is_whisper, match):
if not is_whisper:
return
if plugins.autofollow.follow == nick:
plugins.autofollow.follow = ''
else:
set_npc_owner(nick)
plugins.autofollow.follow = nick
def cmd_lvlup(nick, message, is_whisper, match):
if not is_whisper:
return
stat = match.group(1).lower()
stats = {'str': 13, 'agi': 14, 'vit': 15,
'int': 16, 'dex': 17, 'luk': 18}
skills = {'mallard': 45, 'brawling': 350, 'speed': 352,
'astral': 354, 'raging': 355, 'resist': 353}
if stat in stats:
mapserv.cmsg_stat_update_request(stats[stat], 1)
elif stat in skills:
mapserv.cmsg_skill_levelup_request(skills[stat])
def cmd_invlist(nick, message, is_whisper, match):
if not is_whisper:
return
ls = status.invlists(50)
for l in ls:
whisper(nick, l)
def cmd_inventory(nick, message, is_whisper, match):
if not is_whisper:
return
ls = status.invlists2(255)
for l in ls:
whisper(nick, l)
def cmd_status(nick, message, is_whisper, match):
if not is_whisper:
return
all_stats = ('stats', 'hpmp', 'weight', 'points',
'zeny', 'attack', 'skills')
sr = status.stats_repr(*all_stats)
whisper(nick, ' | '.join(sr.values()))
def cmd_zeny(nick, message, is_whisper, match):
if not is_whisper:
return
whisper(nick, 'I have {} GP'.format(mapserv.player_money))
def cmd_talk2npc(nick, message, is_whisper, match):
if not is_whisper:
return
npc_s = match.group(1)
jobs = []
name = ''
try:
jobs = [int(npc_s)]
except ValueError:
name = npc_s
b = find_nearest_being(name=name, type='npc', allowed_jobs=jobs)
if b is None:
return
set_npc_owner(nick)
plugins.autofollow.follow = ''
plugins.npc.npc_id = b.id
mapserv.cmsg_npc_talk(b.id)
def cmd_input(nick, message, is_whisper, match):
if not is_whisper:
return
plugins.npc.cmd_npcinput('', match.group(1))
def cmd_close(nick, message, is_whisper, match):
if not is_whisper:
return
if storage_is_open:
reset_storage()
else:
plugins.npc.cmd_npcclose()
def cmd_history(nick, message, is_
|
zachjanicki/osf.io
|
website/addons/figshare/model.py
|
Python
|
apache-2.0
| 13,145
| 0.000685
|
# -*- coding: utf-8 -*-
import markupsafe
from modularodm import fields
from framework.auth.decorators import Auth
from website.models import NodeLog
from website.addons.base import exceptions
from website.addons.base import AddonNodeSettingsBase, AddonUserSettingsBase
from website.addons.base import StorageAddonBase
from . import messages
from .api import Figshare
from . import settings as figshare_settings
class AddonFigShareUserSettings(AddonUserSettingsBase):
oauth_request_token = fields.StringField()
oauth_request_token_secret = fields.StringField()
oauth_access_token = fields.StringField()
oauth_access_token_secret = fields.StringField()
@property
def has_auth(self):
return self.oauth_access_token is not None
def to_json(self, user):
ret = super(AddonFigShareUserSettings, self).to_json(user)
ret.update({
'authorized': self.has_auth,
'name': self.owner.display_full_name(),
'profile_url': self.owner.profile_url,
})
return ret
def remove_auth(self, save=False):
self.oauth_access_token = None
self.oauth_access_token_secret = None
for node_settings in self.addonfigsharenodesettings__authorized:
node_settings.deauthorize(auth=Auth(user=self.owner), save=True)
if save:
self.save()
def delete(self, save=False):
self.remove_auth(save=False)
super(AddonFigShareUserSettings, self).delete(save=save)
class AddonFigShareNodeSettings(StorageAddonBase, AddonNodeSettingsBase):
figshare_id = fields.StringField()
figshare_type = fields.StringField()
figshare_title = fields.StringField()
user_settings = fields.ForeignField(
'addonfigshareusersettings', backref='authorized'
)
@property
def folder_name(self):
return self.figshare_title
def archive_errors(self):
api = Figshare.from_settings(self.user_settings)
items = []
if self.figshare_type in ('article', 'fileset'):
article = api.article(self, self.figshare_id)
items = article['items'] if article else []
else:
project = api.project(self, self.figshare_id)
items = project['articles'] if project else []
private = any(
[item for item in items if item['status'] != 'Public']
)
if private:
return 'The figshare {figshare_type} <strong>{figshare_title}</strong> contains private content that we cannot copy to the registration. If this content is made public on figshare we should then be able to copy those files. You can view those files <a href="{url}" target="_blank">here.</a>'.format(
figshare_type=markupsafe.escape(self.figshare_type),
figshare_title=markupsafe.escape(self.figshare_title),
url=self.owner.web_url_for('collect_file_trees'))
@property
def api_url(self):
if self.user_settings is None:
return figshare_settings.API_URL
else:
return figshare_settings.API_OAUTH_URL
@property
def has_auth(self):
return bool(self.user_settings and self.user_settings.has_auth)
@property
def complete(self):
return self.has_auth and self.figshare_id is not None
@property
def linked_content(self):
return {
'id': self.figshare_id,
'type': self.figshare_type,
'name': self.figshare_title,
}
def authorize(self, user_settings, save=False):
self.user_settings = user_settings
node = self.owner
node.add_log(
action='figshare_node_authorized',
params={
'project': node.parent_id,
'node': node._id,
},
auth=Auth(user=user_settings.owner),
)
if save:
self.save()
def deauthorize(self, auth=None, add_log=True, save=False):
"""Remove user authorization from this node and log the event."""
self.user_settings = None
self.figshare_id = None
self.figshare_type = None
self.figshare_title = None
if add_log:
node = self.owner
self.owner.add_log(
action='figshare_node_deauthorized',
params={
'project': node.parent_id,
'node': node._id,
},
auth=auth,
)
if save:
self.save()
def serialize_waterbutler_credentials(self):
if not self.has_auth:
raise exceptions.AddonError('Cannot serialize credentials for unauthorized addon')
return {
'client_token': figshare_settings.CLIENT_ID,
'client_secret': figshare_settings.CLIENT_SECRET,
'owner_token': self.user_settings.oauth_access_token,
'owner_secret': self.user_settings.oauth_access_token_secret,
}
def serialize_waterbutler_settings(self):
if not self.figshare_type or not self.figshare_id:
raise exceptions.AddonError('Cannot serialize settings for unconfigured addon')
return {
'container_type': self.figshare_type,
'container_id': str(self.figshare_id),
}
def create_waterbutler_log(self, auth, action, metadata):
if action in [NodeLog.FILE_ADDED, NodeLog.FILE_UPDATED]:
name = metadata['name']
url = self.owner.web_url_for('addon_view_or_download_file', provider='figshare', path=metadata['path'])
urls = {
'view': url,
'download': url + '?action=download'
}
elif action == NodeLog.FILE_REMOVED:
name = metadata['path']
urls = {}
self.owner.add_log(
'figshare_{0}'.format(action),
auth=auth,
params={
'project': self.owner.parent_id,
'node': self.owner._id,
'path': name,
'urls': urls,
'figshare': {
'id': self.figshare_id,
'type': self.figshare_type,
},
},
)
def delete(self, save=False):
super(AddonFigShareNodeSettings, self).delete(save=False)
self.deauthorize(add_log=False, save=save)
def update_fields(self, fields, node, auth):
updated = False
if fields.get('id'):
updated = updated or (fields['id'] != self.figshare_id)
self.figshare_id = fields['id']
if fields.get('name'):
updated = updated or (fields['name'] != self.figshare_title)
self.figshare_title = fields['name']
if fields.get('type'):
updated = updated or (fields['type'] != self.figshare_type)
self.figshare_type = fields['type']
self.save()
if updated:
node.add_log(
action='figshare_content_linked',
params={
'project': node.parent_id,
'node': node._id,
'figshare': {
'type': self.figshare_type,
'id': self.figshare_id,
'title': self.figshare_title,
},
},
auth=auth,
)
def to_json(self, user):
ret = super(AddonFigShareNodeSettings, self).to_json(user)
figshare_user = user.get_addon('figshare')
ret.update({
'figshare_id': self.figshare_id or '',
'figshare_type': self.figshare_type or '',
'figshare_title': self.figshare_title or '',
'node_has_auth': self.has_auth,
'user_has_auth': bool(figshare_user) and figshare_user.has_auth,
'figshare_options': [],
'is_registration': self.owner.is_registration,
})
if self.has_auth:
ret.update({
'authorized_user': self.user_settings.owner.fullname,
'owner_url': self.user_settings.owner.url,
|
nottombrown/rl-teacher
|
agents/pposgd-mpi/pposgd_mpi/cnn_policy.py
|
Python
|
mit
| 2,355
| 0.005945
|
from pposgd_mpi.common.mpi_running_mean_std import RunningMeanStd
import pposgd_mpi.common.tf_util as U
import tensorflow as tf
import gym
from pposgd_mpi.common.distributions import make_pdtype
class CnnPolicy(object):
recurrent = False
def __init__(self, name, ob_space, ac_space, kind='large'):
with tf.variable_scope(name):
self._init(ob_space, ac_space, kind)
self.scope = tf.get_variable_scope().name
def _init(self, ob_space, ac_space, kind):
assert isinstance(ob_space, gym.spaces.Box)
self.pdtype = pdtype = make_pdtype(ac_space)
sequence_length = None
ob = U.get_placeholder(name="obs", dtype=tf.float32, shape=[sequence_length] + list(ob_space.shape))
x = ob / 255.0
if kind == 'small': # from A3C paper
x = tf.nn.relu(U.conv2d(x, 16, "l1", [8, 8], [4, 4], pad="VALID"))
x = tf.nn.relu(U.conv2d(x, 32, "l2", [4, 4], [2, 2], pad="VALID"))
x = U.flattenallbut0(x)
x = tf.nn.relu(U.dense(x, 256, 'lin', U.normc_initializer(1.0)))
elif kind == 'large': # Nature DQN
x = tf.nn.relu(U.conv2d(x, 32, "l1", [8, 8], [4, 4], pad="VALID"))
x = tf.nn.relu(U.conv2d(x, 64, "l2", [4, 4], [2, 2], pad="VALID"))
x = tf.nn.relu(U.conv2d(x, 64, "l3", [3, 3], [1, 1], pad="VALID"))
x = U.flattenallbut0(x)
x = tf.nn.relu(U.dense(x, 512, 'lin', U.normc_initializer(1.0)))
else:
raise NotImplementedError
logits = U.dense(x, pdtype.param_shape()[0], "logits", U.normc_initializer(0.01))
self.pd = pdtype.pdfromflat(logits)
self.vpred = U.dense(x, 1, "value", U.normc_initializer(1.0))[:,0]
self.state_in = []
self.state_out = []
stochastic = tf.placeholder(dtype=tf.bool, shape=())
ac = self.pd.sample() # XXX
self._act = U.function([stochastic, ob], [ac, self.vpred])
def act(self, stochastic, ob):
ac1, vpred1 = self._act(stochastic, ob[None])
return ac1[0], vpred1[0]
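# Usage sketch (hypothetical; `env` is assumed to be a gym env with image
# observations, e.g. Atari):
# pi = CnnPolicy("pi", env.observation_space, env.action_space, kind='large')
# ac, vpred = pi.act(stochastic=True, ob=env.reset())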
def get_variables(self):
return tf.get_collection(tf.GraphKeys.VARIABLES, self.scope)
def get_trainable_variables(self):
return tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, self.scope)
def get_initial_state(self):
return []
|
jcftang/ansible
|
lib/ansible/modules/database/misc/kibana_plugin.py
|
Python
|
gpl-3.0
| 7,030
| 0.003414
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Ansible module to manage elasticsearch shield role
(c) 2016, Thierno IB. BARRY @barryib
Sponsored by Polyconseil http://polyconseil.fr.
This file is part of Ansible
Ansible is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Ansible is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Ansible. If not, see <http://www.gnu.org/licenses/>.
"""
import os
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
module: kibana_plugin
short_description: Manage Kibana plugins
description:
- Manages Kibana plugins.
version_added: "2.2"
author: Thierno IB. BARRY (@barryib)
options:
name:
description:
- Name of the plugin to install
required: True
state:
description:
- Desired state of a plugin.
required: False
choices: ["prese
|
nt", "absent"]
default: present
url:
description:
- Set exact URL to download the plugin from.
For local file, prefix its absolute path with file://
required: False
default: None
timeout:
    description:
- "Timeout setting: 30s, 1m, 1h..."
required: False
default: 1m
plugin_bin:
description:
- Location of the plugin binary
required: False
default: /opt/kibana/bin/kibana
plugin_dir:
description:
- Your configured plugin directory specified in Kibana
required: False
default: /opt/kibana/installedPlugins/
version:
description:
- Version of the plugin to be installed.
If plugin exists with previous version, it will NOT be updated if C(force) is not set to yes
required: False
default: None
force:
description:
- Delete and re-install the plugin. Can be useful for plugins update
required: False
choices: ["yes", "no"]
default: no
'''
EXAMPLES = '''
- name: Install Elasticsearch head plugin
kibana_plugin:
state: present
name: elasticsearch/marvel
- name: Install specific version of a plugin
kibana_plugin:
state: present
name: elasticsearch/marvel
version: '2.3.3'
- name: Uninstall Elasticsearch head plugin
kibana_plugin:
state: absent
name: elasticsearch/marvel
'''
RETURN = '''
cmd:
    description: the launched command during plugin management (install / remove)
returned: success
type: string
name:
description: the plugin name to install or remove
returned: success
type: string
url:
description: the url from where the plugin is installed from
returned: success
type: string
timeout:
    description: the timeout for plugin download
returned: success
type: string
stdout:
description: the command stdout
returned: success
type: string
stderr:
description: the command stderr
returned: success
type: string
state:
description: the state for the managed plugin
returned: success
type: string
'''
PACKAGE_STATE_MAP = dict(
present="--install",
absent="--remove"
)
def parse_plugin_repo(string):
elements = string.split("/")
# We first consider the simplest form: pluginname
repo = elements[0]
# We consider the form: username/pluginname
if len(elements) > 1:
repo = elements[1]
# remove elasticsearch- prefix
# remove es- prefix
for string in ("elasticsearch-", "es-"):
if repo.startswith(string):
return repo[len(string):]
return repo
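# For illustration (not part of the original module), parse_plugin_repo
# normalizes plugin names as follows:
#   parse_plugin_repo('elasticsearch/marvel') -> 'marvel'
#   parse_plugin_repo('es-head')              -> 'head'
#   parse_plugin_repo('kibana')               -> 'kibana'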
def is_plugin_present(plugin_dir, working_dir):
return os.path.isdir(os.path.join(working_dir, plugin_dir))
def parse_error(string):
reason = "reason: "
try:
return string[string.index(reason) + len(reason):].strip()
except ValueError:
return string
def install_plugin(module, plugin_bin, plugin_name, url, timeout):
cmd_args = [plugin_bin, "plugin", PACKAGE_STATE_MAP["present"], plugin_name]
if url:
cmd_args.append("--url %s" % url)
if timeout:
cmd_args.append("--timeout %s" % timeout)
cmd = " ".join(cmd_args)
if module.check_mode:
return True, cmd, "check mode", ""
rc, out, err = module.run_command(cmd)
if rc != 0:
reason = parse_error(out)
module.fail_json(msg=reason)
return True, cmd, out, err
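# For illustration (not part of the original module), installing
# 'elasticsearch/marvel' with the default timeout builds roughly:
#   /opt/kibana/bin/kibana plugin --install elasticsearch/marvel --timeout 1m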
def remove_plugin(module, plugin_bin, plugin_name):
cmd_args = [plugin_bin, "plugin", PACKAGE_STATE_MAP["absent"], plugin_name]
cmd = " ".join(cmd_args)
if module.check_mode:
return True, cmd, "check mode", ""
rc, out, err = module.run_command(cmd)
if rc != 0:
reason = parse_error(out)
module.fail_json(msg=reason)
return True, cmd, out, err
def main():
module = AnsibleModule(
argument_spec=dict(
name=dict(required=True),
state=dict(default="present", choices=PACKAGE_STATE_MAP.keys()),
url=dict(default=None),
timeout=dict(default="1m"),
plugin_bin=dict(default="/opt/kibana/bin/kibana", type="path"),
plugin_dir=dict(default="/opt/kibana/installedPlugins/", type="path"),
version=dict(default=None),
force=dict(default="no", type="bool")
),
supports_check_mode=True,
)
name = module.params["name"]
state = module.params["state"]
url = module.params["url"]
timeout = module.params["timeout"]
plugin_bin = module.params["plugin_bin"]
plugin_dir = module.params["plugin_dir"]
version = module.params["version"]
force = module.params["force"]
present = is_plugin_present(parse_plugin_repo(name), plugin_dir)
# skip if the state is correct
if (present and state == "present" and not force) or (state == "absent" and not present and not force):
module.exit_json(changed=False, name=name, state=state)
if (version):
name = name + '/' + version
if state == "present":
if force:
remove_plugin(module, plugin_bin, name)
changed, cmd, out, err = install_plugin(module, plugin_bin, name, url, timeout)
elif state == "absent":
changed, cmd, out, err = remove_plugin(module, plugin_bin, name)
module.exit_json(changed=changed, cmd=cmd, name=name, state=state, url=url, timeout=timeout, stdout=out, stderr=err)
from ansible.module_utils.basic import *
if __name__ == '__main__':
main()
|
ClearCorp/server-tools
|
external_file_location/tests/test_ftp.py
|
Python
|
agpl-3.0
| 3,525
| 0
|
# coding: utf-8
# @ 2015 Valentin CHEMIERE @ Akretion
# ©2016 @author Mourad EL HADJ MIMOUNE <mourad.elhadj.mimoune@akretion.com>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
import logging
from base64 import b64decode
import hashlib
from .common import TestConnection, ContextualStringIO
from .mock_server import server_mock_ftp
from .mock_server import MultiResponse
from openerp.exceptions import UserError
_logger = logging.getLogger(__name__)
class TestFtpConnection(TestConnection):
def setUp(self):
super(TestFtpConnection, self).setUp()
self.test_file_ftp = ContextualStringIO()
self.test_file_ftp.write('import ftp')
self.test_file_ftp.seek(0)
def test_00_ftp_import(self):
self.task = self.env.ref('external_file_location.ftp_import_task')
with server_mock_ftp(
{'open': self.test_file_ftp,
'listdir': ['test-import-ftp.txt']}):
self.task.run_import()
search_file = self.env['ir.attachment.metadata'].search(
[('name', '=', 'test-import-ftp.txt')])
self.assertEqual(len(search_file), 1)
self.assertEqual(b64decode(search_file[0].datas), 'import ftp')
def test_01_ftp_export(self):
self.task = self.env.ref('external_file_location.ftp_export_task')
self.ftp_attachment = self.env.ref(
'external_file_location.ir_attachment_export_file_ftp')
with server_mock_ftp(
{'setcontents': ''}) as FakeFTP:
self.task.run_export()
if FakeFTP:
self.assertEqual('setcontents', FakeFTP[-1]['method'])
self.assertEqual('done', self.ftp_attachment.state)
self.assertEqual(
'/home/user/test/ftp_test_export.txt',
FakeFTP[-1]['args'][0])
self.assertEqual(
'test ftp file export',
FakeFTP[-1]['kwargs']['data'])
def test_02_ftp_import_md5(self):
md5_file = ContextualStringIO()
md5_file.write(hashlib.md5('import ftp').hexdigest())
md5_file.seek(0)
task = self.env.ref('external_file_location.ftp_import_task')
task.md5_check = True
with server_mock_ftp(
{'open': MultiResponse({
1: md5_file,
0: self.test_file_ftp}),
'listdir': [task.filename]}) as Fakeftp:
task.run_import()
search_file = self.env['ir.attachment.metadata'].search(
(('name', '=', task.filename),))
self.assertEqual(len(search_file), 1)
self.assertEqual(b64decode(search_file[0].datas),
'import ftp')
self.assertEqual('open', Fakeftp[-1]['method'])
self.assertEqual(hashlib.md5('import ftp').hexdigest(),
search_file.external_hash)
def test_03_ftp_import_md5_corrupt_file(self):
md5_file = ContextualStringIO()
md5_file.write(hashlib.md5('import test ftp corrupted').hexdigest())
        md5_file.seek(0)
task = self.env.ref('external_file_location.ftp_import_task')
task.md5_check = True
with server_mock_ftp(
{'open': MultiResponse({
1: md5_file,
0: self.test_file_ftp}),
                'listdir': [task.filename]}):
with self.assertRaises(UserError):
task.run_import()
|
antipoachingmap/django-app
|
antipoaching/models.py
|
Python
|
mit
| 909
| 0.018702
|
from __future__ import unicode_literals
from django.db import models
from jsonfield import JSONField
SEVERITY_CHOICES = [('c', "critical"), ('w', "warning"), ('i', "info")]
class Event(models.Model):
description = models.TextField(max_length=500, default='')
severity = models.CharField(('severity'), choices=SEVERITY_CHOICES, default='i', max_length=1)
timestamp = models.IntegerField()
lat = models.FloatField(default=0)
long = models.FloatField(default=0)
    extra = JSONField(null=True, default="")
class Media(models.Model):
description = models.TextField()
format = models.CharField(blank=False, max_length=3)
timestamp = models.IntegerField()
filename = models.CharField(max_length=255)
filesize = models.BigIntegerField()
event = models.ForeignKey(
Event, null=True, related_name='media', on_delete=models.CASCADE,
)
class Meta:
ordering = ('timestamp',)
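# Illustrative ORM usage (a sketch, not part of the original file; field
# values are made up, timestamps are Unix epoch seconds):
#
#   event = Event.objects.create(description="Shots heard near waterhole",
#                                severity='c', timestamp=1467331200,
#                                lat=-1.948, long=34.687)
#   Media.objects.create(description="Ranger photo", format='jpg',
#                        timestamp=1467331260, filename="IMG_0001.jpg",
#                        filesize=2048000, event=event)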
|
oVirt/vdsm
|
lib/vdsm/metrics/hawkular.py
|
Python
|
gpl-2.0
| 2,524
| 0
|
#
# Copyright 2016-2017 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# Refer to the README and COPYING files for full details of the license
#
from __future__ import absolute_import
from __future__ import division
import collections
import logging
import threading
import six
from vdsm.common import compat
from vdsm.common import concurrent
from vdsm.config import config
try:
from hawkular import metrics
except ImportError as e:
raise compat.Unsupported(str(e))
_running = False
_queue = collections.deque(maxlen=config.getint('metrics', 'queue_size'))
_cond = threading.Condition(threading.Lock())
_STOP = object()
def start(address):
global _running
if _running:
raise RuntimeError('trying to start reporter while running')
logging.info("Starting hawkular reporter")
concurrent.thread(_run, name='hawkular', args=(address,)).start()
_running = True
def stop():
logging.info("Stopping hawkular reporter")
with _cond:
_queue.clear()
_queue.append(_STOP)
_cond.notify()
def send(report):
metrics_list = [_get_gauge_metric(name, value)
for name, value in six.iteritems(report)]
_queue.append(metrics_list)
with _cond:
_cond.notify()
def _get_gauge_metric(name, value):
return metrics.create_metric(metrics.MetricType.Gauge, name,
metrics.create_datapoint(float(value)))
def _run(address):
global _running
client = metrics.HawkularMetricsClient(tenant_id="oVirt",
host=address)
while True:
with _cond:
while not _queue:
_cond.wait()
while _queue:
items = _queue.popleft()
if items is _STOP:
break
client.put(items)
_running = False
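# Illustrative usage (a sketch, not part of the original module):
#
#   start("hawkular.example.com")        # spawn the reporter thread
#   send({"hosts.vm0.cpu.user": 12.5})   # queue one gauge sample
#   stop()                               # drain the queue and stop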
|
agoragames/py-eventsocket
|
eventsocket.py
|
Python
|
bsd-3-clause
| 20,384
| 0.01771
|
"""
A socket wrapper that uses Event IO.
"""
import socket
import event
import time
import logging
import errno
import traceback
import os
from collections import deque
# TODO: Use new io objects from 2.6
# 26 July 10 - I looked into this and a potential problem with io.StringIO is
# that it assumes all text is unicode. Without a full test and probably lots
# of code updated elsewhere, the older StringIO is probably the better choice
# to fix the bug @AW
# https://agora.lighthouseapp.com/projects/47111/tickets/628-odd-amqp-error
from cStringIO import StringIO
class EventSocket(object):
"""
A socket wrapper which uses libevent.
"""
def __init__( self, family=socket.AF_INET, type=socket.SOCK_STREAM, \
protocol=socket.IPPROTO_IP, read_cb=None, accept_cb=None, \
close_cb=None, error_cb=None, output_empty_cb=None, sock=None, \
debug=False, logger=None, max_read_buffer=0, **kwargs):
"""
Initialize the socket. If no read_cb defined, socket will only be used
for reading. If this socket will be used for accepting new connections,
set read_cb here and it will be passed to new sockets. You can also set
accept_cb and be notified with an EventSocket object on accept(). The
error_cb will be called if there are any errors on the socket. The args
to it will be this socket, an error message, and an optional exception.
The close_cb will be called when this socket closes, with this socket as
its argument. If needed, you can wrap an existing socket by setting the
sock argument to a socket object.
"""
self._debug = debug
self._logger = logger
if self._debug and not self._logger:
print 'WARNING: to debug EventSocket, must provide a logger'
self._debug = False
# There various events we may or may not schedule
self._read_event = None
self._write_event = None
self._accept_event = None
self._connect_event = None
self._pending_read_cb_event = None
# Cache the peername so we can include it in logs even if the socket
# is closed. Note that connect() and bind() have to be the ones to do
# that work.
self._peername = 'unknown'
if sock:
self._sock = sock
try:
self._peername = "%s:%d"%self._sock.getpeername()
# Like connect(), only initialize these if the socket is already connected.
self._read_event = event.read( self._sock, self._protected_cb, self._read_cb )
self._write_event = event.write( self._sock, self._protected_cb, self._write_cb )
except socket.error, e:
# unconnected
pass
else:
self._sock = socket.socket(family, type, protocol)
# wholesale binding of stuff we don't need to alter or intercept
self.listen = self._sock.listen
self.setsockopt = self._sock.setsockopt
self.fileno = self._sock.fileno
self.getpeername = self._sock.getpeername
self.getsockname = self._sock.getsockname
self.getsockopt = self._sock.getsockopt
self.setblocking = self._sock.setblocking # is this correct?
self.settimeout = self._sock.settimeout
self.gettimeout = self._sock.gettimeout
self.shutdown = self._sock.shutdown
self._max_read_buffer = max_read_buffer
#self._write_buf = []
self._write_buf = deque()
#self._read_buf = StringIO()
self._read_buf = bytearray()
self._parent_accept_cb = accept_cb
self._parent_read_cb = read_cb
self._parent_error_cb = error_cb
self._parent_close_cb = close_cb
self._parent_output_empty_cb = output_empty_cb
# This is the pending global error message. It's sort of a hack, but it's
# used for __protected_cb in much the same way as errno. This prevents
# having to pass an error message around, when the best way to do that is
# via kwargs that the event lib is itself trying to interpret and won't
# allow to pass to __protected_cb.
self._error_msg = None
self._closed = False
self._inactive_event = None
self.set_inactive_timeout( 0 )
@property
def closed(self):
'''
Return whether this socket is closed.
'''
return self._closed
def close(self):
"""
Close the socket.
"""
# if self._debug:
# self._logger.debug(\
# "closing connection %s to %s"%(self._sock.getsockname(), self._peername) )
# Unload all our events
if self._read_event:
self._read_event.delete()
self._read_event = None
if self._accept_event:
self._accept_event.delete()
self._accept_event = None
if self._inactive_event:
self._inactive_event.delete()
self._inactive_event = None
if self._write_event:
self._write_event.delete()
self._write_event = None
if self._connect_event:
self._connect_event.delete()
self._connect_event = None
if self._sock:
self._sock.close()
self._sock = None
# Flush any pending data to the read callbacks as appropriate. Do this
# manually as there is a chance for the following race condition to occur:
# pending data read by cb
# callback reads 1.1 messages, re-buffers .1 msg back
# callback disconnects from socket based on message, calling close()
# we get back to this code and find there's still data in the input buffer
# and the read cb hasn't been cleared. ruh roh.
#if self._parent_read_cb and self._read_buf.tell()>0:
if self._parent_read_cb and len(self._read_buf)>0:
cb = self._parent_read_cb
self._parent_read_cb = None
self._error_msg = "error processing remaining socket input buffer"
self._protected_cb( cb, self )
# Only mark as closed after socket is really closed, we've flushed buffered
# input, and we're calling back to close handlers.
self._closed = True
if self._parent_close_cb:
self._parent_close_cb( self )
if self._pending_read_cb_event:
self._pending_read_cb_event.delete()
self._pending_read_cb_event = None
if self._inactive_event:
self._inactive_event.delete()
self._inactive_event = None
# Delete references to callbacks to help garbage collection
self._parent_accept_cb = None
self._parent_read_cb = None
self._parent_error_cb = None
self._parent_close_cb = None
self._parent_output_empty_cb = None
# Clear buffers
self._write_buf = None
self._read_buf = None
def accept(self):
"""
No-op as we no longer perform blocking accept calls.
"""
pass
def _set_read_cb(self, cb):
"""
Set the read callback. If there's data in the output buffer, immediately
setup a call.
"""
self._parent_read_cb = cb
#if self._read_buf.tell()>0 and self._parent_read_cb!=None and self._pending_read_cb_event==None:
if len(self._read_buf)>0 and self._parent_read_cb!=None and self._pending_read_cb_event==None:
self._pending_read_cb_event = \
event.timeout( 0, self._protected_cb, self._parent_read_timer_cb )
  # Allow someone to change the various callbacks.
read_cb = property( fset=_set_read_cb )
accept_cb = property( fset=lambda self,func: setattr(self, '_parent_accept_cb', func ) )
close_cb = property( fset=lambda self,func: setattr(self, '_parent_close_cb', func ) )
  error_cb = property( fset=lambda self,func: setattr(self, '_parent_error_cb', func ) )
output_empty_cb = property( fset=lambda self,func: setattr(self, '_parent_output_empty_cb',func) )
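  # Illustrative usage (not part of the original file): the callbacks can
  # also be assigned after construction, e.g.
  #   sock = EventSocket(read_cb=on_read)
  #   sock.error_cb = on_error
  #   sock.close_cb = on_close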
def bind(self, *args):
"""
Bind the socket.
"""
if self._debug:
self._logger.debug( "binding to %s", str(args) )
self._sock.bind( *args )
self._peername = "%s:%d"%self.getsockname()
self._accept_event = event.read( self, self._protected_cb, self._accept_cb )
def connect(self, *args, **kwargs):
'''
Connect to the socket. If currently non-blocking, will return immediately
and call close_cb when the timeout is reached. If timeout_at is a float,
will wait until that time and then call the close_cb. Otherwise, it will
set timeout_at as time()+timeout, where timeout is a float argument o
|
luzfcb/cookiecutter
|
cookiecutter/zipfile.py
|
Python
|
bsd-3-clause
| 4,640
| 0
|
"""Utility functions for handling and fetching repo archives in zip format."""
from __future__ import absolute_import
import os
import tempfile
from zipfile import ZipFile
import requests
try:
# BadZipfile was renamed to BadZipFile in Python 3.2.
from zipfile import BadZipFile
except ImportError:
from zipfile import BadZipfile as BadZipFile
from cookiecutter.exceptions import InvalidZipRepository
from cookiecutter.prompt import read_repo_password
from cookiecutter.utils import make_sure_path_exists, prompt_and_delete
def unzip(zip_uri, is_url, clone_to_dir='.', no_input=False, password=None):
"""Download and unpack a zipfile at a given URI.
    This will download the zipfile to the cookiecutter repository,
and unpack into a temporary directory.
:param zip_uri: The URI for the zipfile.
:param is_url: Is the zip URI a URL or a file?
:param clone_to_dir: The cookiecutter repository directory
to put the archive into.
:param no_input: Suppress any prompts
:param password: The password to use when unpacking the repository.
"""
# Ensure that clone_to_dir exists
clone_to_dir = os.path.expanduser(clone_to_dir)
make_sure_path_exists(clone_to_dir)
if is_url:
# Build the name of the cached zipfile,
# and prompt to delete if it already exists.
identifier = zip_uri.rsplit('/', 1)[1]
zip_path = os.path.join(clone_to_dir, identifier)
if os.path.exists(zip_path):
download = prompt_and_delete(zip_path, no_input=no_input)
else:
download = True
if download:
# (Re) download the zipfile
r = requests.get(zip_uri, stream=True)
with open(zip_path, 'wb') as f:
for chunk in r.iter_content(chunk_size=1024):
if chunk: # filter out keep-alive new chunks
f.write(chunk)
else:
# Just use the local zipfile as-is.
zip_path = os.path.abspath(zip_uri)
# Now unpack the repository. The zipfile will be unpacked
# into a temporary directory
try:
zip_file = ZipFile(zip_path)
if len(zip_file.namelist()) == 0:
raise InvalidZipRepository(
'Zip repository {} is empty'.format(zip_uri)
)
# The first record in the zipfile should be the directory entry for
# the archive. If it isn't a directory, there's a problem.
first_filename = zip_file.namelist()[0]
if not first_filename.endswith('/'):
raise InvalidZipRepository(
'Zip repository {} does not include '
'a top-level directory'.format(zip_uri)
)
# Construct the final target directory
project_name = first_filename[:-1]
unzip_base = tempfile.mkdtemp()
unzip_path = os.path.join(unzip_base, project_name)
# Extract the zip file into the temporary directory
try:
zip_file.extractall(path=unzip_base)
except RuntimeError:
# File is password protected; try to get a password from the
# environment; if that doesn't work, ask the user.
if password is not None:
try:
zip_file.extractall(
path=unzip_base,
pwd=password.encode('utf-8')
)
except RuntimeError:
raise InvalidZipRepository(
'Invalid password provided for protected repository'
)
elif no_input:
raise InvalidZipRepository(
'Unable to unlock password protected repository'
)
else:
retry = 0
while retry is not None:
try:
password = read_repo_password('Repo password')
zip_file.extractall(
path=unzip_base,
pwd=password.encode('utf-8')
)
retry = None
except RuntimeError:
retry += 1
if retry == 3:
raise InvalidZipRepository(
'Invalid password provided '
'for protected repository'
)
except BadZipFile:
raise InvalidZipRepository(
'Zip repository {} is not a valid zip archive:'.format(zip_uri)
)
return unzip_path
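# Illustrative usage (a sketch, not part of the original module; the URL is
# made up):
#
#   repo_dir = unzip('https://example.com/template.zip', is_url=True,
#                    clone_to_dir='~/.cookiecutters/', no_input=True)
#   # repo_dir now points at the unpacked top-level template directory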
|
unioslo/cerebrum
|
Cerebrum/modules/dns/__init__.py
|
Python
|
gpl-2.0
| 561
| 0.003565
|
# -*- coding: utf-8 -*-
import cereconf
__version__ = "1.5"
IP_NUMBER = 'IPNumber'
IPv6_NUMBER = 'IPv6Number'
DNS_OWNER='DnsOwner'
REV_IP_NUMBER = 'IPNumber_rev'
A_RECORD = 'ARecord'
AAAA_RECORD = 'AAAARecord'
HOST_INFO = 'HostInfo'
MX_SET = 'MXSet'
SRV_TARGET = "SRV_target"
SRV_OWNER = "SRV_owner"
GENERAL_DNS_RECORD = "GeneralDnsRecord"
CNAME_OWNER = "Cname_owner"
CNAME_TARGET = "Cname_target"
# TODO: This value should not be hardcoded here. Didn't put it in
# cereconf as the zone support for dns_owner should be here "real soon
# now"
ZONE='uio.no'
|
k-rister/pbench
|
server/bin/pbench-base.py
|
Python
|
gpl-3.0
| 2,404
| 0.00624
|
#!/usr/bin/env python3
# -*- mode: python -*-
import os, sys
if __name__ != '__main__':
sys.exit(1)
from argparse import ArgumentParser
_NAME_ = "pbench-base.py"
parser = ArgumentParser(_NAME_)
parser.add_argument(
"-C", "--config", dest="cfg_name",
help="Specify config file")
parser.set_defaults(cfg_name = os.environ.get("CONFIG"))
parser.add_argument('prog', metavar='PROG', type=str, nargs=1,
help='the program name of the caller')
parser.add_argument('args', metavar='args', type=str, nargs='*',
help='program arguments')
parsed, _ = parser.parse_known_args()
_prog = os.path.basename(parsed.prog[0])
_dir = os.path.dirname(parsed.prog[0])
if not parsed.cfg_name:
# pbench-base.py is not always invoked with -C or --config or the CONFIG
# environment variable set. Since we really need access to the config
# file to operate, and we know the relative location of that config file,
# we check to see if that exists before declaring a problem.
config_name = os.path.join(os.path.dirname(_dir), "lib", "config",
"pbench-server.cfg")
if not os.path.exists(config_name):
print("{}: No config file specified: set CONFIG env variable or use"
" --config <file> on the command line".format(_prog),
file=sys.stderr)
sys.exit(1)
else:
config_name = parsed.cfg_name
# Export all the expected pbench config file attributes for the
# existing shell scripts. This maintains the single-source-of-
# truth for those definitions in the PbenchConfig class, but
# still accessible to all pbench bash shell scripts.
from pbench import PbenchConfig, BadConfig
try:
config = PbenchConfig(config_name)
except BadConfig as e:
print("{}: {}".format(_prog, e), file=sys.stderr)
sys.exit(1)
# Exclude the "fi
|
les" and "conf" attributes from being exported
vars = sorted([ key for key in config.__dict__.keys() \
if key not in ('files', 'conf', 'timestamp', '_unittests', 'get') ])
for att in vars:
try:
os.environ[att] = getattr(config, att)
except AttributeError:
print("{}: Missing internal pbench attribute, \"{}\", in"
" configuration".format(_prog, att), file=sys.stderr)
sys.exit(1)
if config._unittests:
os.environ['_PBENCH_SERVER_TEST'] = "1"
cmd = "{}.sh".format(sys.argv[1])
args = [ cmd ] + sys.argv[2:]
os.execv(cmd, args)
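# Illustrative invocation (a sketch, not part of the original script; the
# program name is made up):
#
#   CONFIG=/opt/pbench-server/lib/config/pbench-server.cfg \
#       pbench-base.py pbench-server-prep-shim [args...]
#
# which exports the config attributes and exec's pbench-server-prep-shim.sh.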
|
fboers/jumegX
|
tsvgl/old/jumeg_tsv_wx02.py
|
Python
|
bsd-3-clause
| 11,636
| 0.019337
|
import sys
import numpy as np
#from OpenGL.GL import *
#from OpenGL.GLU import *
try:
import wx
from wx import glcanvas
from wx.glcanvas import GLCanvas
except ImportError:
raise ImportError, "Required dependency wx.glcanvas not present"
try:
from OpenGL.GL import *
from OpenGL.GLU import gluOrtho2D
except ImportError:
raise ImportError, "Required dependency OpenGL not present"
import numpy as np
from jumeg.tsv.jumeg_tsv_vob import JuMEGVertexBuffer
class JuMEGPlot2D(GLCanvas):
def __init__(self, parent):
attribList = (glcanvas.WX_GL_RGBA, # RGBA
glcanvas.WX_GL_DOUBLEBUFFER, # Double Buffered
                      glcanvas.WX_GL_DEPTH_SIZE, 24) # 24 bit
GLCanvas.__init__(self, parent, -1, attribList=attribList)
wx.EVT_PAINT(self, self.OnPaint)
wx.EVT_SIZE(self, self.OnSize)
#wx.EVT_MOTION(self, self.OnMouseMotion)
wx.EVT_LEFT_DOWN(self, self.OnMouseLeftDown)
wx.EVT_LEFT_UP(self, self.OnMouseLeftUp)
wx.EVT_ERASE_BACKGROUND(self, lambda e: None)
# wx.EVT_CLOSE(self, self.OnClose)
# wx.EVT_CHAR(self, self.OnKeyDown)
        self.n_channels=10
self.n_timepoints= 10000
self.SetFocus()
self.GLinitialized = False
self.init = False
self.rotation_y = 0.0
self.rotation_x = 0.0
self.prev_y = 0
self.prev_x = 0
self.mouse_down = False
self.is_on_draw =False
self.width = 400
self.height = 400
#Sizer = wx.BoxSizer(wx.VERTICAL)
#Sizer.Add(self.canvas, 1, wx.EXPAND|wx.ALL, 5)
#self.SetSizerAndFit(Sizer)
self.srate=1024.15
self.data = None
self.timepoints=None
self.vbo_id=0
self._init_data()
# self.vbo = VertexBuffer(self.data_vbo)
# print "OK"
# self.vbo = JuMEGVertexBuffer(self.data_vbo)
self.vbo = JuMEGVertexBuffer()
def _init_data(self):
import numpy as np
ch=self.n_channels
n = self.n_timepoints
self.timepoints = np.arange(n) / self.srate
self.data = np.zeros((ch,n), dtype=np.float32)
#self.data = np.sin( 2 *np.pi + self.timepoints)
print"start calc"
for i in range( ch ):
#self.data[i,:] = np.sin(self.timepoints * (10.0 + i) + (10 *i*np.pi) ) / ( 1.0 + self.timepoints * self.timepoints ) +np.sin( self.timepoints * 0.2* 2*np.pi)
self.data[i,:] = np.sin(self.timepoints * (2 * i+1) * 2* np.pi)
self.data_4_vbo = np.zeros((n,2), dtype=np.float32).flatten()
#self.data_vbo[:,0] = self.timepoints
#self.data_vbo[:,1] = self.data[-1,:]
print"done calc"
self.data_4_vbo_tp = self.data_4_vbo[0:-1:2]
self.data_4_vbo_sig = self.data_4_vbo[1::2]
self.data_4_vbo_sig[:] = self.data[0,:]
self.data_4_vbo_tp[:] = self.timepoints
# graph[i].x = x;
# graph[i].y = sin(x * 10.0) / (1.0 + x * x);
#my $data_4_vbo = pdl( zeroes(2,$data->dim(-1) ) )->float();
#my $data_4_vbo_timepoints = $data_4_vbo->slice("(0),:");
#my $data_4_vbo_signal = $data_4_vbo->slice("(1),:");
# $data_4_vbo_timepoints .= $datax; #$self->xdata();
#my $data_vbo = $data_4_vbo->flat;
def set_window(self,l,r,b,t):
glMatrixMode(GL_PROJECTION)
glLoadIdentity()
gluOrtho2D(l,r,b,t)
def set_viewport(self,l,r,b,t):
glViewport(l,b,r-l,t-b)
def OnMouseLeftDown(self, event):
self.mouse_down = True
self.prev_x = event.GetX()
self.prev_y = event.GetY()
print"MLD"
print self.prev_x
print self.prev_y
def OnMouseLeftUp(self, event):
self.mouse_down = False
print"MLU"
# Canvas Proxy Methods
def GetGLExtents(self):
"""Get the extents of the OpenGL canvas."""
return self.GetClientSize()
#def SwapBuffers(self):
# """Swap the OpenGL buffers."""
# #self.canvas.SwapBuffers()
# self.SwapBuffers()
#
# wxPython Window Handlers
#def processEraseBackgroundEvent(self, event):
# """Process the erase background event."""
# pass # Do nothing, to avoid flashing on MSWin
def OnSize(self, event):
"""Process the resize event."""
if self.GetContext():
# Make sure the frame is shown before calling SetCurrent.
#self.Show()
self.SetCurrent()
size = self.GetGLExtents()
self.OnReshape(size.width, size.height)
self.Refresh(False)
event.Skip()
def OnPaint(self, event):
"""Process the drawing event."""
self.SetCurrent()
# This is a 'perfect' time to initialize OpenGL ... only if we need to
if not self.GLinitialized:
self.OnInitGL()
self.GLinitialized = True
self.OnDraw()
event.Skip()
#
# GLFrame OpenGL Event Handlers
def OnInitGL(self):
"""Initialize OpenGL for use in the window."""
glClearColor(1, 1, 1, 1)
def OnReshape(self, width, height):
"""Reshape the OpenGL viewport based on the dimensions of the window."""
self.set_viewport(0,width,0, height)
def OnDraw(self, *args, **kwargs):
"Draw the window."
if self.is_on_draw:
return
self.is_on_draw = True
#self.vbo = VertexBuffer(self.data_vbo)
size = self.GetGLExtents()
#--- reshape
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT)
glClearColor(1.0,1.0,1.0,0.0)
glLineWidth(2)
# self.set_viewport(0,width,0, height)
#glMatrixMode(GL_MODELVIEW)
#glLoadIdentity()
xmin=self.timepoints[0]
xmax=self.timepoints[-1]
#---start sub plots
w0 = 10
w1 = size.width-10
h0 = 0
dh = int( size.height / self.data.shape[0] );
h1 = dh
ymin=-1.0
ymax=1.0
dpos = ymin + (ymax - ymin) / 2.0
glColor3f(0.0,0.0,1.0)
glLineWidth(2)
glColor3f(0,0,1)
self.data_4_vbo_sig[:] = self.data[0,:]
self.vbo.data = self.data_4_vbo
print self.vbo.data_buffer_size
self.vbo.vbo_init()
for idx in range( self.n_channels ):
# glColor3f(0.0,0.0,1.0)
self.set_viewport(w0,w1,h0,h1)
#ymin = self.data[idx,:].min()
#ymax = self.data [idx,:].max()
#dpos = ymin + (ymax - ymin) / 2.0
self.set_window(xmin,xmax,ymin,ymax )
#--- draw zero line
# glLineWidth(1)
# glColor3f(0,0,0)
# glColor3f(0.4,0.4,0.4)
# glBegin(GL_LINES)
# glVertex2f(xmin,0.0)
# glVertex2f(xmax,0.0)
# glEnd()
# glBegin(GL_LINES)
# glVertex2f(xmin,dpos)
# glVertex2f(xmax,dpos)
# glEnd();
#glRasterPos2f(xmin,dpos)
#--- plot signal
# glLineWidth(2)
# glColor3f(0,0,1)
#--- create OGL verts buffer
# glDisableClientState(GL_VERTEX_ARRAY)
# self.data_vbo[:,0] = self.timepoints
self.data_4_vbo_sig[:] = self.data[idx,:]
self.vbo.data = self.data_4_vbo
self.vbo.vbo_update()
# self.vbo.data = self.data_vbo
self.vbo.vbo_draw()
h0 += dh
h1 += dh + 1
# glBufferSubDataARB_p(GL_ARRAY_BUFFER_ARB,0,$ogl_array);
glFlush();
self.SwapBuffers()
self.is_on_draw=False
self.vbo.vbo_reset()
class JuMEG_TSV_MainFrame(wx.Frame):
"""JuMEG TSV wxProject MainFrame."""
def __init__(self, parent,title="JuMEG TSV",id=wx.ID_ANY,
pos=wx.DefaultPosition,size=wx.DefaultSize,style=wx.DEFAULT_FRAME_STYLE,name="MainWindow"):
super(JuMEG_TSV_MainFrame, self).__init__(parent,id, title, pos, size, style, name)
#--- Options Plot/Time/Channels
self._ID_OPT_PLOT = 10111
self._ID_OPT_TIME =
|
openprocurement/openprocurement.auction
|
openprocurement/auction/design.py
|
Python
|
apache-2.0
| 2,643
| 0.001135
|
# -*- coding: utf-8 -*-
from couchdb.design import ViewDefinition
from couchdb.http import HTTPError
from time import sleep
from random import randint
import os
def add_index_options(doc):
doc['options'] = {'local_seq': True}
start_date_chronograph = ViewDefinition(
'chronograph',
'start_date',
open(os.path.join(os.path.dirname(os.path.abspath(__file__)), 'design_files/start_date.js')).read()
)
def sync_design_chronograph(db):
views = [start_date_chronograph]
ViewDefinition.sync_many(db, views, remove_missing=True, callback=add_index_options)
endDate_view = ViewDefinition(
'auctions',
'by_endDate',
''' function(doc) {
var end = new Date(doc.endDate||doc.stages[0].start).getTime()
emit(end, null);
}
'''
)
startDate_view = ViewDefinition(
'auctions',
'by_startDate',
''' function(doc) {
var start = new Date(doc.stages[0].start).getTime()
emit(start, null);
}
'''
)
PreAnnounce_view = ViewDefinition(
'auctions',
'PreAnnounce',
''' function(doc) {
if ((doc.stages.length - 2) == doc.current_stage){
emit(null, null);
}
}
'''
)
def sync_design(db):
views = [endDate_view, startDate_view, PreAnnounce_view]
for view in views:
view.sync(db)
while True:
design = db.get('_design/auctions')
if not design:
design = {'_id': '_design/auctions'}
validate_doc_update = '''
function(newDoc, oldDoc, userCtx, secObj) {
if (userCtx.roles.indexOf('_admin') !== -1) {
return true;
} else {
                   throw({forbidden: 'Only valid user may change docs.'});
}
}
'''
    start_date_filter = '''function(doc, req) {
        var now = new Date();
var start = new Date(((doc.stages||[])[0]||{}).start || '2000');
if (start > now){
return true;
}
return false;
}
'''
if 'validate_doc_update' not in design or \
validate_doc_update != design['validate_doc_update'] or \
start_date_filter != design.get('filters', {}).get('by_startDate'):
design['validate_doc_update'] = validate_doc_update
design['filters'] = design.get('filters', {})
design['filters']['by_startDate'] = start_date_filter
try:
return db.save(design)
except HTTPError:
sleep(randint(0, 2000) / 1000.0)
else:
return
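# Illustrative usage (a sketch, not part of the original module; server URL
# and database name are made up):
#
#   import couchdb
#   db = couchdb.Server('http://localhost:5984/')['auctions']
#   sync_design_chronograph(db)
#   sync_design(db)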
|
ubic135/odoo-design
|
addons/website/models/website.py
|
Python
|
agpl-3.0
| 36,133
| 0.003709
|
# -*- coding: utf-8 -*-
import cStringIO
import contextlib
import datetime
import hashlib
import inspect
import logging
import math
import mimetypes
import unicodedata
import os
import re
import time
import urlparse
from PIL import Image
from sys import maxint
import werkzeug
# optional python-slugify import (https://github.com/un33k/python-slugify)
try:
import slugify as slugify_lib
except ImportError:
slugify_lib = None
import openerp
from openerp.osv import orm, osv, fields
from openerp.tools import html_escape as escape, ustr, image_resize_and_sharpen, image_save_for_web
from openerp.tools.safe_eval import safe_eval
from openerp.addons.web.http import request
from werkzeug.exceptions import NotFound
logger = logging.getLogger(__name__)
def url_for(path_or_uri, lang=None):
if isinstance(path_or_uri, unicode):
path_or_uri = path_or_uri.encode('utf-8')
current_path = request.httprequest.path
if isinstance(current_path, unicode):
current_path = current_path.encode('utf-8')
location = path_or_uri.strip()
force_lang = lang is not None
url = urlparse.urlparse(location)
if request and not url.netloc and not url.scheme and (url.path or force_lang):
location = urlparse.urljoin(current_path, location)
lang = lang or request.context.get('lang')
langs = [lg[0] for lg in request.website.get_languages()]
if (len(langs) > 1 or force_lang) and is_multilang_url(location, langs):
ps = location.split('/')
if ps[1] in langs:
# Replace the language only if we explicitly provide a language to url_for
if force_lang:
ps[1] = lang
# Remove the default language unless it's explicitly provided
elif ps[1] == request.website.default_lang_code:
ps.pop(1)
# Insert the context language or the provided language
elif lang != request.website.default_lang_code or force_lang:
ps.insert(1, lang)
location = '/'.join(ps)
return location.decode('utf-8')
def is_multilang_url(local_url, langs=None):
if not langs:
langs = [lg[0] for lg in request.website.get_languages()]
spath = local_url.split('/')
# if a language is already in the path, remove it
if spath[1] in langs:
spath.pop(1)
local_url = '/'.join(spath)
try:
# Try to match an endpoint in werkzeug's routing table
url = local_url.split('?')
path = url[0]
query_string = url[1] if len(url) > 1 else None
router = request.httprequest.app.get_db_router(request.db).bind('')
func = router.match(path, query_args=query_string)[0]
return func.routing.get('website', False) and func.routing.get('multilang', True)
except Exception:
return False
def slugify(s, max_length=None):
""" Transform a string to a slug that can be used in a url path.
This method will first try to do the job with python-slugify if present.
Otherwise it will process string by stripping leading and ending spaces,
converting unicode chars to ascii, lowering all chars and replacing spaces
and underscore with hyphen "-".
:param s: str
:param max_length: int
:rtype: str
"""
s = ustr(s)
if slugify_lib:
        # There are 2 different 'slugify' libraries; only python-slugify is supported
try:
return slugify_lib.slugify(s, max_length=max_length)
except TypeError:
pass
uni = unicodedata.normalize('NFKD', s).encode('ascii', 'ignore').decode('ascii')
slug = re.sub('[\W_]', ' ', uni).strip().lower()
slug = re.sub('[-\s]+', '-', slug)
return slug[:max_length]
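# For illustration (not part of the original file), the fallback path above
# behaves as follows:
#   slugify(u'Héllo Wörld!')      -> 'hello-world'
#   slugify(u'Some long name', 9) -> 'some-long'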
def slug(value):
if isinstance(value, orm.browse_record):
# [(id, name)] = value.name_get()
id, name = value.id, value.display_name
else:
# assume name_search result tuple
id, name = value
slugname = slugify(name or '').strip().strip('-')
if not slugname:
return str(id)
return "%s-%d" % (slugname, id)
# NOTE: as the pattern is used as-is for the ModelConverter (ir_http.py), do not use any flags
_UNSLUG_RE = re.compile(r'(?:(\w{1,2}|\w[A-Za-z0-9-_]+?\w)-)?(-?\d+)(?=$|/)')
DEFAULT_CDN_FILTERS = [
"^/[^/]+/static/",
"^/web/(css|js)/",
"^/website/image/",
]
def unslug(s):
"""Extract slug and id from a string.
    Always return a 2-tuple (str|None, int|None)
"""
m = _UNSLUG_RE.match(s)
if not m:
return None, None
return m.group(1), int(m.group(2))
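# For illustration (not part of the original file):
#   unslug('my-page-42') -> ('my-page', 42)
#   unslug('42')         -> (None, 42)
#   unslug('not-a-slug') -> (None, None)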
def urlplus(url, params):
return werkzeug.Href(url)(params or None)
class website(osv.osv):
def _get_menu(self, cr, uid, ids, name, arg, context=None):
res = {}
menu_obj = self.pool.get('website.menu')
for id in ids:
menu_ids = menu_obj.search(cr, uid, [('parent_id', '=', False), ('website_id', '=', id)], order='id', context=context)
res[id] = menu_ids and menu_ids[0] or False
return res
_name = "website" # Avoid website.website convention for conciseness (for new api). Got a special authorization from xmo and rco
_description = "Website"
_columns = {
'name': fields.char('Website Name'),
        'domain': fields.char('Website Domain'),
        'company_id': fields.many2one('res.company', string="Company"),
'language_ids': fields.many2many('res.lang', 'website_lang_rel', 'website_id', 'lang_id', 'Languages'),
'default_lang_id': fields.many2one('res.lang', string="Default language"),
'default_lang_code': fields.related('default_lang_id', 'code', type="char", string="Default language code", store=True),
'social_twitter': fields.char('Twitter Account'),
'social_facebook': fields.char('Facebook Account'),
'social_github': fields.char('GitHub Account'),
'social_linkedin': fields.char('LinkedIn Account'),
'social_youtube': fields.char('Youtube Account'),
'social_googleplus': fields.char('Google+ Account'),
'google_analytics_key': fields.char('Google Analytics Key'),
'user_id': fields.many2one('res.users', string='Public User'),
'compress_html': fields.boolean('Compress HTML'),
'cdn_activated': fields.boolean('Activate CDN for assets'),
'cdn_url': fields.char('CDN Base URL'),
'cdn_filters': fields.text('CDN Filters', help="URL matching those filters will be rewritten using the CDN Base URL"),
'partner_id': fields.related('user_id','partner_id', type='many2one', relation='res.partner', string='Public Partner'),
'menu_id': fields.function(_get_menu, relation='website.menu', type='many2one', string='Main Menu')
}
_defaults = {
'user_id': lambda self,cr,uid,c: self.pool['ir.model.data'].xmlid_to_res_id(cr, openerp.SUPERUSER_ID, 'base.public_user'),
'company_id': lambda self,cr,uid,c: self.pool['ir.model.data'].xmlid_to_res_id(cr, openerp.SUPERUSER_ID,'base.main_company'),
'compress_html': False,
'cdn_activated': False,
'cdn_url': '//localhost:8069/',
'cdn_filters': '\n'.join(DEFAULT_CDN_FILTERS),
}
# cf. Wizard hack in website_views.xml
def noop(self, *args, **kwargs):
pass
def write(self, cr, uid, ids, vals, context=None):
self._get_languages.clear_cache(self)
return super(website, self).write(cr, uid, ids, vals, context)
def new_page(self, cr, uid, name, template='website.default_page', ispage=True, context=None):
context = context or {}
imd = self.pool.get('ir.model.data')
view = self.pool.get('ir.ui.view')
template_module, template_name = template.split('.')
# completely arbitrary max_length
page_name = slugify(name, max_length=50)
page_xmlid = "%s.%s" % (template_module, page_name)
try:
# existing page
imd.get_object_reference(cr, uid, template_module, page_name)
except ValueError:
# new page
_, template_id = imd.get_object_reference(cr, uid, template_modul
|
mikeckennedy/python-jumpstart-course-demos
|
apps/10_movie_search/final/movie_svc.py
|
Python
|
mit
| 715
| 0
|
import collections
import requests
MovieResult = collections.namedtuple(
'MovieResult',
"imdb_code,title,duration,director,year,rating,imdb_score,keywords,genres")
def find_movies(search_text):
if not search_text or not search_text.strip():
raise ValueError("Search text is required")
# This URL changed since the recording to support SSL.
url = 'http://movieservice.talkpython.fm/api/search/{}'.format(search_text)
resp = requests.get(url)
resp.raise_for_status()
movie_data = resp.json()
    movies_list = movie_data.get('hits')
movies = [
MovieResult(**md)
for md in movies_list
]
movies.sort(key=lambda m: -m.year)
return movies
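if __name__ == '__main__':
    # Illustrative usage (a sketch, not part of the original file).
    for movie in find_movies('capital'):
        print("{} ({})".format(movie.title, movie.year))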
|
AguNnamdi/flask_microblog
|
app/auth/views.py
|
Python
|
mit
| 5,982
| 0.003176
|
from flask import render_template, redirect, request, url_for, flash
from flask.ext.login import login_user, current_user, logout_user, login_required
from . import auth
from ..models import User, AnonymousUser
from .forms import LoginForm, RegistrationForm, ChangePasswordForm, \
PasswordResetRequestForm, PasswordResetForm, ChangeEmailForm
from .. import db
from ..email import send_email
@auth.before_app_request
def before_request():
    if current_user.is_authenticated():
        current_user.ping()
        if not current_user.confirmed and request.endpoint[:5] != 'auth.':
            return redirect(url_for('auth.unconfirmed'))
@auth.route('/unconfirmed')
def unconfirmed():
if current_user.is_anonymous() or current_user.confirmed:
return redirect(url_for('main.index'))
return render_template('auth/unconfirmed.html')
@auth.route('/login', methods=['GET', 'POST'])
def login():
form = LoginForm()
if form.validate_on_submit():
user = User.query.filter_by(email=form.email.data).first()
if user is not None and user.verify_password(form.password.data):
login_user(user, form.remember_me.data)
return redirect(request.args.get('next') or url_for('main.index'))
flash('Invalid username or password.')
return render_template('auth/login.html', form=form)
@auth.route('/logout')
@login_required
def logout():
logout_user()
flash('You have been logged out.')
return redirect(url_for('main.index'))
@auth.route('/register', methods=['GET', 'POST'])
def register():
form = RegistrationForm()
if form.validate_on_submit():
user = User(email=form.email.data,
username=form.username.data,
password=form.password.data)
db.session.add(user)
db.session.commit()
token = user.generate_confirmation_token()
send_email(user.email, 'Confirm Your Account',
'auth/email/confirm', user=user, token=token)
flash('A confirmation email has been sent to you by email.')
return redirect(url_for('auth.login'))
return render_template('auth/register.html', form=form)
@auth.route('/confirm/<token>')
@login_required
def confirm(token):
if current_user.confirmed:
return redirect(url_for('main.index'))
if current_user.confirm(token):
flash('You have confirmed your account. Thanks!')
else:
flash('The confirmation link is invalid or has expired.')
return redirect(url_for('main.index'))
@auth.route('/confirm')
@login_required
def resend_confirmation():
token = current_user.generate_confirmation_token()
send_email(current_user.email, 'Confirm Your Account',
'auth/email/confirm', user=current_user, token=token)
    flash('A new confirmation email has been sent to you by email.')
return redirect(url_for('main.index'))
@auth.route('/change_password', methods=['GET', 'POST'])
@login_required
def change_password():
form = ChangePasswordForm()
if form.validate_on_submit():
if current_user.verify_password(form.old_password.data):
current_user.password = form.password.data
db.session.add(current_user)
flash('Your password has been updated.')
return redirect(url_for('main.index'))
else:
flash('Invalid password.')
return render_template("auth/change_password.html", form=form)
@auth.route('/reset', methods=['GET', 'POST'])
def password_reset_request():
if not current_user.is_anonymous():
return redirect(url_for('main.index'))
form = PasswordResetRequestForm()
if form.validate_on_submit():
user = User.query.filter_by(email=form.email.data).first()
if user:
token = user.generate_reset_token()
send_email(user.email, 'Reset Your Password',
'auth/email/reset_password',
user=user, token=token,
next=request.args.get('next'))
flash('An email with instructions to reset your password has been '
'sent to you.')
return redirect(url_for('auth.login'))
return render_template('auth/reset_password.html', form=form)
@auth.route('/reset/<token>', methods=['GET', 'POST'])
def password_reset(token):
if not current_user.is_anonymous():
return redirect(url_for('main.index'))
form = PasswordResetForm()
if form.validate_on_submit():
user = User.query.filter_by(email=form.email.data).first()
if user is None:
return redirect(url_for('main.index'))
if user.reset_password(token, form.password.data):
flash('Your password has been updated.')
return redirect(url_for('auth.login'))
else:
return redirect(url_for('main.index'))
return render_template('auth/reset_password.html', form=form)
@auth.route('/change_email', methods=['GET', 'POST'])
@login_required
def change_email_request():
form = ChangeEmailForm()
if form.validate_on_submit():
if current_user.verify_password(form.password.data):
new_email = form.email.data
token = current_user.generate_email_change_token(new_email)
send_email(new_email, 'Confirm your email address',
'auth/email/change_email',
user=current_user, token=token)
flash('An email with instructions to confirm your new email '
'address has been sent to you.')
return redirect(url_for('main.index'))
else:
flash('Invalid email or password.')
return render_template("auth/change_email.html", form=form)
@auth.route('/change_email/<token>')
@login_required
def change_email(token):
if current_user.change_email(token):
flash('Your email address has been updated.')
else:
flash('Invalid request.')
return redirect(url_for('main.index'))
|
bilbeyt/ITURO-Giant_Flat
|
flat/server.py
|
Python
|
mit
| 254
| 0
|
#!/usr/bin/env python
import os
import sys
from swampdragon.swampdragon_server import run_server
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "flat.settings")
host_port = sys.argv[1] if len(sys.argv) > 1 else None
run_server(host_port=host_port)
| |
jmaas/cobbler
|
cobbler/modules/installation/post_puppet.py
|
Python
|
gpl-2.0
| 1,712
| 0.002336
|
"""
This module signs newly installed client puppet certificates if the
puppet master server is running on the same machine as the cobbler
server.
Based on:
http://www.ithiriel.com/content/2010/03/29/writing-install-triggers-cobbler
"""
from builtins import str
import re
import cobbler.utils as utils
def register():
# this pure python trigger acts as if it were a legacy shell-trigger, but is much faster.
# the return of this method indicates the trigger type
return "/var/lib/cobbler/triggers/install/post/*"
def run(api, args, logger):
objtype = args[0] # "system" or "profile"
name = args[1] # name of system or profile
# ip = args[2] # ip or "?"
if objtype != "system":
return 0
settings = api.settings()
if not str(settings.puppet_auto_setup).lower() in ["1", "yes", "y", "true"]:
return 0
if not str(settings.sign_puppet_certs_automatically).lower() in ["1", "yes", "y", "true"]:
return 0
system = api.find_system(name)
system = utils.blender(api, False, system)
    hostname = system["hostname"]
if not re.match(r'[\w-]+\..+', hostname):
search_domains = system['name_servers_search']
if search_domains:
            hostname += '.' + search_domains[0]
puppetca_path = settings.puppetca_path
cmd = [puppetca_path, 'cert', 'sign', hostname]
rc = 0
try:
rc = utils.subprocess_call(logger, cmd, shell=False)
except:
if logger is not None:
logger.warning("failed to execute %s" % puppetca_path)
if rc != 0:
if logger is not None:
logger.warning("signing of puppet cert for %s failed" % name)
return 0
|
tapomayukh/projects_in_python
|
sandbox_tapo/src/skin_related/BMED_8813_HAP/Features/single_feature/results/cross_validate_categories_BMED_8813_HAP_scaled_method_II_force.py
|
Python
|
mit
| 4,029
| 0.018119
|
# Principal Component Analysis Code :
from numpy import mean,cov,double,cumsum,dot,linalg,array,rank,size,flipud
from pylab import *
import numpy as np
import matplotlib.pyplot as pp
#from enthought.mayavi import mlab
import scipy.ndimage as ni
import roslib; roslib.load_manifest('sandbox_tapo_darpa_m3')
import rospy
#import hrl_lib.mayavi2_util as mu
import hrl_lib.viz as hv
import hrl_lib.util as ut
import hrl_lib.matplotlib_util as mpu
import pickle
from mvpa.clfs.knn import kNN
from mvpa.datasets import Dataset
from mvpa.clfs.transerror import TransferError
from mvpa.misc.data_generators import normalFeatureDataset
from mvpa.algorithms.cvtranserror import CrossValidatedTransferError
from mvpa.datasets.splitters import NFoldSplitter
import sys
sys.path.insert(0, '/home/tapo/svn/robot1_data/usr/tapo/data_code/BMED_8813_HAP/Data')
from data import Fmat_original
def pca(X):
#get dimensions
num_data,dim = X.shape
#center data
mean_X = X.mean(axis=1)
M = (X-mean_X) # subtract the mean (along columns)
Mcov = cov(M)
###### Sanity Check ######
i=0
n=0
while i < 41:
j=0
while j < 90:
if X[i,j] != X[i,j]:
print X[i,j]
print i,j
n=n+1
j = j+1
i=i+1
print n
##########################
print 'PCA - COV-Method used'
val,vec = linalg.eig(Mcov)
    #return the projection matrix, the variance and the mean
return vec,val,mean_X, M, Mcov
if __name__ == '__main__':
    Fmat = Fmat_original[82:123,:]
# Checking the Data-Matrix
m_tot, n_tot = np.shape(Fmat)
print 'Total_Matrix_Shape:',m_tot,n_tot
eigvec_total, eigval_total, mean_data_total, B, C = pca(Fmat)
#print eigvec_total
#print eigval_total
#print mean_data_total
m_eigval_total, n_eigval_total = np.shape(np.matrix(eigval_total))
m_eigvec_total, n_eigvec_total = np.shape(eigvec_total)
m_mean_data_total, n_mean_data_total = np.shape(np.matrix(mean_data_total))
print 'Eigenvalue Shape:',m_eigval_total, n_eigval_total
print 'Eigenvector Shape:',m_eigvec_total, n_eigvec_total
print 'Mean-Data Shape:',m_mean_data_total, n_mean_data_total
#Recall that the cumulative sum of the eigenvalues shows the level of variance accounted by each of the corresponding eigenvectors. On the x axis there is the number of eigenvalues used.
perc_total = cumsum(eigval_total)/sum(eigval_total)
# Reduced Eigen-Vector Matrix according to highest Eigenvalues..(Considering First 20 based on above figure)
W = eigvec_total[:,0:6]
m_W, n_W = np.shape(W)
print 'Reduced Dimension Eigenvector Shape:',m_W, n_W
#Projected Data:
Y = (W.T)*B
m_Y, n_Y = np.shape(Y.T)
print 'Transposed Projected Data Shape:', m_Y, n_Y
#Using PYMVPA
PCA_data = np.array(Y.T)
PCA_label_1 = ['Edge-1']*30 + ['Surface']*30 + ['Edge-2']*30
PCA_chunk_1 = ['Can-Edge-1']*5 + ['Book-Edge-1']*5 + ['Brown-Cardboard-Box-Edge-1']*5 + ['Cinder-Block-Edge-1']*5 + ['Tin-Box-Edge-1']*5 + ['White-Cardboard-Box-Edge-1']*5 + ['Can-Surface']*5 + ['Book-Surface']*5 + ['Brown-Cardboard-Box-Surface']*5 + ['Cinder-Block-Surface']*5 + ['Tin-Box-Surface']*5 + ['White-Cardboard-Box-Surface']*5 + ['Can-Edge-2']*5 + ['Book-Edge-2']*5 + ['Brown-Cardboard-Box-Edge-2']*5 + ['Cinder-Block-Edge-2']*5 + ['Tin-Box-Edge-2']*5 + ['White-Cardboard-Box-Edge-2']*5
clf = kNN(k=2)
terr = TransferError(clf)
ds1 = Dataset(samples=PCA_data,labels=PCA_label_1,chunks=PCA_chunk_1)
print ds1.samples.shape
cvterr = CrossValidatedTransferError(terr,NFoldSplitter(cvtype=1),enable_states=['confusion'])
error = cvterr(ds1)
print error
print cvterr.confusion.asstring(description=False)
figure(1)
cvterr.confusion.plot(numbers='True')
# Variances
figure(2)
title('Variances of PCs')
stem(range(len(perc_total)),perc_total,'--b')
axis([-0.3,30.3,0,1.2])
grid('True')
show()
|
Bovid/Bovid.python
|
template.py
|
Python
|
mit
| 7,644
| 0.039508
|
import collections
import re
#imports
class Jison:#extends
symbols = {}
terminals = {}
productions = {}
table = {}
default_actions = {}
version = '0.3.12'
debug = False
action_none = 0
action_shift = 1
action_deduce = 2
action_accept = 3
unput_stack = []
def trace(self):
"""trace"""
def __init__(self):
"""Setup Parser"""
"""@@PARSER_INJECT@@"""
def parser_perform_action(self, yy, yystate, s, o):
"""@@ParserPerformAction@@"""
def parser_lex(self):
token = self.lexerLex()
#end = 1
if token is not None:
return token
return self.Symbols["end"]
def parse_error(self, _str='', _hash=None):
raise Exception(_str)
def lexer_error(self, _str='', _hash=None):
raise Exception(_str)
def parse(self, _input):
if self.table is None:
raise Exception("Empty ")
self.eof = ParserSymbol("Eof", 1)
first_action = ParserAction(0, self.table[0])
first_cached_action = ParserCachedAction(first_action)
        stack = collections.deque([first_cached_action])
        stack_count = 1
        vstack = collections.deque([None])
        vstack_count = 1
yy = None
_yy = None
recovering = 0
symbol = None
action = None
err_str = ''
pre_error_symbol = None
state = None
self.set_input(_input)
while True:
# retrieve state number from top of stack
state = stack[stack_count].action.state
# use default actions if available
if state is not None and self.default_actions[state.index]:
action = self.default_actions[state.index]
else:
if symbol is None:
symbol = self.parser_lex()
# read action for current state and first input
if state is not None:
action = state.actions[symbol.index]
else:
action = None
if action is None:
                if recovering == 0:
# Report error
expected = []
actions = self.table[state.index].actions
for p in actions:
if self.terminals[p] is not None and p > 2:
                            expected.append(self.terminals[p].name)
if symbol.index in self.terminals:
got = self.terminals[symbol.index].name
else:
got = "NOTHING"
                    err_str = "Parser error on line " + str(self.yy.line_no) + ":\n" + self.show_position() + "\nExpecting " + (", ".join(expected)) + ". got '" + got + "'"
                    self.parse_error(err_str)
# Jison generated lexer
eof = None
yy = None
match = ''
condition_stack = collections.deque()
rules = {}
conditions = {}
done = False
less = None
_more = False
input = None
offset = None
ranges = None
flex = False
line_expression = re.compile("(?:\r\n?|\n).*")
def set_input(self, _input):
self.input = InputReader(_input)
self._more = self.less = self.done = False
self.yy = ParserValue()#
self.condition_stack.clear()
self.condition_stack.append('INITIAL')
if self.ranges is not None:
self.yy.loc = ParserLocation()
self.yy.loc.set_range(ParserRange(0, 0))
else:
self.yy.loc = ParserLocation()
self.offset = 0
def input(self):
ch = self.input.ch()
self.yy.text += ch
self.yy.leng += 1
self.offset += 1
self.match += ch
lines = self.line_expression.match(ch)
if lines is not None:
self.yy.line_no += 1
self.yy.loc.last_line += 1
else:
self.yy.loc.last_column += 1
if self.ranges is not None:
self.yy.loc.range.y += 1
return ch
def unput(self, ch):
yy = ParserValue()#
_len = len(ch)
lines = self.line_expression.split(ch)
lines_count = len(lines)
self.input.un_ch(_len)
yy.text = self.yy.text[0: _len - 1]
self.offset -= _len
old_lines = self.line_expression.split(self.match)
old_lines_count = len(old_lines)
self.match = self.match[0:len(self.match) - 1]
if lines_count - 1 > 0:
yy.line_no = self.yy.line_no - lines_count - 1
r = self.yy.loc.range
old_lines_length = old_lines[old_lines_count - lines_count] if old_lines[old_lines_count - lines_count] is not None else 0
            yy.loc = ParserLocation(self.yy.loc.first_line, self.yy.line_no, self.yy.loc.first_column, self.yy.loc.first_line)  # TODO
if self.ranges is not None:
yy.loc.range(ParserRange(r.x, r.x + self.yy.leng - _len))
self.unput_stack.push(yy)
def more(self):
self._more = True
def past_input(self):
matched = self.input.to_string()
past = matched[0:len(matched) - len(self.match)]
        result = past[-20:].replace('\n', '')
if len(past) > 20:
return '...' + result
return result
def upcoming_input(self):
        if not self.done:
next = self.match
            next_len = len(next)
if next_len < 20:
next += self.input.to_string()[:20 - next_len]
else:
                if next_len > 20:
                    next = next[:20] + '...'
return next.replace('\n', '')
def show_position(self):
pre = self.past_input()
c = '-' * len(pre)
return pre + self.upcoming_input() + '\n' + c + '^'
def next(self):
    if len(self.unput_stack) > 0:
        self.yy = self.unput_stack.pop()
    if self.done:
        return self.eof
    if self.input.done:
        self.done = True
    if not self._more:
        self.yy.text = ''
        self.match = ''
    rules = self.current_rules()
class ParserLocation:
first_line = 1
last_line = 0
first_column = 1
last_column = 0
range = None
def __init__(self, first_line = 1, last_line = 0, first_column = 1, last_column = 0):
self.first_line = first_line
self.last_line = last_line
self.first_column = first_column
self.last_column = last_column
def set_range(self, range):
self.range = range
class ParserValue:
leng = 0
loc = None
line_no = 0
text = None
class ParserCachedAction:
def __init__(self, action, symbol=None):
self.action = action
self.symbol = symbol
class ParserAction:
action = None
state = None
symbol = None
def __init__(self, action, state=None, symbol=None):
self.action = action
self.state = state
self.symbol = symbol
class ParserSymbol:
name = None
Index = 0
index = -1
symbols = {}
symbols_by_name = {}
def __init__(self, name, index):
self.name = name
self.index = index
def add_action(self, parser_action):
self.symbols[parser_action.index] = self.symbols_by_name[parser_action.name] = parser_action
class ParserError:
text = None
state = None
symbol = None
line_no = 0
loc = None
expected = None
def __init__(self, text, state, symbol, line_no, loc, expected):
self.text = text
self.state = state
self.symbol = symbol
self.line_no = line_no
self.loc = loc
self.expected = expected
class LexerError:
text = None
token = None
line_no = 0
def __init__(self, text, token, line_no):
self.text = text
self.token = token
self.line_no = line_no
class ParserState:
index = 0
actions = []
def __init__(self, index):
self.index = index
def set_actions(self, actions):
self.actions = actions
class ParserRange:
x = 0
y = 0
def __init__(self, x, y):
self.x = x
self.y = y
class InputReader:
input = None
length = 0
done = False
matches = []
position = 0
def __init__(self, _input):
self.input = _input
self.length = len(_input)
def add_match(self, match):
self.matches.append(match)
self.position += len(match)
self.done = (self.position >= self.length)
def ch(self):
ch = self.input[self.position]
self.add_match(ch)
return ch
def un_ch(self, ch_length):
self.position -= ch_length
self.position = max(0, self.position)
self.done = (self.position >= self.length)
def substring(self, start, end):
start = self.position if start == 0 else start + self.position
end = self.length if end == 0 else start + end
return self.input[start:end]
def match(self, rule):
    matches = re.search(rule, self.input[self.position:])
    if matches is not None:
        return matches.group()
    return None
def to_string(self):
return ''.join(self.matches)
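# Added usage sketch (not part of the original port): exercising the
# InputReader helper above, assuming only the class as defined here.
if __name__ == '__main__':
    reader = InputReader("abc")
    print(reader.ch())   # 'a' - consumes one character
    print(reader.ch())   # 'b'
    reader.un_ch(1)      # push one character back
    print(reader.ch())   # 'b' again
    print(reader.done)   # False - 'c' is still unread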
|
mducoffe/fuel
|
docs/conf.py
|
Python
|
mit
| 9,405
| 0.005853
|
# -*- coding: utf-8 -*-
#
# Fuel documentation build configuration file, created by
# sphinx-quickstart2 on Wed Oct 8 17:59:44 2014.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('..'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
# on_rtd is whether we are on readthedocs.org
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
if not on_rtd: # only import and set the theme if we're building docs locally
import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.doctest',
'sphinxcontrib.napoleon',
'sphinx.ext.todo',
'sphinx.ext.mathjax',
'sphinx.ext.graphviz',
'sphinx.ext.intersphinx'
]
intersphinx_mapping = {
'theano': ('http://theano.readthedocs.org/en/latest/', None),
'numpy': ('http://docs.scipy.org/doc/numpy/', None),
'scipy': ('http://docs.scipy.org/doc/scipy/reference/', None),
'python': ('http://docs.python.org/3.4', None),
'pandas': ('http://pandas.pydata.org/pandas-docs/stable/', None)
}
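# Illustrative note (added): with the mapping above, reST cross-references
# such as :class:`numpy.ndarray` or :func:`scipy.optimize.minimize` in the
# documentation resolve against the external inventories listed here.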
graphviz_dot_args = ['-Gbgcolor=#fcfcfc'] # To match the RTD theme
# Render todo lists
todo_include_todos = True
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Fuel'
copyright = u'2014, Université de Montréal'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.1'
# The full version, including alpha/beta/rc tags.
release = '0.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'Fueldoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index', 'Fuel.tex', u'Fuel Documentation',
u'Université de Montréal', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
|
chengluyu/SDU-Computer-Networks
|
assignment-2/multiprocesser.py
|
Python
|
mit
| 185
| 0
|
from multiprocessing import Pool
class Process:
    def __init__(self, processes=8):
        self.p = Pool(processes)

    def Exec(self, f, data):
        return self.p.map(f, data)
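# Usage sketch (added for illustration; 'square' is a hypothetical worker).
# Pool workers must be picklable, so the callable is defined at module level,
# and the __main__ guard is required on platforms that spawn processes.
def square(x):
    return x * x
if __name__ == '__main__':
    pool = Process(processes=4)
    print(pool.Exec(square, [1, 2, 3, 4]))  # [1, 4, 9, 16]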
|
adazey/Muzez
|
libs/requests/api.py
|
Python
|
gpl-3.0
| 6,115
| 0.005724
|
# -*- coding: utf-8 -*-
"""
requests.api
~~~~~~~~~~~~
This module implements the Requests API.
:copyright: (c) 2012 by Kenneth Reitz.
:license: Apache2, see LICENSE for more details.
"""
from . import sessions
def request(method, url, **kwargs):
"""Constructs and sends a :class:`Request <Request>`.
:param method: method for the new :class:`Request` object.
:param url: URL for the new :class:`Request` object.
:param params: (optional) Dictionary or bytes to be sent in the query string for the :class:`Request`.
:param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
:param json: (optional) json data to send in the body of the :class:`Request`.
:param headers: (optional) Dictionary of HTTP Headers to send with the :class:`Request`.
:param cookies: (optional) Dict or CookieJar object to send with the :class:`Request`.
:param files: (optional) Dictionary of ``'name': file-like-objects`` (or ``{'name': file-tuple}``) for multipart encoding upload.
``file-tuple`` can be a 2-tuple ``('filename', fileobj)``, 3-tuple ``('filename', fileobj, 'content_type')``
or a 4-tuple ``('filename', fileobj, 'content_type', custom_headers)``, where ``'content-type'`` is a string
defining the content type of the given file and ``custom_headers`` a dict-like object containing additional headers
to add for the file.
:param auth: (optional) Auth tuple to enable Basic/Digest/Custom HTTP Auth.
:param timeout: (optional) How long to wait for the server to send data
before giving up, as a float, or a :ref:`(connect timeout, read
timeout) <timeouts>` tuple.
:type timeout: float or tuple
:param allow_redirects: (optional) Boolean. Enable/disable GET/OPTIONS/POST/PUT/PATCH/DELETE/HEAD redirection.
:type allow_redirects: bool
:param proxies: (optional) Dictionary mapping protocol to the URL of the proxy.
:param verify: (optional) whether the SSL cert will be verified. A CA_BUNDLE path can also be provided. Defaults to ``True``.
:param stream: (optional) if ``False``, the response content will be immediately downloaded.
:param cert: (optional) if String, path to ssl client cert file (.pem). If Tuple, ('cert', 'key') pair.
:return: :class:`Response <Response>` object
:rtype: requests.Response
Usage::
>>> import requests
>>> req = requests.request('GET', 'http://httpbin.org/get')
<Response [200]>
"""
# By using the 'with' statement we are sure the session is closed, thus we
# avoid leaving sockets open which can trigger a ResourceWarning in some
# cases, and look like a memory leak in others.
with sessions.Session() as session:
return session.request(method=method, url=url, **kwargs)
def get(url, params=None, **kwargs):
"""Sends a GET request.
:param url: URL for the new :class:`Request` object.
:param params: (optional) Dictionary or bytes to be sent in the query string for the :class:`Request`.
:param \*\*kwargs: Optional arguments that ``request`` takes.
:return: :class:`Response <Response>` object
:rtype: requests.Response
"""
kwargs.setdefault('allow_redirects', True)
return request('get', url, params=params, **kwargs)
def options(url, **kwargs):
"""Sends a OPTIONS request.
:param url: URL for the new :class:`Request` object.
:param \*\*kwargs: Optional arguments that ``request`` takes.
:return: :class:`Response <Response>` object
:rtype: requests.Response
"""
kwargs.setdefault('allow_redirects', True)
return request('options', url, **kwargs)
def head(url, **kwargs):
"""Sends a HEAD request.
:param url: URL for the new :class:`Request` object.
:param \*\*kwargs: Optional arguments that ``request`` takes.
:return: :class:`Response <Response>` object
:rtype: requests.Response
"""
kwargs.setdefault('allow_redirects', False)
return request('head', url, **kwargs)
def post(url, data=None, json=None, **kwargs):
"""Sends a POST request.
:param url: URL for the new :class:`Request` object.
:param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
:param json: (optional) json data to send in the body of the :class:`Request`.
:param \*\*kwargs: Optional arguments that ``request`` takes.
:return: :class:`Response <Response>` object
:rtype: requests.Response
"""
return request('post', url, data=data, json=json, **kwargs)
def put(url, data=None, **kwargs):
"""Sends a PUT request.
:param url: URL for the new :class:`Request` object.
:param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
:param json: (optional) json data to send in the body of the :class:`Request`.
:param \*\*kwargs: Optional arguments that ``request`` takes.
:return: :class:`Response <Response>` object
:rtype: requests.Response
"""
return request('put', url, data=data, **kwargs)
def patch(url, data=None, **kwargs):
"""Sends a PATCH request.
:param url: URL for the new :class:`Request` object.
:param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
:param json: (optional) json data to send in the body of the :class:`Request`.
:param \*\*kwargs: Optional arguments that ``request`` takes.
:return: :class:`Response <Response>` object
:rtype: requests.Response
"""
return request('patch', url, data=data, **kwargs)
def delete(url, **kwargs):
"""Sends a DELETE request.
:param url: URL for the new :class:`Request` object.
:param \*\*kwargs: Optional arguments that ``request`` takes.
:return: :class:`Response <Response>` object
:rtype: requests.Response
"""
return request('delete', url, **kwargs)
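# Illustrative usage of the helpers above (added; httpbin.org is only a
# placeholder endpoint, and network access is assumed).
if __name__ == '__main__':
    r = get('http://httpbin.org/get', params={'q': 'test'})
    print(r.status_code)
    r = post('http://httpbin.org/post', json={'key': 'value'})
    print(r.json()['json'])  # {'key': 'value'}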
|
tomviner/pytest
|
src/_pytest/_code/code.py
|
Python
|
mit
| 36,328
| 0.000853
|
import inspect
import re
import sys
import traceback
from inspect import CO_VARARGS
from inspect import CO_VARKEYWORDS
from traceback import format_exception_only
from types import TracebackType
from typing import Generic
from typing import Optional
from typing import Pattern
from typing import Tuple
from typing import TypeVar
from typing import Union
from weakref import ref
import attr
import pluggy
import py
import _pytest
from _pytest._io.saferepr import safeformat
from _pytest._io.saferepr import saferepr
if False: # TYPE_CHECKING
from typing import Type
class Code:
""" wrapper around Python code objects """
def __init__(self, rawcode):
if not hasattr(rawcode, "co_filename"):
rawcode = getrawcode(rawcode)
try:
self.filename = rawcode.co_filename
self.firstlineno = rawcode.co_firstlineno - 1
self.name = rawcode.co_name
except AttributeError:
raise TypeError("not a code object: {!r}".format(rawcode))
self.raw = rawcode
def __eq__(self, other):
return self.raw == other.raw
# Ignore type because of https://github.com/python/mypy/issues/4266.
__hash__ = None # type: ignore
def __ne__(self, other):
return not self == other
@property
def path(self):
""" return a path object pointing to source code (note that it
might not point to an actually existing file). """
try:
p = py.path.local(self.raw.co_filename)
# maybe don't try this checking
if not p.check():
raise OSError("py.path check failed.")
except OSError:
# XXX maybe try harder like the weird logic
# in the standard lib [linecache.updatecache] does?
p = self.raw.co_filename
return p
@property
def fullsource(self):
""" return a _pytest._code.Source object for the full source file of the code
"""
from _pytest._code import source
full, _ = source.findsource(self.raw)
return full
def source(self):
""" return a _pytest._code.Source object for the code object's source only
"""
# return source only for that part of code
import _pytest._code
return _pytest._code.Source(self.raw)
def getargs(self, var=False):
""" return a tuple with the argument names for the code object
if 'var' is set True also return the names of the variable and
keyword arguments when present
"""
# handy shortcut for getting args
raw = self.raw
argcount = raw.co_argcount
if var:
argcount += raw.co_flags & CO_VARARGS
argcount += raw.co_flags & CO_VARKEYWORDS
return raw.co_varnames[:argcount]
class Frame:
"""Wrapper around a Python frame holding f_locals and f_globals
in which expressions can be evaluated."""
def __init__(self, frame):
self.lineno = frame.f_lineno - 1
self.f_globals = frame.f_globals
self.f_locals = frame.f_locals
self.raw = frame
self.code = Code(frame.f_code)
@property
def statement(self):
""" statement this frame is at """
import _pytest._code
if self.code.fullsource is None:
return _pytest._code.Source("")
return self.code.fullsource.getstatement(self.lineno)
def eval(self, code, **vars):
""" evaluate 'code' in the frame
'vars' are optional additional local variables
returns the result of the evaluation
"""
f_locals = self.f_locals.copy()
f_locals.update(vars)
return eval(code, self.f_globals, f_locals)
def exec_(self, code, **vars):
""" exec 'code' in the frame
'vars' are optional additional local variables
"""
f_locals = self.f_locals.copy()
f_locals.update(vars)
exec(code, self.f_globals, f_locals)
def repr(self, object):
""" return a 'safe' (non-recursive, one-line) string repr for 'object'
"""
return saferepr(object)
def is_true(self, object):
return object
def getargs(self, var=False):
""" return a list of tuples (name, value) for all arguments
if 'var' is set True also include the variable and keyword
arguments when present
"""
retval = []
for arg in self.code.getargs(var):
try:
retval.append((arg, self.f_locals[arg]))
except KeyError:
pass # this can occur when using Psyco
return retval
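# Added sketch (not part of pytest): wrapping the current execution frame
# with the helpers above; sys._getframe is CPython-specific.
def _frame_demo():
    frame = Frame(sys._getframe())
    print(frame.code.name)           # '_frame_demo'
    print(frame.eval("x + 1", x=2))  # 3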
class TracebackEntry:
""" a single entry in a traceback """
_repr_style = None
exprinfo = None
def __init__(self, rawentry, excinfo=None):
self._excinfo = excinfo
self._rawentry = rawentry
self.lineno = rawentry.tb_lineno - 1
def set_repr_style(self, mode):
assert mode in ("short", "long")
self._repr_style = mode
@property
def frame(self):
import _pytest._code
return _pytest._code.Frame(self._rawentry.tb_frame)
@property
def relline(self):
return self.lineno - self.frame.code.firstlineno
def __repr__(self):
return "<TracebackEntry %s:%d>" % (self.frame.code.path, self.lineno + 1)
@property
def statement(self):
""" _pytest._code.Source object for the current statement """
source = self.frame.code.fullsource
return source.getstatement(self.lineno)
@property
def path(self):
""" path to the source code """
return self.frame.code.path
@property
def locals(self):
""" locals of underlaying frame """
return self.frame.f_locals
def getfirstlinesource(self):
return self.frame.code.firstlineno
def getsource(self, astcache=None):
""" return failing source code. """
# we use the passed in astcache to not reparse asttrees
# within exception info printing
from _pytest._code.source import getstatementrange_ast
source = self.frame.code.fullsource
if source is None:
return None
key = astnode = None
if astcache is not None:
key = self.frame.code.path
if key is not None:
astnode = astcache.get(key, None)
start = self.getfirstlinesource()
try:
astnode, _, end = getstatementrange_ast(
self.lineno, source, astnode=astnode
)
except SyntaxError:
end = self.lineno + 1
else:
if key is not None:
astcache[key] = astnode
return source[start:end]
source = property(getsource)
def ishidden(self):
""" return True if the current frame has
|
a var __tracebackhide__
resolving to True.
If __tracebackhide__ is a callable, it gets called with the
ExceptionInfo instance and can decide whether to hide the traceback.
mostly for internal use
"""
f = self.frame
tbh = f.f_locals.get(
"__tracebackhide__", f.f_globals.get("__tracebackhide__", False)
)
if tbh and callable(tbh):
return tbh(None if self._excinfo is None else self._excinfo())
return tbh
def __str__(self):
try:
fn = str(self.path)
except py.error.Error:
fn = "???"
name = self.frame.code.name
try:
line = str(self.statement).lstrip()
except KeyboardInterrupt:
raise
except: # noqa
line = "???"
return " File %r:%d in %s\n %s\n" % (fn, self.lineno + 1, name, line)
@property
def name(self):
""" co_name of underlaying code """
return self.frame.code.raw.co_name
class Traceback(list):
""" Traceback objects encapsulate and offer higher level
access to Traceback entries.
"""
Entry = TracebackEntry
def __init__(self, tb, excinfo=None):
""" initialize
|
ryanolson/CloudApp
|
cloudapp/identity.py
|
Python
|
gpl-3.0
| 3,128
| 0.012148
|
# -*- coding: utf-8 -*-
"""
Copyright 2013 Ryan Olson
This file is part of CloudApp.
CloudApp is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
CloudApp is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with CloudApp. If not, see <http://www.gnu.org/licenses/>.
"""
from copy import copy
from pprint import pprint
from flask import current_app, session, g
from flask.ext.principal import UserNeed, RoleNeed, AnonymousIdentity
from flask.ext.couchdb import to_dict
from cloudapp.permissions import valid_user
from cloudapp.authentication.models import Session
def _load_user(user_id, identity):
user = g.User.load(user_id)
if user:
identity.user = user
identity.provides.add(UserNeed('Valid'))
identity.provides.add(UserNeed(user.id))
for role in user.roles:
identity.provides.add(RoleNeed(role))
else:
raise RuntimeError("user is None; user_id not found")
def _cache_identity(identity):
if current_app.cache is None: return
cached_identity = copy(identity)
cached_identity.user = to_dict(identity.user)
current_app.cache.set(identity.name, cached_identity, timeout=600)
def on_load_identity(sender, identity):
"""
This function is called to load the user's identity from either
data saved in the client's session or from a identity_changed.send
signal/notification.
This function should never be triggered unless we have passed
a valid identity; however, we should do a quick double check here
before loading the identity's allowed permissions / needs.
In the future, we may want to avoid the user lookup and utilize
memcache for the storage of the user's base information.
"""
if current_app.cache is not None:
stored_identity = current_app.cache.get(identity.name)
if stored_identity is not None:
identity.user = g.User.wrap(stored_identity.user)
identity.provides = stored_identity.provides
if current_app.testing: session['loaded_from']='memcached'
return
try:
if identity.auth_type == 'web-token':
_load_user(identity.name, identity)
_cache_identity(identity)
elif identity.auth_type == 'token':
auth_session = Session.load(identity.name)
if auth_session:
_load_user(auth_session.user_id, identity)
_cache_identity(identity)
if not session.permanent: session.permanent=True
if current_app.testing: session['loaded_from']='couchdb'
except:
g.identity = AnonymousIdentity()
session.pop('identity.name',None)
session.pop('identity.auth_type', None)
session.modified = True
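# Hedged wiring sketch (added, not in the original module): Flask-Principal
# delivers the identity_loaded signal that on_load_identity is shaped for.
# 'app' is a hypothetical Flask application; the legacy flask.ext import
# style matches the imports above.
from flask.ext.principal import identity_loaded
def init_identity(app):
    # connect_via ties the subscription to this specific app instance
    identity_loaded.connect_via(app)(on_load_identity)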
|
uthaipon/SkillsWorkshop2017
|
Week01/Problem02/chaoyichang_02.py
|
Python
|
bsd-3-clause
| 76
| 0.197368
|
i,j=1,2
sum=0
while j <4000000:
i,j=j,i+j
if i%2==0:
sum=sum+i
print(sum)
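# Equivalent idiomatic sketch (added): the same even-Fibonacci sum below
# four million, without shadowing the built-in 'sum'.
def even_fib_total(limit=4000000):
    total, a, b = 0, 1, 2
    while b < limit:
        if b % 2 == 0:
            total += b
        a, b = b, a + b
    return total
print(even_fib_total())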
|
|
mbauskar/erpnext
|
erpnext/selling/doctype/sales_order/sales_order.py
|
Python
|
gpl-3.0
| 26,900
| 0.027918
|
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
import json
import frappe.utils
from frappe.utils import cstr, flt, getdate, comma_and, cint
from frappe import _
from frappe.model.utils import get_fetch_values
from frappe.model.mapper import get_mapped_doc
from erpnext.stock.stock_balance import update_bin_qty, get_reserved_qty
from frappe.desk.notifications import clear_doctype_notifications
from frappe.contacts.doctype.address.address import get_company_address
from erpnext.controllers.selling_controller import SellingController
from erpnext.subscription.doctype.subscription.subscription import month_map, get_next_date
form_grid_templates = {
"items": "templates/form_grid/item_grid.html"
}
class WarehouseRequired(frappe.ValidationError): pass
class SalesOrder(SellingController):
def __init__(self, arg1, arg2=None):
super(SalesOrder, self).__init__(arg1, arg2)
def validate(self):
super(SalesOrder, self).validate()
self.validate_order_type()
self.validate_delivery_date()
self.validate_proj_cust()
self.validate_po()
self.validate_uom_is_integer("stock_uom", "stock_qty")
self.validate_uom_is_integer("uom", "qty")
self.validate_for_items()
self.validate_warehouse()
self.validate_drop_ship()
from erpnext.stock.doctype.packed_item.packed_item import make_packing_list
make_packing_list(self)
self.validate_with_previous_doc()
self.set_status()
if not self.billing_status: self.billing_status = 'Not Billed'
if not self.delivery_status: self.delivery_status = 'Not Delivered'
def validate_po(self):
# validate p.o date v/s delivery date
if self.po_date:
for d in self.get("items"):
if d.delivery_date and getdate(self.po_date) > getdate(d.delivery_date):
frappe.throw(_("Row #{0}: Expected Delivery Date cannot be before Purchase Order Date")
.format(d.idx))
if self.po_no and self.customer:
so = frappe.db.sql("select name from `tabSales Order` \
where ifnull(po_no, '') = %s and name != %s and docstatus < 2\
and customer = %s", (self.po_no, self.name, self.customer))
if so and so[0][0] and not cint(frappe.db.get_single_value("Selling Settings",
"allow_against_multiple_purchase_orders")):
frappe.msgprint(_("Warning: Sales Order {0} already exists against Customer's Purchase Order {1}").format(so[0][0], self.po_no))
def validate_for_items(self):
check_list = []
for d in self.get('items'):
check_list.append(cstr(d.item_code))
# used for production plan
d.transaction_date = self.transaction_date
tot_avail_qty = frappe.db.sql("select projected_qty from `tabBin` \
where item_code = %s and warehouse = %s", (d.item_code, d.warehouse))
d.projected_qty = tot_avail_qty and flt(tot_avail_qty[0][0]) or 0
# check for same entry multiple times
unique_chk_list = set(check_list)
if len(unique_chk_list) != len(check_list) and \
not cint(frappe.db.get_single_value("Selling Settings", "allow_multiple_items")):
frappe.msgprint(_("Same item has been entered multiple times"),
title=_("Warning"), indicator='orange')
def product_bundle_has_stock_item(self, product_bundle):
"""Returns true if product bundle has stock item"""
ret = len(frappe.db.sql("""select i.name from tabItem i, `tabProduct Bundle Item` pbi
where pbi.parent = %s and pbi.item_code = i.name and i.is_stock_item = 1""", product_bundle))
return ret
def validate_sales_mntc_quotation(self):
for d in self.get('items'):
if d.prevdoc_docname:
res = frappe.db.sql("select name from `tabQuotation` where name=%s and order_type = %s",
(d.prevdoc_docname, self.order_type))
if not res:
frappe.msgprint(_("Quotation {0} not of type {1}")
.format(d.prevdoc_docname, self.order_type))
def validate_order_type(self):
super(SalesOrder, self).validate_order_type()
def validate_delivery_date(self):
if self.order_type == 'Sales':
if not self.delivery_date:
self.delivery_date = max([d.delivery_date for d in self.get("items")])
if self.delivery_date:
for d in self.get("items"):
if not d.delivery_date:
d.delivery_date = self.delivery_date
if getdate(self.transaction_date) > getdate(d.delivery_date):
frappe.msgprint(_("Expected Delivery Date should be after Sales Order Date"),
indicator='orange', title=_('Warning'))
else:
frappe.throw(_("Please enter Delivery Date"))
self.validate_sales_mntc_quotation()
def validate_proj_cust(self):
if self.project and self.customer_name:
res = frappe.db.sql("""select name from `tabProject` where name = %s
and (customer = %s or ifnull(customer,'')='')""",
(self.project, self.customer))
if not res:
frappe.throw(_("Customer {0} does not belong to project {1}").format(self.customer, self.project))
def validate_warehouse(self):
super(SalesOrder, self).validate_warehouse()
for d in self.get("items"):
if (frappe.db.get_value("Item", d.item_code, "is_stock_item") == 1 or
(self.has_product_bundle(d.item_code) and self.product_bundle_has_stock_item(d.item_code))) \
and not d.warehouse and not cint(d.delivered_by_supplier):
frappe.throw(_("Delivery warehouse required for stock item {0}").format(d.item_code),
WarehouseRequired)
def validate_with_previous_doc(self):
super(SalesOrder, self).validate_with_previous_doc({
"Quotation": {
"ref_dn_field": "prevdoc_docname",
"compare_fields": [["company", "="], ["currency", "="]]
}
})
def update_enquiry_status(self, prevdoc, flag):
enq = frappe.db.sql("select t2.prevdoc_docname from `tabQuotation` t1, `tabQuotation Item` t2 where t2.parent = t1.name and t1.name=%s", prevdoc)
if enq:
frappe.db.sql("update `tabOpportunity` set status = %s where name=%s",(flag,enq[0][0]))
def update_prevdoc_status(self, flag):
for quotation in list(set([d.prevdoc_docname for d in self.get("items")])):
if quotation:
doc = frappe.get_doc("Quotation", quotation)
if doc.docstatus==2:
frappe.throw(_("Quotation {0} is cancelled").format(quotation))
doc.set_status(update=True)
doc.update_opportunity()
def validate_drop_ship(self):
for d in self.get('items'):
if d.delivered_by_supplier and not d.supplier:
frappe.throw(_("Row #{0}: Set Supplier for item {1}").format(d.idx, d.item_code))
def on_submit(self):
self.check_credit_limit()
self.update_reserved_qty()
frappe.get_doc('Authorization Control').validate_approving_authority(self.doctype, self.company, self.base_grand_total, self)
self.update_project()
self.update_prevdoc_status('submit')
def on_cancel(self):
# Cannot cancel closed SO
if self.status == 'Closed':
frappe.throw(_("Closed order cannot be cancelled. Unclose to cancel."))
self.check_nextdoc_docstatus()
self.update_reserved_qty()
self.update_project()
self.update_prevdoc_status('cancel')
frappe.db.set(self, 'status', 'Cancelled')
def update_project(self):
project_list = []
if self.project:
project = frappe.get_doc("Project", self.project)
project.flags.dont_sync_tasks = True
project.update_sales_costing()
project.save()
project_list.append(self.project)
def check_credit_limit(self):
from erpnext.selling.doctype.customer.customer import check_credit_limit
check_credit_limit(self.customer, self.company)
def check_nextdoc_docstatus(self):
# Checks Delivery Note
submit_dn = frappe.db.sql_list("""select t1.name from `tabDelivery Note` t1,`tabDelivery Note Item` t2
where t1.name = t2.parent and t2.against_sales_order = %s and t1.docstatus = 1""", self.name)
if submit_dn:
frappe.throw(_("Delivery Notes {0} must be cancelled before cancelling this Sales Order").format(comma_and(submit_dn)))
# Checks Sales Invoice
submit_rv = frappe.db.sql_list("""select t1.name
from `tabSales Invoice` t1,`tabSales Invoice Item` t2
where t1.name = t2.parent and t2.sales_order = %s and t1.docstatus = 1""",
self.name)
if submit_rv:
frappe.throw(_("Sales Invoice {0} must be cancelled before cancelling this Sales Order").f
|
bparzella/secsgem
|
secsgem/hsms/linktest_rsp_header.py
|
Python
|
lgpl-2.1
| 1,587
| 0.00063
|
#####################################################################
# linktest_rsp_header.py
#
# (c) Copyright 2021, Benjamin Parzella. All rights reserved.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#####################################################################
"""Header for the hsms linktest response."""
from .header import HsmsHeader
class HsmsLinktestRspHeader(HsmsHeader):
"""
Header for Linktest Response.
Header for message with SType 6.
"""
def __init__(self, system):
"""
Initialize a hsms linktest response.
:param system: message ID
:type system: integer
**Example**::
>>> import secsgem.hsms
>>>
>>> secsgem.hsms.HsmsLinktestRspHeader(10)
HsmsLinktestRspHeader({sessionID:0xffff, stream:00, function:00, pType:0x00, sType:0x06, \
system:0x0000000a, requireResponse:False})
"""
HsmsHeader.__init__(self, system, 0xFFFF)
self.requireResponse = False
self.stream = 0x00
self.function = 0x00
self.pType = 0x00
self.sType = 0x06
|
knuu/competitive-programming
|
atcoder/corp/caddi2018_e.py
|
Python
|
mit
| 1,094
| 0.002742
|
def main() -> None:
N = int(input())
A = [int(x) for x in input().split()]
rev_A = A[:]
left = [-1] * N
left_cnt = [0] * N
A_left = [A[0]]
for i in range(1, N):
if rev_A[i-1] < rev_A[i]:
cnt = 0
while rev_A[i-1]
pass
elif rev_A[i-1] < rev_A[i] * 4:
now = i-1
while left[now] != -1:
now = left[now]
left[i] = now
A_left.append(A[i])
left[i] = i-1
else:
pass
ans = 10 ** 9
for i in range(N + 1):
A = rev_A[:]
cnt = 0
if i > 0:
A[i-1] *= -2
cnt += 1
for j in reversed(range(i-1)):
A[j] *= -2
cnt += 1
while A[j] > A[j+1]:
A[j] *= 4
cnt += 2
for j in range(i+1, N):
while A[j-1] > A[j]:
A[j] *= 4
cnt += 2
print(i, cnt, A)
ans = min(ans, cnt)
print(ans)
if __name__ == '__main__':
    main()
|
DakRomo/2017Challenges
|
challenge_1/python/alexmunoz905/main.py
|
Python
|
mit
| 68
| 0
|
# AlexMunoz905 | 1/1/17
answer = "Hello 2017!"[::-1]
print(answer)
|
GbalsaC/bitnamiP
|
venv/lib/python2.7/site-packages/radon/raw.py
|
Python
|
agpl-3.0
| 6,458
| 0
|
'''This module contains functions related to raw metrics.
The main function is :func:`~radon.raw.analyze`, and should be the only one
that is used.
'''
import tokenize
import operator
import collections
try:
import StringIO as io
except ImportError: # pragma: no cover
import io
__all__ = ['OP', 'COMMENT', 'TOKEN_NUMBER', 'NL', 'EM', 'Module', '_generate',
'_less_tokens', '_find', '_logical', 'analyze']
COMMENT = tokenize.COMMENT
OP = tokenize.OP
NL = tokenize.NL
EM = tokenize.ENDMARKER
# Helper for map()
TOKEN_NUMBER = operator.itemgetter(0)
# A module object. It contains the following data:
# loc = Lines of Code (total lines)
# lloc = Logical Lines of Code
# comments = Comments lines
# blank = Blank lines (or whitespace-only lines)
Module = collections.namedtuple('Module', ['loc', 'lloc', 'sloc',
'comments', 'multi', 'blank'])
def _generate(code):
'''Pass the code into `tokenize.generate_tokens` and convert the result
into a list.
'''
return list(tokenize.generate_tokens(io.StringIO(code).readline))
def _less_tokens(tokens, remove):
'''Process the output of `tokenize.generate_tokens` removing
the tokens specified in `remove`.
'''
for values in tokens:
if values[0] in remove:
continue
yield values
def _find(tokens, token, value):
'''Return the position of the last token with the same (token, value)
pair supplied. The position is the one of the rightmost term.
'''
for index, token_values in enumerate(reversed(tokens)):
if (token, value) == token_values[:2]:
return len(tokens) - index - 1
raise ValueError('(token, value) pair not found')
def _split_tokens(tokens, token, value):
'''Split a list of tokens on the specified token pair (token, value),
where *token* is the token type (i.e. its code) and *value* its actual
value in the code.
'''
res = [[]]
for token_values in tokens:
if (token, value) == token_values[:2]:
res.append([])
continue
res[-1].append(token_values)
return res
def _get_all_tokens(line, lines):
'''Starting from *line*, generate the necessary tokens which represent the
shortest tokenization possible. This is done by catching
:exc:`tokenize.TokenError` when a multi-line string or statement is
encountered.
'''
sloc_increment = multi_increment = 0
try:
tokens = _generate(line)
except tokenize.TokenError:
# A multi-line string or statement has been encountered:
# start adding lines and stop when tokenize stops complaining
while True:
sloc_increment += 1
line = '\n'.join([line, next(lines)])
try:
tokens = _generate(line)
except tokenize.TokenError:
continue
if tokens[0][0] == 3 and len(tokens) == 2:
# Multi-line string detected
multi_increment += line.count('\n') + 1
break
return tokens, sloc_increment, multi_increment
def _logical(tokens):
'''Find how many logical lines are there in the current line.
Normally 1 line of code is equivalent to 1 logical line of code,
but there are cases when this is not true. For example::
if cond: return 0
this line actually corresponds to 2 logical lines, since it can be
translated into::
if cond:
return 0
Examples::
if cond: -> 1
if cond: return 0 -> 2
try: 1/0 -> 2
try: -> 1
if cond: # Only a comment -> 1
if cond: return 0 # Only a comment -> 2
'''
def aux(sub_tokens):
'''The actual function which does the job.'''
# Get the tokens and, in the meantime, remove comments
processed = list(_less_tokens(sub_tokens, [COMMENT]))
try:
# Verify whether a colon is present among the tokens and that
# it is the last token.
token_pos = _find(processed, OP, ':')
return 2 - (token_pos == len(processed) - 2)
except ValueError:
# The colon is not present
# If the line is only composed by comments, newlines and endmarker
# then it does not count as a logical line.
# Otherwise it counts as 1.
if not list(_less_tokens(processed, [NL, EM])):
return 0
return 1
return sum(aux(sub) for sub in _split_tokens(tokens, OP, ';'))
def analyze(source):
'''Analyze the source code and return a namedtuple with the following
fields:
* **loc**: The number of lines of code (total)
* **lloc**: The number of logical lines of code
* **sloc**: The number of source lines of code (not necessarily
corresponding to the LLOC)
* **comments**: The number of Python comment lines
* **multi**: The number of lines which represent multi-line strings
* **blank**: The number of blank lines (or whitespace-only ones)
The equation :math:`sloc + blanks = loc` should always hold.
Multiline strings are not counted as comments, since, to the Python
interpreter, they are not comments but strings.
'''
loc = sloc = lloc = comments = multi = blank = 0
lines = iter(source.splitlines())
for lineno, line in enumerate(lines, 1):
loc += 1
line = line.strip()
if not line:
blank += 1
continue
# If this is not a blank line, then it counts as a
# source line of code
sloc += 1
try:
# Process a logical line that spans on multiple lines
tokens, sloc_incr, multi_incr = _get_all_tokens(line, lines)
except StopIteration:
raise SyntaxError('SyntaxError at line: {0}'.format(lineno))
# Update tracked metrics
loc += sloc_incr # LOC and SLOC increments are the same
sloc += sloc_incr
multi += multi_incr
# Add the comments
comments += list(map(TOKEN_NUMBER, tokens)).count(COMMENT)
# Process a logical line
# Split it on semicolons because they increase the number of logical
# lines
for sub_tokens in _split_tokens(tokens, OP, ';'):
lloc += _logical(sub_tokens)
return Module(loc, lloc, sloc, comments, multi, blank)
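# Example run (added for illustration): feeding a small snippet through
# analyze() and reading the resulting Module namedtuple.
if __name__ == '__main__':
    snippet = (
        "# a comment\n"
        "def f(x):\n"
        "    if x: return 0\n"
        "\n"
        "y = f(1)\n"
    )
    stats = analyze(snippet)
    print(stats.loc, stats.sloc, stats.comments, stats.blank)  # 5 4 1 1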
|
justinmeister/PyTMX-Examples
|
Make Collideable Rects/main.py
|
Python
|
mit
| 3,683
| 0.002987
|
import sys, os
import pygame as pg
import tilerender
"""Initialize pygame, create a clock, create the window
with a surface to blit the map onto."""
pg.init()
fps_clock = pg.time.Clock()
main_surface = pg.display.set_mode((420, 420))
main_rect = main_surface.get_rect()
"""Load the tmx file from the current directory,
create the tile_renderer object and load the tmx
file."""
tmx_file = os.path.join(os.getcwd(), 'test.tmx')
tile_renderer = tilerender.Renderer(tmx_file)
"""Create the map surface using the make_map()
method. Used to blit onto the main_surface."""
map_surface = tile_renderer.make_map()
map_rect = map_surface.get_rect()
"""Create a list of rects called "blockers" that the
player can collide with. The getObjects() method
returns a list of objects in your tile map. Each
tile has properties like name, type, x, y, width,
height. Double click objects in Tiled to see these
properties. These properties are used to make rect
objects in Pygame."""
blockers = []
tilewidth = tile_renderer.tmx_data.tilewidth
for tile_object in tile_renderer.tmx_data.getObjects():
properties = tile_object.__dict__
if properties['name'] == 'blocker':
x = properties['x']
y = properties['y']
width = properties['width']
height = properties['height']
new_rect = pg.Rect(x, y, width, height)
blockers.append(new_rect)
"""
The Player class will be a player-controlled sprite
that will collide with the blockers we just created.
We pass in the blockers as a constructor argument so
that we can assign them as an attribute. During the
update method, we can refer to this attribute to detect
collision.
"""
class Player(pg.sprite.Sprite):
def __init__(self, blockers):
super(Player, self).__init__()
self.image = pg.Surface((22, 22))
self.image.fill((130, 100, 200))
self.rect = self.image.get_rect(x=100,
y=300)
self.x_vel = 0
self.y_vel = 0
self.blockers = blockers
def update(self, keys):
"""
Set player velocity by keys, move by velocity, check
for collision. It's important to check collisions
for both on the x-axis and y-axis, rather than just once.
"""
if keys[pg.K_DOWN]:
self.y_vel = 3
elif keys[pg.K_UP]:
self.y_vel = -3
else:
self.y_vel = 0
if keys[pg.K_LEFT]:
self.x_vel = -3
elif keys[pg.K_RIGHT]:
self.x_vel = 3
else:
self.x_vel = 0
self.rect.x += self.x_vel
for blocker in self.blockers:
if self.rect.colliderect(blocker):
self.rect.x -= self.x_vel
self.x_vel = 0
self.rect.y += self.y_vel
for blocker in self.blockers:
if self.rect.colliderect(blocker):
self.rect.y -= self.y_vel
self.y_vel = 0
def draw(self, surface):
"""
Blit player image to screen.
"""
surface.blit(self.image, self.rect)
player = Player(blockers)
"""Simple game loop that updates the player sprite,
blits the map_surface onto the main surface, and blits
the player sprite onto the main surface.
"""
def main():
while True:
keys = pg.key.get_pressed()
player.update(keys)
main_surface.blit(map_surface, map_rect)
player.draw(main_surface)
for event in pg.event.get():
if event.type == pg.QUIT:
pg.quit()
sys.exit()
pg.display.update()
fps_clock.tick(60)
if __name__ == "__main__":
main()
|
mattseymour/django
|
tests/validation/models.py
|
Python
|
bsd-3-clause
| 4,761
| 0.003781
|
from datetime import datetime
from django.core.exceptions import ValidationError
from django.db import models
def validate_answer_to_universe(value):
if value != 42:
raise ValidationError('This is not the answer to life, universe and everything!', code='not42')
class ModelToValidate(models.Model):
name = models.CharField(max_length=100)
created = models.DateTimeField(default=datetime.now)
number = models.IntegerField(db_column='number_val')
parent = models.ForeignKey(
'self',
models.SET_NULL,
blank=True, null=True,
limit_choices_to={'number': 10},
)
email = models.EmailField(blank=True)
ufm = models.ForeignKey(
'UniqueFieldsModel',
models.SET_NULL,
to_field='unique_charfield',
blank=True, null=True,
)
url = models.URLField(blank=True)
f_with_custom_validator = models.IntegerField(blank=True, null=True, validators=[validate_answer_to_universe])
f_with_iterable_of_validators = models.IntegerField(blank=True, null=True,
validators=(validate_answer_to_universe,))
slug = models.SlugField(blank=True)
def clean(self):
super(ModelToValidate, self).clean()
if self.number == 11:
raise ValidationError('Invalid number supplied!')
class UniqueFieldsModel(models.Model):
unique_charfield = models.CharField(max_length=100, unique=True)
unique_integerfield = models.IntegerField(unique=True)
non_unique_field = models.IntegerField()
class CustomPKModel(models.Model):
my_pk_field = models.CharField(max_length=100, primary_key=True)
class UniqueTogetherModel(models.Model):
cfield = models.CharField(max_length=100)
ifield = models.IntegerField()
efield = models.EmailField()
class Meta:
unique_together = (('ifield', 'cfield',), ['ifield', 'efield'])
class UniqueForDateModel(models.Model):
start_date = models.DateField()
end_date = models.DateTimeField()
count = models.IntegerField(unique_for_date="start_date", unique_for_year="end_date")
order = models.IntegerField(unique_for_month="end_date")
name = models.CharField(max_length=100)
class CustomMessagesModel(models.Model):
other = models.IntegerField(blank=True, null=True)
number = models.IntegerField(
db_column='number_val',
error_messages={'null': 'NULL', 'not42': 'AAARGH', 'not_equal': '%s != me'},
validators=[validate_answer_to_universe]
)
class Author(models.Model):
name = models.CharField(max_length=100)
class Article(models.Model):
title = models.CharField(max_length=100)
author = models.ForeignKey(Author, models.CASCADE)
pub_date = models.DateTimeField(blank=True)
def clean(self):
if self.pub_date is None:
self.pub_date = datetime.now()
class Post(models.Model):
title = models.CharField(max_length=50, unique_for_date='posted', blank=True)
slug = models.CharField(max_length=50, unique_for_year='posted', blank=True)
subtitle = models.CharField(max_length=50, unique_for_month='posted', blank=True)
posted = models.DateField()
def __str__(self):
return self.name
class FlexibleDatePost(models.Model):
title = models.CharField(max_length=50, unique_for_date='posted', blank=True)
slug = models.CharField(max_length=50, unique_for_year='posted', blank=True)
subtitle = models.CharField(max_length=50, unique_for_month='posted', blank=True)
posted = models.DateField(blank=True, null=True)
class UniqueErrorsModel(models.Model):
name = models.CharField(max_length=100, unique=True, error_messages={'unique': 'Custom unique name message.'})
no = models.IntegerField(unique=True, error_messages={'unique': 'Custom unique number message.'})
class GenericIPAddressTestModel(models.Model):
generic_ip = models.GenericIPAddressField(blank=True, null=True, unique=True)
v4_ip = models.GenericIPAddressField(blank=True, null=True, protocol="ipv4")
v6_ip = models.GenericIPAddressField(blank=True, null=True, protocol="ipv6")
ip_verbose_name = models.GenericIPAddressField("IP Address Verbose", blank=True, null=True)
class GenericIPAddrUnpackUniqueTest(models.Model):
generic_v4unpack_ip = models.GenericIPAddressField(null=True, blank=True, unique=True, unpack_ipv4=True)
# A model can't have multiple AutoFields
# Refs #12467.
assertion_error = None
try:
class MultipleAutoFields(models.Model):
auto1 = models.AutoField(primary_key=True)
auto2 = models.AutoField(primary_key=True)
except AssertionError as exc:
assertion_error = exc
assert str(assertion_error) == "A model can't have more than one AutoField."
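# Hedged demo (added): the custom validator above is a plain callable, so it
# can be exercised directly; note that importing this models module already
# assumes a configured Django environment.
if __name__ == '__main__':
    try:
        validate_answer_to_universe(13)
    except ValidationError as e:
        print(e.code)  # 'not42'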
|
projectchrono/chrono
|
src/demos/python/core/demo_CH_coords.py
|
Python
|
bsd-3-clause
| 2,825
| 0.024425
|
#------------------------------------------------------------------------------
# Name: pychrono example
# Purpose:
#
# Author: Alessandro Tasora
#
# Created: 1/01/2019
# Copyright: (c) ProjectChrono 2019
#------------------------------------------------------------------------------
print ("First tutorial for PyChrono: vectors, matrices etc.");
# Load the Chrono::Engine core module!
import pychrono as chrono
try:
import numpy as np
from numpy import linalg as LA
except ImportError:
print("You need NumPyto run this demo!")
# Test logging
chrono.GetLog().Bar()
chrono.GetLog() << "result is: " << 11+1.5 << "\n"
chrono.GetLog().Bar()
# Test vectors
my_vect1 = chrono.ChVectorD()
my_vect1.x=5
my_vect1.y=2
my_vect1.z=3
my_vect2 = chrono.ChVectorD(3,4,5)
my_vect4 = my_vect1*10 + my_vect2
my_len = my_vect4.Length()
print ('vect sum =', my_vect1 + my_vect2)
print ('vect cross =', my_vect1 % my_vect2)
print ('vect dot =', my_vect1 ^ my_vect2)
# Test quaternions
my_quat = chrono.ChQuaternionD(1,2,3,4)
my_qconjugate = ~my_quat
print ('quat. conjugate =', my_qconjugate)
print ('quat. dot product=', my_qconjugate ^ my_quat)
print ('quat. product=', my_qconjugate % my_quat)
# Test matrices and NumPy interoperability
mlist = [[1,2,3,4], [5,6,7,8], [9,10,11,12], [13,14,15,16]]
ma = chrono.ChMatrixDynamicD()
ma.SetMatr(mlist) # Create a Matrix from a list. Size is adjusted automatically.
npmat = np.asarray(ma.GetMatr()) # Create a 2D npy array from the list extracted from ChMatrixDynamic
w, v = LA.eig(npmat) # get eigenvalues and eigenvectors using numpy
mb = chrono.ChMatrixDynamicD(4,4)
prod = v * npmat # you can perform linear algebra operations with numpy and then feed results into a ChMatrixDynamicD using SetMatr
mb.SetMatr(v.tolist()) # create a ChMatrixDynamicD from the numpy eigenvectors
mr = chrono.ChMatrix33D()
mr.SetMatr([[1,2,3], [4,5,6], [7,8,9]])
print (mr*my_vect1);
# Test frames -
# create a frame representing a translation and a rotation
# of 20 degrees on X axis
my_frame = chrono.ChFrameD(my_vect2, chrono.Q_from_AngAxis(20*chrono.CH_C_DEG_TO_RAD, chrono.ChVectorD(1,0,0)))
my_vect5 = my_vect1 >> my_frame
# Print the class hierarchy of a chrono class
import inspect
inspect.getmro(chrono.ChStreamOutAsciiFile)
# Use the ChFunction classes
my_funct = chrono.ChFunction_Sine(0,0.5,3)
print ('function f(0.2)=', my_funct.Get_y(0.2) )
# Inherit from the ChFunction, from the Python side,
# (do not forget the __init__ constructor)
class MySquareFunct (chrono.ChFunction):
def __init__(self):
chrono.ChFunction.__init__(self)
def Get_y(self,x):
return x*x
my_funct2 = MySquareFunct()
print ('function f(2) =', my_funct2.Get_y(3) )
print ('function df/dx=', my_funct2.Get_y_dx(3) )
|
sander76/home-assistant
|
homeassistant/components/vera/__init__.py
|
Python
|
apache-2.0
| 9,850
| 0.000609
|
"""Support for Vera devices."""
from __future__ import annotations
import asyncio
from collections import defaultdict
import logging
from typing import Any, Generic, TypeVar
import pyvera as veraApi
from requests.exceptions import RequestException
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
ATTR_ARMED,
ATTR_BATTERY_LEVEL,
ATTR_LAST_TRIP_TIME,
ATTR_TRIPPED,
CONF_EXCLUDE,
CONF_LIGHTS,
EVENT_HOMEASSISTANT_STOP,
)
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.typing import ConfigType
from homeassistant.util import convert, slugify
from homeassistant.util.dt import utc_from_timestamp
from .common import (
ControllerData,
SubscriptionRegistry,
get_configured_platforms,
get_controller_data,
set_controller_data,
)
from .config_flow import fix_device_id_list, new_options
from .const import (
ATTR_CURRENT_ENERGY_KWH,
ATTR_CURRENT_POWER_W,
CONF_CONTROLLER,
CONF_LEGACY_UNIQUE_ID,
DOMAIN,
VERA_ID_FORMAT,
)
_LOGGER = logging.getLogger(__name__)
VERA_ID_LIST_SCHEMA = vol.Schema([int])
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Required(CONF_CONTROLLER): cv.url,
vol.Optional(CONF_EXCLUDE, default=[]): VERA_ID_LIST_SCHEMA,
vol.Optional(CONF_LIGHTS, default=[]): VERA_ID_LIST_SCHEMA,
}
)
},
extra=vol.ALLOW_EXTRA,
)
async def async_setup(hass: HomeAssistant, base_config: ConfigType) -> bool:
"""Set up for Vera controllers."""
hass.data[DOMAIN] = {}
config = base_config.get(DOMAIN)
if not config:
return True
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_IMPORT},
data=config,
)
)
return True
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Do setup of vera."""
# Use options entered during initial config flow or provided from configuration.yml
if entry.data.get(CONF_LIGHTS) or entry.data.get(CONF_EXCLUDE):
hass.config_entries.async_update_entry(
entry=entry,
data=entry.data,
options=new_options(
entry.data.get(CONF_LIGHTS, []),
entry.data.get(CONF_EXCLUDE, []),
),
)
saved_light_ids = entry.options.get(CONF_LIGHTS, [])
saved_exclude_ids = entry.options.get(CONF_EXCLUDE, [])
base_url = entry.data[CONF_CONTROLLER]
light_ids = fix_device_id_list(saved_light_ids)
exclude_ids = fix_device_id_list(saved_exclude_ids)
# If the ids were corrected. Update the config entry.
if light_ids != saved_light_ids or exclude_ids != saved_exclude_ids:
hass.config_entries.async_update_entry(
entry=entry, options=new_options(light_ids, exclude_ids)
)
# Initialize the Vera controller.
subscription_registry = SubscriptionRegistry(hass)
controller = veraApi.VeraController(base_url, subscription_registry)
try:
all_devices = await hass.async_add_executor_job(controller.get_devices)
all_scenes = await hass.async_add_executor_job(controller.get_scenes)
except RequestException as exception:
# There was a network related error connecting to the Vera controller.
_LOGGER.exception("Error communicating with Vera API")
raise ConfigEntryNotReady from exception
# Exclude devices unwanted by user.
devices = [device for device in all_devices if device.device_id not in exclude_ids]
vera_devices = defaultdict(list)
for device in devices:
device_type = map_vera_device(device, light_ids)
if device_type is not None:
vera_devices[device_type].append(device)
vera_scenes = []
for scene in all_scenes:
vera_scenes.append(scene)
controller_data = ControllerData(
controller=controller,
devices=vera_devices,
scenes=vera_scenes,
config_entry=entry,
)
set_controller_data(hass, entry, controller_data)
# Forward the config data to the necessary platforms.
for platform in get_configured_platforms(controller_data):
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(entry, platform)
)
def stop_subscription(event):
"""Stop SubscriptionRegistry updates."""
controller.stop()
await hass.async_add_executor_job(controller.start)
entry.async_on_unload(
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, stop_subscription)
)
entry.async_on_unload(entry.add_update_listener(_async_update_listener))
return True
async def async_unload_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool:
"""Unload Withings config entry."""
controller_data: ControllerData = get_controller_data(hass, config_entry)
tasks = [
hass.config_entries.async_forward_entry_unload(config_entry, platform)
for platform in get_configured_platforms(controller_data)
]
tasks.append(hass.async_add_executor_job(controller_data.controller.stop))
    await asyncio.gather(*tasks)
return True
async def _async_update_listener(hass: HomeAssistant, entry: ConfigEntry):
"""Handle options update."""
await hass.config_entries.async_reload(entry.entry_id)
def map_vera_device(vera_device: veraApi.VeraDevice, remap: list[int]) -> str:
"""Map vera classes to Home Assistant types."""
type_map = {
veraApi.VeraDimmer: "light",
veraApi.VeraBinarySensor: "binary_sensor",
veraApi.VeraSensor: "sensor",
veraApi.VeraArmableDevice: "switch",
veraApi.VeraLock: "lock",
veraApi.VeraThermostat: "climate",
veraApi.VeraCurtain: "cover",
veraApi.VeraSceneController: "sensor",
veraApi.VeraSwitch: "switch",
}
def map_special_case(instance_class: type, entity_type: str) -> str:
if instance_class is veraApi.VeraSwitch and vera_device.device_id in remap:
return "light"
return entity_type
return next(
iter(
map_special_case(instance_class, entity_type)
for instance_class, entity_type in type_map.items()
if isinstance(vera_device, instance_class)
),
None,
)
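# Illustrative mapping (comment sketch, not from the original source): with the
# type_map above, a veraApi.VeraDimmer resolves to "light" and a plain
# veraApi.VeraSwitch resolves to "switch" -- unless its device_id appears in the
# user-configured light id list, in which case map_special_case remaps it to "light".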
DeviceType = TypeVar("DeviceType", bound=veraApi.VeraDevice)
class VeraDevice(Generic[DeviceType], Entity):
"""Representation of a Vera device entity."""
def __init__(
self, vera_device: DeviceType, controller_data: ControllerData
) -> None:
"""Initialize the device."""
self.vera_device = vera_device
self.controller = controller_data.controller
self._name = self.vera_device.name
# Append device id to prevent name clashes in HA.
self.vera_id = VERA_ID_FORMAT.format(
slugify(vera_device.name), vera_device.vera_device_id
)
if controller_data.config_entry.data.get(CONF_LEGACY_UNIQUE_ID):
self._unique_id = str(self.vera_device.vera_device_id)
else:
self._unique_id = f"vera_{controller_data.config_entry.unique_id}_{self.vera_device.vera_device_id}"
async def async_added_to_hass(self) -> None:
"""Subscribe to updates."""
self.controller.register(self.vera_device, self._update_callback)
def _update_callback(self, _device: DeviceType) -> None:
"""Update the state."""
self.schedule_update_ha_state(True)
def update(self):
"""Force a refresh from the device if the device is unavailable."""
refresh_needed = self.vera_device.should_poll or not self.available
_LOGGER.debug("%s: update called (refresh=%s)", self._name, refresh_needed)
if refresh_needed:
self.vera_device.refresh()
|
andreaso/ansible
|
lib/ansible/modules/cloud/amazon/cloudfront_facts.py
|
Python
|
gpl-3.0
| 31,932
| 0.005386
|
#!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: cloudfront_facts
short_description: Obtain facts about an AWS CloudFront distribution
description:
- Gets information about an AWS CloudFront distribution
requirements:
- boto3 >= 1.0.0
- python >= 2.6
version_added: "2.3"
author: Willem van Ketwich (@wilvk)
options:
distribution_id:
description:
- The id of the CloudFront distribution. Used with I(distribution), I(distribution_config),
I(invalidation), I(streaming_distribution), I(streaming_distribution_config), I(list_invalidations).
required: false
invalidation_id:
description:
- The id of the invalidation to get information about. Used with I(invalidation).
required: false
origin_access_identity_id:
description:
- The id of the cloudfront origin access identity to get information about.
required: false
web_acl_id:
description:
- Used with I(list_distributions_by_web_acl_id).
required: false
domain_name_alias:
description:
- Can be used instead of I(distribution_id) - uses the aliased CNAME for the cloudfront
distribution to get the distribution id where required.
required: false
all_lists:
description:
- Get all cloudfront lists that do not require parameters.
required: false
default: false
origin_access_identity:
description:
- Get information about an origin access identity. Requires I(origin_access_identity_id)
to be specified.
required: false
default: false
origin_access_identity_config:
description:
- Get the configuration information about an origin access identity. Requires
I(origin_access_identity_id) to be specified.
required: false
default: false
distribution:
description:
- Get information about a distribution. Requires I(distribution_id) or I(domain_name_alias)
to be specified.
required: false
default: false
distribution_config:
description:
- Get the configuration information about a distribution. Requires I(distribution_id)
or I(domain_name_alias) to be specified.
required: false
default: false
invalidation:
description:
- Get information about an invalidation. Requires I(invalidation_id) to be specified.
required: false
default: false
streaming_distribution:
description:
- Get information about a specified RTMP distribution. Requires I(distribution_id) or
I(domain_name_alias) to be specified.
    required: false
default: false
streaming_distribution_configuration:
description:
- Get the configuration information about a specified RTMP distribution.
Requires I(distribution_id) or I(domain_name_alias) to be specified.
required: false
    default: false
list_origin_access_identities:
description:
      - Get a list of cloudfront origin access identities.
required: false
default: false
list_distributions:
description:
- Get a list of cloudfront distributions.
required: false
default: false
list_distributions_by_web_acl_id:
description:
- Get a list of distributions using web acl id as a filter. Requires I(web_acl_id) to be set.
required: false
default: false
list_invalidations:
description:
- Get a list of invalidations. Requires I(distribution_id) or I(domain_name_alias) to be specified.
required: false
default: false
list_streaming_distributions:
description:
- Get a list of streaming distributions.
required: false
default: false
summary:
description:
- Returns a summary of all distributions, streaming distributions and origin_access_identities.
This is the default behaviour if no option is selected.
required: false
default: false
extends_documentation_fragment:
- aws
- ec2
'''
EXAMPLES = '''
# Note: These examples do not set authentication details, see the AWS Guide for details.
# Get a summary of distributions
- cloudfront_facts:
summary: true
# Get information about a distribution
- cloudfront_facts:
distribution: true
distribution_id: my-cloudfront-distribution-id
# Get information about a distribution using the CNAME of the cloudfront distribution.
- cloudfront_facts:
distribution: true
domain_name_alias: www.my-website.com
# Facts are published in ansible_facts['cloudfront'][<distribution_name>]
- debug:
msg: "{{ ansible_facts['cloudfront']['my-cloudfront-distribution-id'] }}"
- debug:
msg: "{{ ansible_facts['cloudfront']['www.my-website.com'] }}"
# Get all information about an invalidation for a distribution.
- cloudfront_facts:
invalidation: true
distribution_id: my-cloudfront-distribution-id
invalidation_id: my-cloudfront-invalidation-id
# Get all information about a cloudfront origin access identity.
- cloudfront_facts:
origin_access_identity: true
origin_access_identity_id: my-cloudfront-origin-access-identity-id
# Get all information about lists not requiring parameters (ie. list_origin_access_identities, list_distributions, list_streaming_distributions)
- cloudfront_facts:
all_lists: true
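# Get a list of distributions filtered by web acl id (illustrative id shown).
- cloudfront_facts:
    list_distributions_by_web_acl_id: true
    web_acl_id: my-web-acl-id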
'''
RETURN = '''
origin_access_identity:
description: Describes the origin access identity information. Requires I(origin_access_identity_id) to be set.
returned: only if I(origin_access_identity) is true
type: dict
origin_access_identity_configuration:
  description: Describes the origin access identity's configuration information. Requires I(origin_access_identity_id) to be set.
  returned: only if I(origin_access_identity_config) is true
type: dict
distribution:
description: >
    Facts about a cloudfront distribution. Requires I(distribution_id) or I(domain_name_alias)
    to be specified.
returned: only if distribution is true
type: dict
distribution_config:
description: >
Facts about a cloudfront distribution's config. Requires I(distribution_id) or I(domain_name_alias)
to be specified.
returned: only if I(distribution_config) is true
type: dict
invalidation:
description: >
Describes the invalidation information for the distribution. Requires
    I(invalidation_id) to be specified and either I(distribution_id) or I(domain_name_alias).
returned: only if invalidation is true
type: dict
streaming_distribution:
description: >
Describes the streaming information for the distribution. Requires
I(distribution_id) or I(domain_name_alias) to be specified.
returned: only if I(streaming_distribution) is
|
william-richard/moto
|
moto/kinesisvideoarchivedmedia/responses.py
|
Python
|
apache-2.0
| 3,369
| 0.002671
|
from __future__ import unicode_literals
from moto.core.responses import BaseResponse
from .models import kinesisvideoarchivedmedia_backends
import json
class KinesisVideoArchivedMediaResponse(BaseResponse):
SERVICE_NAME = "kinesis-video-archived-media"
@property
def kinesisvideoarchivedmedia_backend(self):
return kinesisvideoarchivedmedia_backends[self.region]
def get_hls_streaming_session_url(self):
stream_name = self._get_param("StreamName")
stream_arn = self._get_param("StreamARN")
playback_mode = self._get_param("PlaybackMode")
hls_fragment_selector = self._get_param("HLSFragmentSelector")
container_format = self._get_param("ContainerFormat")
discontinuity_mode = self._get_param("DiscontinuityMode")
display_fragment_timestamp = self._get_param("DisplayFragmentTimestamp")
expires = self._get_int_param("Expires")
max_media_playlist_fragment_results = self._get_param(
"MaxMediaPlaylistFragmentResults"
)
        hls_streaming_session_url = self.kinesisvideoarchivedmedia_backend.get_hls_streaming_session_url(
stream_name=stream_name,
stream_arn=stream_arn,
playback_mode=playback_mode,
hls_fragment_selector=hls_fragment_selector,
container_format=container_format,
discontinuity_mode=discontinuity_mode,
display_fragment_timestamp=display_fragment_timestamp,
expires=expires,
max_media_playlist_fragment_results=max_media_playlist_fragment_results,
)
return json.dumps(dict(HLSStreamingSessionURL=hls_streaming_session_url))
def get_dash_streaming_session_url(self):
stream_name = self._get_param("StreamName")
stream_arn = self._get_param("StreamARN")
playback_mode = self._get_param("PlaybackMode")
display_fragment_timestamp = self._get_param("DisplayFragmentTimestamp")
display_fragment_number = self._get_param("DisplayFragmentNumber")
dash_fragment_selector = self._get_param("DASHFragmentSelector")
expires = self._get_int_param("Expires")
max_manifest_fragment_results = self._get_param("MaxManifestFragmentResults")
dash_streaming_session_url = self.kinesisvideoarchivedmedia_backend.get_dash_streaming_session_url(
stream_name=stream_name,
stream_arn=stream_arn,
playback_mode=playback_mode,
display_fragment_timestamp=display_fragment_timestamp,
display_fragment_number=display_fragment_number,
dash_fragment_selector=dash_fragment_selector,
expires=expires,
max_manifest_fragment_results=max_manifest_fragment_results,
)
return json.dumps(dict(DASHStreamingSessionURL=dash_streaming_session_url))
def get_clip(self):
stream_name = self._get_param("StreamName")
stream_arn = self._get_param("StreamARN")
clip_fragment_selector = self._get_param("ClipFragmentSelector")
content_type, payload = self.kinesisvideoarchivedmedia_backend.get_clip(
stream_name=stream_name,
stream_arn=stream_arn,
clip_fragment_selector=clip_fragment_selector,
)
new_headers = {"Content-Type": content_type}
return payload, new_headers
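# Usage sketch (comments only; assumes moto exposes a mock_kinesisvideoarchivedmedia
# decorator and that boto3 is installed -- names below are illustrative):
#
#   from moto import mock_kinesisvideoarchivedmedia
#   import boto3
#
#   @mock_kinesisvideoarchivedmedia
#   def test_hls_url():
#       client = boto3.client("kinesis-video-archived-media", region_name="us-east-1")
#       res = client.get_hls_streaming_session_url(StreamName="my-stream")
#       assert "HLSStreamingSessionURL" in res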
|
onoga/toolib
|
toolib/wx/grid/table/TNumberDataLines.py
|
Python
|
gpl-2.0
| 844
| 0.043839
|
class TNumberDataLines(object):
"""
Adapter used in
TTableClipboard
Requires:
getNumberDataRows or GetNumberRows
getNumberDataCols or GetNumberCols
    Provides:
_getNumberDataRows
_getNumberDataCols
_getBaseDataRow
_getBaseDataCol
"""
def _getBaseDataRow(self):
if hasattr(self, 'getBaseDataRow'):
return self.getBaseDataRow()
else:
return 0
    def _getBaseDataCol(self):
if hasattr(self, 'getBaseDataCol'):
return self.getBaseDataCol()
else:
return 0
def _getNumberDataRows(self):
if hasattr(self, 'getNumberDataRows'):
return self.getNumberDataRows()
else:
return self.GetNumberRows() - self._getBaseDataRow()
def _getNumberDataCols(self):
if hasattr(self, 'getNumberDataCols'):
return self.getNumberDataCols()
else:
return self.GetNumberCols() - self._getBaseDataCol()
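# Minimal mix-in sketch (hypothetical, not part of the original module): a table
# exposing only the wx.grid-style GetNumberRows/GetNumberCols still gets working
# _getNumberDataRows/_getNumberDataCols through the adapter's fallbacks.
if __name__ == '__main__':
    class _FakeTable(TNumberDataLines):
        def GetNumberRows(self):
            return 10
        def GetNumberCols(self):
            return 4
    t = _FakeTable()
    assert t._getNumberDataRows() == 10  # GetNumberRows() minus base row (0)
    assert t._getNumberDataCols() == 4   # GetNumberCols() minus base col (0)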
|
Thib17/biggraphite
|
biggraphite/drivers/_delayed_writer.py
|
Python
|
apache-2.0
| 5,350
| 0.000187
|
#!/usr/bin/env python
# Copyright 2016 Criteo
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Downsampling helpers for drivers that do not implement it server-side."""
from __future__ import absolute_import
from __future__ import print_function
import collections
import logging
import time
log = logging.getLogger(__name__)
class DelayedWriter(object):
"""Delay writes."""
DEFAULT_PERIOD_MS = 600000
def __init__(self, accessor, period_ms=DEFAULT_PERIOD_MS):
"""Create a DelayedWriter.
The delayed writer will separate high resolution points and low
resolution points and will write the low resolution ones every
`period_ms` milliseconds.
For these points the value for a given timestamp is frequently
updated and we can safely delay the writes. In case of an unclean
        shutdown we might lose up to `period_ms` worth of data.
Args:
accessor: a connected accessor.
period_ms: delay before writing low resolution points.
"""
self.accessor = accessor
self.period_ms = period_ms
self._queue = []
self._metrics_per_ms = 0
self._last_write_ms = 0
self._points = collections.defaultdict(dict)
def clear(self):
"""Reset internal structures."""
self._queue = []
self._points.clear()
def feed(self, metric, datapoints):
"""Feed the delayed writer.
        This function will separate datapoints based on their
resolutions and keep the low resolution points for later.
Args:
metric: the metric associated with these points.
datapoints: downsampled datapoints.
Returns:
list(datapoints) list of high resolution points that
should get written now.
"""
high_res, low_res = [], []
for datapoint in datapoints:
_, _, _, stage = datapoint
            # In case of unclean shutdown we could lose up to
            # 25% of the data. We also allow a lag of up to 1/4th of
            # a period. stage0 points are never delayed.
if stage.stage0 or stage.precision_ms < (self.period_ms * 4):
high_res.append(datapoint)
else:
low_res.append(datapoint)
self.write_later(metric, low_res)
# We piggy back on feed() to write delayed points, this works
# as long as we receive points regularly. We might want to add
# a timer at some point.
self.write_some()
return high_res
def flush(self):
"""Flush all buffered points."""
self._build_queue()
while self._queue:
self.write_some(flush=True)
def size(self):
"""Number of queued metrics."""
return len(self._points)
def write_later(self, metric, datapoints):
"""Queue points for later."""
for datapoint in datapoints:
timestamp, value, count, stage = datapoint
self._points[metric][(stage, timestamp)] = (value, count)
self._build_queue()
def _build_queue(self):
"""Build the queue of metrics to write."""
if len(self._queue) > 0:
return
# Order by number of points.
self._queue = sorted(self._points.keys(), key=lambda k: len(self._points[k]))
# We know that we have up to `period_ms` to write everything
# so let's write only a few metrics per iteration.
self._metrics_per_ms = float(len(self._queue)) / self.period_ms
log.debug(
"rebuilt the queues: %d metrics, %d per second",
len(self._queue),
self._metrics_per_ms,
)
    def write_some(self, flush=False, now=time.time):
"""Write some points from the queue."""
now = now() * 1000 # convert to ms.
if self._last_write_ms == 0:
self._last_write_ms = now
delta_ms = (now - self._last_write_ms) + 1
if flush:
            metrics_to_write = len(self._queue)
else:
metrics_to_write = round(delta_ms * self._metrics_per_ms)
if metrics_to_write == 0:
return
i = 0
log.debug("writing low res points for %d metrics" % metrics_to_write)
while self._queue and i < metrics_to_write:
metric = self._queue.pop()
datapoints = []
# collect the points to write them.
for k, v in self._points[metric].items():
stage, timestamp = k
value, count = v
i += 1
datapoints.append((timestamp, value, count, stage))
self.accessor.insert_downsampled_points_async(metric, datapoints)
# remove the points that have been written
del self._points[metric]
self._last_write_ms = now
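# --- Minimal usage sketch (hypothetical; not part of the original module) ---
# A stub accessor records delayed writes, and FakeStage models only the two
# attributes DelayedWriter reads from a stage object: stage0 and precision_ms.
if __name__ == "__main__":
    FakeStage = collections.namedtuple("FakeStage", ["stage0", "precision_ms"])

    class _StubAccessor(object):
        def insert_downsampled_points_async(self, metric, datapoints):
            print("delayed write: %s -> %d points" % (metric, len(datapoints)))

    writer = DelayedWriter(_StubAccessor(), period_ms=60000)
    high = FakeStage(stage0=True, precision_ms=1000)      # written immediately
    low = FakeStage(stage0=False, precision_ms=86400000)  # held back for later
    immediate = writer.feed("a.b.c", [(0, 1.0, 1, high), (0, 2.0, 1, low)])
    print("written now:", immediate)  # only the stage0 point
    writer.flush()                    # forces the delayed point through the stub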
|
pomeslab/matplotdash
|
server/fake_dataserver.py
|
Python
|
mit
| 2,830
| 0.00318
|
"""
A fake time series data server used during development to serve up JSON
for live updating plot rendering in the browser. The test datafile has
25000 points and cycles through this entire series every 60 minutes.
Examples
--------
>>> python fake_dataserver.py
>>> import requests; requests.get('http://127.0.0.1:5000/ts/10').content
"""
from datetime import datetime, timedelta
from flask import Flask, jsonify
from flask import make_response, request, current_app
from functools import update_wrapper
app = Flask(__name__)
points_per_hour = 6000
def crossdomain(origin=None, methods=None, headers=None,
max_age=21600, attach_to_all=True,
automatic_options=True):
if methods is not None:
methods = ', '.join(sorted(x.upper() for x in methods))
if headers is not None and not isinstance(headers, basestring):
headers = ', '.join(x.upper() for x in headers)
if not isinstance(origin, basestring):
origin = ', '.join(origin)
if isinstance(max_age, timedelta):
max_age = max_age.total_seconds()
def get_methods():
if methods is not None:
return methods
options_resp = current_app.make_default_options_response()
return options_resp.headers['allow']
def decorator(f):
def wrapped_function(*args, **kwargs):
if automatic_options and request.method == 'OPTIONS':
resp = current_app.make_default_options_response()
else:
resp = make_response(f(*args, **kwargs))
if not attach_to_all and request.method != 'OPTIONS':
return resp
h = resp.headers
h['Access-Control-Allow-Origin'] = origin
            h['Access-Control-Allow-Methods'] = get_methods()
h['Access-Control-Max-Age'] = str(max_age)
if headers is not None:
h['Access-Control-Allow-Headers'] = headers
return resp
f.provide_automatic_options = False
return update_wrapper(wrapped_function, f)
return decorator
# Open local datafile (omit the standard error column)
with open ("time_val_error_data.dat","r") as datafile:
lines = datafile.readlines()[:points_per_hour]
(t, v) = zip(*[(float(l.split()[0]), float(l.split()[1])) for l in lines])
@app.route("/ts/", defaults={'points': 100})
@app.route("/ts/<int:points>", methods=["GET"])
@crossdomain(origin='*')
def get_data(points):
now = datetime.now()
fraction=int(len(t)*(now.minute+(now.second/60.0))/60.0)
ts = {'time': range(points),
'value': v[fraction:fraction+points]}
#ts = {'time': t[fraction:fraction+points],
# 'value': v[fraction:fraction+points]}
return jsonify({'ts': ts})
if __name__ == "__main__":
app.run(host='0.0.0.0', port=5000)
|
carmark/vbox
|
src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Object/Parser/InfPpiObject.py
|
Python
|
gpl-2.0
| 12,929
| 0.009204
|
## @file
# This file is used to define class objects of INF file [Ppis] section.
# It will be consumed by InfParser.
#
# Copyright (c) 2011, Intel Corporation. All rights reserved.<BR>
#
# This program and the accompanying materials are licensed and made available
# under the terms and conditions of the BSD License which accompanies this
# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
'''
InfPpiObject
'''
from Library.ParserValidate import IsValidCVariableName
from Library.CommentParsing import ParseComment
from Library.ExpressionValidate import IsValidFeatureFlagExp
from Library.Misc import Sdict
from Library import DataType as DT
import Logger.Log as Logger
from Logger import ToolError
from Logger import StringTable as ST
def ParsePpiComment(CommentsList, InfPpiItemObj):
PreNotify = None
PreUsage = None
PreHelpText = ''
BlockFlag = -1
CommentInsList = []
Count = 0
for CommentItem in CommentsList:
Count = Count + 1
CommentItemUsage, \
CommentItemNotify, \
CommentItemString, \
CommentItemHelpText = \
ParseComment(CommentItem,
DT.ALL_USAGE_TOKENS,
DT.PPI_NOTIFY_TOKENS,
['PPI'],
False)
#
# To avoid PyLint error
#
if CommentItemString:
pass
if CommentItemHelpText == None:
CommentItemHelpText = ''
if Count == len(CommentsList) and CommentItemUsage == CommentItemNotify == DT.ITEM_UNDEFINED:
CommentItemHelpText = DT.END_OF_LINE
#
# For the Last comment Item, set BlockFlag.
#
if Count == len(CommentsList):
if BlockFlag == 1 or BlockFlag == 2:
if CommentItemUsage == CommentItemNotify == DT.ITEM_UNDEFINED:
BlockFlag = 4
else:
BlockFlag = 3
elif BlockFlag == -1:
BlockFlag = 4
#
# Comment USAGE and NOTIFY information are "UNDEFINED"
#
if BlockFlag == -1 or BlockFlag == 1 or BlockFlag == 2:
if CommentItemUsage == CommentItemNotify == DT.ITEM_UNDEFINED:
if BlockFlag == -1:
BlockFlag = 1
elif BlockFlag == 1:
BlockFlag = 2
else:
if BlockFlag == 1 or BlockFlag == 2:
BlockFlag = 3
#
        # An item has Usage or Notify information and this is the first time we get it
#
elif BlockFlag == -1:
BlockFlag = 4
#
# Combine two comment line if they are generic comment
#
if CommentItemUsage == CommentItemNotify == PreUsage == PreNotify == DT.ITEM_UNDEFINED:
CommentItemHelpText = PreHelpText + DT.END_OF_LINE + CommentItemHelpText
#
        # Store this information since the next line may still need a combine operation.
#
PreHelpText = CommentItemHelpText
if BlockFlag == 4:
CommentItemIns = InfPpiItemCommentContent()
CommentItemIns.SetUsage(CommentItemUsage)
CommentItemIns.SetNotify(CommentItemNotify)
CommentItemIns.SetHelpStringItem(CommentItemHelpText)
CommentInsList.append(CommentItemIns)
BlockFlag = -1
PreUsage = None
PreNotify = None
PreHelpText = ''
elif BlockFlag == 3:
#
# Add previous help string
#
CommentItemIns = InfPpiItemCommentContent()
CommentItemIns.SetUsage(DT.ITEM_UNDEFINED)
CommentItemIns.SetNotify(DT.ITEM_UNDEFINED)
if PreHelpText == '' or PreHelpText.endswith(DT.END_OF_LINE):
PreHelpText += DT.END_OF_LINE
CommentItemIns.SetHelpStringItem(PreHelpText)
CommentInsList.append(CommentItemIns)
#
# Add Current help string
#
CommentItemIns = InfPpiItemCommentContent()
CommentItemIns.SetUsage(CommentItemUsage)
CommentItemIns.SetNotify(CommentItemNotify)
CommentItemIns.SetHelpStringItem(CommentItemHelpText)
CommentInsList.append(CommentItemIns)
BlockFlag = -1
PreUsage = None
PreNotify = None
PreHelpText = ''
else:
PreUsage = CommentItemUsage
PreNotify = CommentItemNotify
PreHelpText = CommentItemHelpText
InfPpiItemObj.SetCommentList(CommentInsList)
return InfPpiItemObj
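# Illustrative walk-through (hypothetical input): for three generic comment lines
# followed by one "## CONSUMES ## text" line, the loop reaches the last item with
# BlockFlag == 2, sets it to 3, and the BlockFlag == 3 branch above emits two
# items -- first the merged generic help text, then the CONSUMES usage item.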
class InfPpiItemCommentContent():
def __init__(self):
#
# ## SOMETIMES_CONSUMES ## HelpString
#
self.UsageItem = ''
#
# Help String
#
self.HelpStringItem = ''
self.Notify = ''
self.CommentList = []
def SetUsage(self, UsageItem):
self.UsageItem = UsageItem
def GetUsage(self):
return self.UsageItem
def SetNotify(self, Notify):
if Notify != DT.ITEM_UNDEFINED:
self.Notify = 'true'
def GetNotify(self):
return self.Notify
def SetHelpStringItem(self, HelpStringItem):
self.HelpStringItem = HelpStringItem
def GetHelpStringItem(self):
return self.HelpStringItem
class InfPpiItem():
def __init__(self):
self.Name = ''
self.FeatureFlagExp = ''
self.SupArchList = []
self.CommentList = []
def SetName(self, Name):
self.Name = Name
def GetName(self):
return self.Name
def SetSupArchList(self, SupArchList):
self.SupArchList = SupArchList
def GetSupArchList(self):
return self.SupArchList
def SetCommentList(self, CommentList):
self.CommentList = CommentList
def GetCommentList(self):
return self.CommentList
def SetFeatureFlagExp(self, FeatureFlagExp):
self.FeatureFlagExp = FeatureFlagExp
def GetFeatureFlagExp(self):
return self.FeatureFlagExp
##
#
#
#
class InfPpiObject():
def __init__(self):
self.Ppis = Sdict()
#
# Macro defined in this section should be only used in this section.
#
self.Macros = {}
def SetPpi(self, PpiList, Arch = None):
__SupArchList = []
for ArchItem in Arch:
#
# Validate Arch
#
if (ArchItem == '' or ArchItem == None):
ArchItem = 'COMMON'
__SupArchList.append(ArchItem)
for Item in PpiList:
#
# Get Comment content of this protocol
#
CommentsList = None
if len(Item) == 3:
CommentsList = Item[1]
CurrentLineOfItem = Item[2]
Item = Item[0]
            InfPpiItemObj = InfPpiItem()
if len(Item) >= 1 and len(Item) <= 2:
#
# Only CName contained
#
if not IsValidCVariableName(Item[0]):
Logger.Error("InfParser",
ToolError.FORMAT_INVALID,
ST.ERR_INF_PARSER_INVALID_CNAME%(Item[0]),
File=CurrentLineOfItem[2],
Line=CurrentLineOfItem[1],
Extr
|
mojaves/convirt
|
tests/xmlfile_test.py
|
Python
|
lgpl-2.1
| 3,016
| 0
|
#
# Copyright 2015-2016 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# Refer to the README and COPYING files for full details of the license
#
from __future__ import absolute_import
import contextlib
import os
import uuid
import xml.etree.ElementTree as ET
import convirt
import convirt.config
import convirt.config.environ
import convirt.xmlfile
from . import testlib
class XMLFileTests(testlib.TestCase):
def setUp(self):
self.vm_uuid = str(uuid.uuid4())
@contextlib.contextmanager
def test_env(self):
with testlib.named_temp_dir() as tmp_dir:
with testlib.global_conf(run_dir=tmp_dir):
yield convirt.xmlfile.XMLFile(
self.vm_uuid,
convirt.config.environ.current()
)
def test_fails_without_conf(self):
self.assertRaises(convirt.xmlfile.UnconfiguredXML,
convirt.xmlfile.XMLFile,
self.vm_uuid,
None)
def test_path(self):
with self.test_env() as xf:
self.assertTrue(xf.path.endswith('xml'))
self.assertIn(self.vm_uuid, xf.path)
def test_save(self):
root = ET.fromstring(testlib.minimal_dom_xml())
with self.test_env() as xf:
conf = convirt.config.environ.current()
self.assertEquals(os.listdir(conf.run_dir), [])
self.assertNotRaises(xf.save, root)
            self.assertEqual(len(os.listdir(conf.run_dir)), 1)
def test_load(self):
xml_data = testlib.minimal_dom_xml()
root = ET.fromstring(xml_data)
with self.test_env() as xf:
xf.save(root)
new_root = xf.load()
xml_copy = convirt.xmlfile.XMLFile.encode(new_root)
# FIXME: nasty trick to tidy up the XML
xml_ref = convirt.xmlfile.XMLFile.encode(root)
self.assertEquals(xml_ref, xml_copy)
def test_clear(self):
xml_data = testlib.minimal_dom_xml()
root = ET.fromstring(xml_data)
with self.test_env() as xf:
xf.save(root)
conf = convirt.config.environ.current()
            self.assertEqual(len(os.listdir(conf.run_dir)), 1)
self.assertNotRaises(xf.clear)
self.assertEquals(os.listdir(conf.run_dir), [])
|
alexm92/sentry
|
src/sentry/options/store.py
|
Python
|
bsd-3-clause
| 10,280
| 0.000778
|
"""
sentry.options.store
~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010-2015 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import, print_function
import logging
import six
from collections import namedtuple
from time import time
from random import random
from django.utils import timezone
from django.utils.functional import cached_property
from sentry.db.models.query import create_or_update
from sentry.utils.hashlib import md5_text
Key = namedtuple('Key', ('name', 'default', 'type', 'flags', 'ttl', 'grace', 'cache_key'))
CACHE_FETCH_ERR = 'Unable to fetch option cache for %s'
CACHE_UPDATE_ERR = 'Unable to update option cache for %s'
logger = logging.getLogger('sentry')
def _make_cache_key(key):
return 'o:%s' % md5_text(key).hexdigest()
def _make_cache_value(key, value):
now = int(time())
return (
value,
now + key.ttl,
now + key.ttl + key.grace,
)
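# Worked example (illustrative numbers): for a key with ttl=30 and grace=60
# cached at t=1000, _make_cache_value returns (value, 1030, 1090) -- the value
# is served normally until t=1030 and, when force_grace is requested in
# get_local_cache(), up to t=1090 before being evicted.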
class OptionsStore(object):
"""
Abstraction for the Option storage logic that should be driven
by the OptionsManager.
OptionsStore is gooey and raw. It provides no protection over
what goes into the store. It only knows that it's reading/writing
to the right place. If using the OptionsStore directly, it's your
job to do validation of the data. You should probably go through
OptionsManager instead, unless you need raw access to something.
"""
def __init__(self, cache=None, ttl=None):
self.cache = cache
self.ttl = ttl
self.flush_local_cache()
@cached_property
def model(self):
from sentry.models.option import Option
return Option
def make_key(self, name, default, type, flags, ttl, grace):
return Key(name, default, type, flags, int(ttl), int(grace), _make_cache_key(name))
def get(self, key, silent=False):
"""
Fetches a value from the options store.
"""
result = self.get_cache(key, silent=silent)
if result is not None:
return result
result = self.get_store(key, silent=silent)
if result is not None:
return result
# As a last ditch effort, let's hope we have a key
# in local cache that's possibly stale
return self.get_local_cache(key, force_grace=True)
def get_cache(self, key, silent=False):
"""
        First check against our local in-process cache, falling
back to the network cache.
"""
value = self.get_local_cache(key)
if value is not None:
return value
if self.cache is None:
return None
cache_key = key.cache_key
try:
value = self.cache.get(cache_key)
except Exception:
if not silent:
logger.warn(CACHE_FETCH_ERR, key.name, exc_info=True)
value = None
else:
if key.ttl > 0:
self._local_cache[cache_key] = _make_cache_value(key, value)
return value
def get_local_cache(self, key, force_grace=False):
"""
Attempt to fetch a key out of the local cache.
If the key exists, but is beyond expiration, we only
return it if grace=True. This forces the key to be returned
in a disaster scenario as long as we're still holding onto it.
This allows the OptionStore to pave over potential network hiccups
by returning a stale value.
"""
try:
value, expires, grace = self._local_cache[key.cache_key]
except KeyError:
return None
now = int(time())
# Key is within normal expiry window, so just return it
if now < expires:
return value
# If we're able to accept within grace window, return it
if force_grace and now < grace:
return value
# Let's clean up values if we're beyond grace.
if now > grace:
try:
del self._local_cache[key.cache_key]
except KeyError:
# This could only exist in a race condition
# where another thread has already deleted this key,
# but we'll guard ourselves against it Justin Case.
# In this case, it's also possible that another thread
# has updated the value at this key, causing us to evict
# it prematurely. This isn't ideal, but not terrible
# since I don't want to introduce locking to prevent this.
# Even if it did happen, the consequence is just another
# network hop.
pass
# If we're outside the grace window, even if we ask for it
# in grace, too bad. The value is considered bad.
return None
def get_store(self, key, silent=False):
"""
Attempt to fetch value from the database. If successful,
also set it back in the cache.
        Returns None in both cases, if the key doesn't actually exist,
or if we errored fetching it.
NOTE: This behavior should probably be improved to differentiate
        between a miss vs error, but not worth it now since the value
is limited at the moment.
"""
try:
value = self.model.objects.get(key=key.name).value
except self.model.DoesNotExist:
value = None
except Exception as e:
if not silent:
logger.exception(six.text_type(e))
value = None
else:
# we only attempt to populate the cache if we were previously
# able to successfully talk to the backend
# NOTE: There is definitely a race condition here between updating
# the store and the cache
try:
self.set_cache(key, value)
except Exception:
if not silent:
logger.warn(CACHE_UPDATE_ERR, key.name, exc_info=True)
return value
def set(self, key, value):
"""
Store a value in the option store. Value must get persisted to database first,
then attempt caches. If it fails database, the entire operation blows up.
If cache fails, we ignore silently since it'll get repaired later by sync_options.
A boolean is returned to indicate if the network cache was set successfully.
"""
assert self.cache is not None, 'cache must be configured before mutating options'
self.set_store(key, value)
return self.set_cache(key, value)
def set_store(self, key, value):
create_or_update(
model=self.model,
key=key.name,
values={
'value': value,
'last_updated': timezone.now(),
}
)
def set_cache(self, key, value):
if self.cache is None:
return None
cache_key = key.cache_key
if key.ttl > 0:
self._local_cache[cache_key] = _make_cache_value(key, value)
try:
self.cache.set(cache_key, value, self.ttl)
return True
except Exception:
logger.warn(CACHE_UPDATE_ERR, key.name, exc_info=True)
return False
def delete(self, key):
"""
Remove key out of option stores. This operation must succeed on the
database first. If database fails, an exception is raised.
If database succeeds, caches are then allowed to fail silently.
A boolean is returned to indicate if the network deletion succeeds.
"""
assert self.cache is not None, 'cache must be configured before mutating options'
self.delete_store(key)
return self.delete_cache(key)
def delete_store(self, key):
self.model.objects.filter(key=key.name).delete()
def delete_cache(self, key):
cache_key = key.cache_key
try:
del self._local_cache[cache_key]
except KeyError:
pass
try:
self.cache.delete(cache_key)
return True
except
|
gav-/Nand2Tetris-Games_and_Demos
|
bin/pbm2jack.image.py
|
Python
|
gpl-3.0
| 6,471
| 0.007418
|
#!/usr/bin/env python3
"""pgm2jack
Convert a P1 pbm (monochome ascii, a.k.a. "Plain PBM") file to a
jack array (nand2tetris).
One word is 16bits in jack, so image width must be a multiple!
Copyright 2013-2016 Gavin Stewart.
You are required to give attribution to the author (Gavin Stewart) for any
use of this program (GPLv3 Section 7b).
Trying to pass off my code as your own in your Elements of Computing classes
will result in a cursed life of forever buggy software.
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import sys
import os
import re
import math
class Parser ():
"""P1 PBM parser."""
def __init__ (self, filename):
if not os.path.exists(filename):
sys.exit('Parser Error: Input file not found: %s' % filename)
self._fh = open(filename, mode='r')
self._currentWord = 0
self._currentBit = 0;
self._nextLine = None
self._nextBit = ''
self._explodeLines = False
self.hasMoreLines()
search = re.search(r'^P1$', self._nextLine)
if not search: sys.exit('Failed to identify P1 type pbm')
self.hasMoreLines()
search = re.search(r'^(\d+)\s+(\d+)$', self._nextLine)
if not search: sys.exit('Failed to identify resolution')
self.width = int(search.group(1))
self.height = int(search.group(2))
if self.width%16 != 0: sys.exit('Width is not a multiple of 16')
self.width = self.width // 16
self._explodeLines = True
self.hasMoreLines()
def close(self):
"""Close input file"""
self._fh.close()
def hasMoreLines (self) :
# Find the next non-empty line if one exists.
line = None
for line in self._fh:
line = re.sub(r'#.*$', '', line) # Remove comments
line = re.sub(r'^\s+', '', line) # Remove leading whitespace
line = re.sub(r'\s+$', '', line) # Remove trailing whitespace
if len(line): break # Break if line not empty
if line:
if self._explodeLines:
self._nextLine = list(line)
else:
self._nextLine = line
return True
return False
def hasMoreBits (self) :
# Find the next bit.
if not len(self._nextLine):
if not self.hasMoreLines():
return False
# Shift first bit from string.
self._currentBit = self._nextLine.pop(0)
return True
def buildWord (self) :
"""buildWord - build a 16bit word MSB to LSB, left to right.
Returns a string of 16 bits.
"""
word = ''
word = self._currentBit
for i in range(1,16):
if self.hasMoreBits():
word = self._currentBit + word
else:
word = '0' + word
return word
def formatWord (bits) :
"""Format input bit string as a 16bit 2s complement Jack word.
Returns formatted string suitable for use as a literal value in Jack.
"""
val = int(bits, base=2)
if val > (2**15)-1 : # Ensure 2s complement 16bit int.
val = -(2**15 - (val - 2**15)) # e.g. 65528: 1111111111111000
# 32768-(65528-32768) == 8
# which we negate: -8
if val == -32768:
# A limitation in the Jack compiler prevents the maximum negative number
# as a literal, so we have to add an operation to get the value.
return "-32767-1"
else:
return "{0}".format(val)
### Main ###
if len(sys.argv) < 3:
sys.exit('Usage: %s <class name> <file.pbm>' % sys.argv[0])
className = sys.argv[1]
infile = sys.argv[2]
filename, ext = os.path.splitext(infile)
if ext != '.pbm':
sys.exit("Expected file extension .pbm")
outfile = className + '.jack'
outfh = open(outfile, 'w')
parser = Parser(infile)
# Initialise data array rounded up to nearest multiple of 16, since we push
# 16 words at a time into bitmap.
data = [0] * int(math.ceil(parser.width * parser.height / 16) * 16);
### Class declaration
classHeader="""/**
* Static Image factory class.
*
* Requires Image.jack
*
* Generated from "{filename}" by pbm2jack.py
*
* Copyright 2013 Gavin Stewart.
*/
class {cn} {{
/**
* newImage - returns an Image object containing bitmap data.
*/
function Image newImage () {{
var Image i;
var int width, height;
let width = {width};
let height = {height};
let i = Image.newBitmap(width, height);
"""
classFooter="""
return i;
}}
}}
"""
outfh.write(classHeader.format(filename = os.path.basename(infile),
                               cn = className,
width = parser.width,
height = parser.height))
index = 0
while parser.hasMoreBits():
bitWord = parser.buildWord()
data[index] = formatWord(bitWord);
index += 1
index = 0
while index < len(data):
outfh.write(" do i.push({0},{1},{2},{3},{4},{5},{6},{7},{8},{9},{10},{11},{12},{1
|
3},{14},{15});\n".
format(
data[index],
data[index + 1],
data[index + 2],
data[index + 3],
data[index + 4],
data[index + 5],
data[index + 6],
data[index + 7],
data[index + 8],
data[index + 9],
data[index + 10],
data[index + 11],
data[index + 12],
data[index + 13],
data[index + 14],
data[index + 15],
)
)
index += 16
outfh.write(classFooter.format())
parser.close()
outfh.close()
|
PaddlePaddle/Paddle
|
python/paddle/fluid/tests/custom_op/custom_relu_setup.py
|
Python
|
apache-2.0
| 1,325
| 0
|
# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from utils import paddle_includes, extra_compile_args, IS_MAC
from paddle.utils.cpp_extension import CUDAExtension, setup, CppExtension
# Mac-CI don't support GPU
Extension = CppExtension if IS_MAC else CUDAExtension
sources = ['custom_relu_op.cc', 'custom_relu_op_dup.cc']
if not IS_MAC:
sources.append('custom_relu_op.cu')
# custom_relu_op_dup.cc is only used for multi ops test,
# not a new op, if you want to test only one op, remove this
# source file
setup(
name='custom_relu_module_setup',
ext_modules=Extension( # test for not specific name here.
sources=sources, # test for multi ops
include_dirs=paddle_includes,
        extra_compile_args=extra_compile_args))
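# Typical invocation (illustrative; the exact workflow may differ per Paddle
# version): `python custom_relu_setup.py install` builds and installs the
# extension under the `custom_relu_module_setup` name given above, after which
# the compiled custom relu op can be loaded in the accompanying tests.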
|
saltstack/salt
|
doc/_ext/saltrepo.py
|
Python
|
apache-2.0
| 498
| 0.002008
|
"""
saltrepo
~~~~~~~~
SaltStack Repository Sphinx directives
"""
def source_read_handler(app, docname, source):
if "|repo_primary_branch|
|
" in source[0]:
source[0] = source[0].replace(
"|repo_primary_branch|", app.config.html_context["repo_primary_branch"]
)
def setup(app):
app.connect("source-read", source_read_handler)
return {
"version": "builtin",
"parallel_read_safe": True,
"parallel_write
|
_safe": True,
}
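# Illustrative conf.py wiring (assumed names): the handler above expects
# html_context to carry the primary branch name, e.g.
#
#   extensions = ["saltrepo"]
#   html_context = {"repo_primary_branch": "master"}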
|
ericfourrier/pwnedapi
|
pwnedapi/__init__.py
|
Python
|
mit
| 72
| 0
|
__all__ = ['pwnedapi', 'utils']
from .pwnedapi import HaveIBeenPwnedApi
|
GarmanGroup/RABDAM
|
tests/test_bnet_calculation.py
|
Python
|
lgpl-3.0
| 8,856
| 0.000452
|
# RABDAM
# Copyright (C) 2020 Garman Group, University of Oxford
# This file is part of RABDAM.
# RABDAM is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation, either version 3 of
# the License, or (at your option) any later version.
# RABDAM is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
# You should have received a copy of the GNU Lesser General
# Public License along with this program. If not, see
# <http://www.gnu.org/licenses/>.
# An outer layer to the pipeline scripts. Depending upon the flags specified
# in the command line input, this script will run either the complete
# pipeline or a subsection of it.
# python -m unittest tests/test_bnet_calculation.py
import os
import unittest
from rabdam.Subroutines.CalculateBDamage import rabdam
class TestClass(unittest.TestCase):
def test_bnet_values(self):
"""
Checks that RABDAM calculates expected Bnet values for a selection of
PDB entries
"""
import os
import requests
import shutil
import pandas as pd
exp_bnet_dict = {'2O2X': 3.300580966,
'4EZF': 3.193514624,
'4MWU': 3.185476349,
                         '4MOV': 3.144130191,
'3NBM': 3.141821366,
'1GW1': 3.105626889,
'4EWE': 3.08241654,
'3F1P': 3.060628186,
'3IV0': 3.054440912,
'4ZWV': 3.017330004,
'1T2I': 3.004830448,
'3LX3': 2.962424378,
'5P4N': 2.916582486,
'5MAA': 2.91219352,
'1E73': 2.850203561,
'1YKI': 2.797739814,
'4WA4': 2.720540993,
'3V2J': 2.669599635,
'3CUI': 2.666605946,
'4XLA': 2.624366813,
'4DUK': 2.854175949,
'3V38': 2.500984382,
'1VJF': 2.496374854,
'5IO2': 2.467587911,
'5CM7': 2.44869046,
'2EHU': 2.448290431,
'5JOW': 2.439619791,
'2C54': 2.379224017,
'4GZK': 2.349526276,
'2NUM': 2.326904729,
'5FYO': 2.319618192,
'4ODK': 2.304354685,
'6EV4': 2.302433369,
'5P5U': 2.288966997,
'3VHV': 2.285877338,
'4JCK': 2.27150332,
'5EKM': 2.258574341,
'3H4O': 2.231817033,
'5JIG': 2.247664542,
'2H5S': 2.206850226,
'4M5I': 2.169405117,
'1Y59': 2.138787261,
'4C45': 2.131256276,
'5F90': 2.11287042,
'4NI3': 2.088735516,
'4Z6N': 2.083743584,
'5M2G': 2.06566475,
'5ER6': 2.05707889,
'4R0X': 2.006996308,
'5LLG': 1.981501196,
'1FCX': 1.976990791,
'5M90': 1.96542442,
'3NJK': 1.955577757,
'5CWG': 1.949818624,
'2P7O': 1.921138477,
'5SZC': 1.962633169,
'2I0K': 1.901555841,
'4RDK': 1.886900766,
'5MA0': 1.877853781,
'4C1E': 1.877575448,
'5EJ3': 1.875439995,
'2WUG': 1.87334953,
'4MPY': 1.842338963,
'4OTZ': 1.835716553,
'4IOO': 1.828349113,
'4Z6O': 1.800528596,
'4ZOT': 1.799163077,
'5PHB': 1.783879628,
'3UJC': 1.747894856,
'4FR8': 1.738876799,
'5PH8': 1.736825591,
'5UPM': 1.736663507,
'3MWX': 1.733132746,
'4KDX': 1.729650659,
'3WH5': 1.717975404,
'4P04': 1.714107945,
'5Y90': 1.695283923,
'4H31': 1.674014779,
'5HJE': 1.662869176,
'4YKK': 1.653894709,
'1Q0F': 1.646880018,
'5JP6': 1.629246723,
'1X7Y': 1.618817315,
'4ZC8': 1.60606196,
'5EPE': 1.604407869,
'4ZS9': 1.582398487,
'5VNX': 1.543824945,
'5IHV': 1.542271159,
'5J90': 1.526469901,
'4K6W': 1.520316883,
'3PBC': 1.512738972,
'5CMB': 1.504620762,
'4PSC': 1.491796934,
'5UPN': 1.477252783,
'4XLZ': 1.473298738,
'4XGY': 1.465885549,
'5M4G': 1.400219288,
'3A54': 1.319587779}
if not os.path.isdir('tests/temp_files/'):
os.mkdir('tests/temp_files/')
for code, exp_bnet in exp_bnet_dict.items():
# Checks cif file
cif_text = requests.get('https://files.rcsb.org/view/%s.cif' % code)
with open('tests/temp_files/%s.cif' % code, 'w') as f:
f.write(cif_text.text)
rabdam_run = rabdam(
pathToInput='%s/tests/temp_files/%s.cif' % (os.getcwd(), code),
outputDir='%s/tests/temp_files/' % os.getcwd(),
batchRun=True,
overwrite=True,
PDT=7,
windowSize=0.02,
protOrNA='protein',
HETATM=False,
removeAtoms=[],
addAtoms=[],
highlightAtoms=[],
createOrigpdb=False,
createAUpdb=False,
createUCpdb=False,
createAUCpdb=False,
createTApdb=False
)
rabdam_run.rabdam_dataframe(test=True)
rabdam_run.rabdam_analysis(
output_options=['csv', 'pdb', 'cif', 'kde', 'bnet', 'summary']
)
bnet_df = pd.read_pickle('tests/temp_files/Logfiles/Bnet_protein.pkl')
act_bnet_cif = bnet_df['Bnet'].tolist()[-1]
self.assertEqual(round(exp_bnet, 7), round(act_bnet_cif, 7))
os.remove('tests/temp_files/%s.cif' % code)
os.remove('tests/temp_files/Logfiles/Bnet_protein.pkl')
# Checks PDB file
pdb_text = requests.get('https://files.rcsb.org/view/%s.pdb' % code)
with open('tests/temp_files/%s.pdb' % code, 'w') as f:
f.write(pdb_text.text)
rabdam_run = rabdam(
pathToInput='%s/tests/temp_files/%s.pdb' % (os.getcwd(), code),
outputDir='%s/tests/temp_files/' % os.getcwd(),
batchRun=True,
overwrite=True,
PDT=7,
windowSize=0.02,
protOrNA='protein',
HETATM=False,
removeAtoms=[],
addAtoms=[],
highlightAtoms=[],
createOrigpdb=False,
createAUpdb=False,
createUCpdb=False,
cre
|
vrooje/gzcandels_datapaper
|
plotting/plot_mags_z_thresholds.py
|
Python
|
mit
| 11,858
| 0.00818
|
import sys
import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec
import matplotlib.ticker
import numpy as np
#import pandas as pd
import astropy
from astropy.table import Table
from scipy import stats, interpolate, special
from scipy.stats import gaussian_kde
infile = 'candels_restframe_phot_weighted_classifications.fits'
fsmooth = 't00_smooth_or_featured_a0_smooth_weighted_frac'
ffeatured = 't00_smooth_or_featured_a1_features_weighted_frac'
fartifact = 't00_smooth_or_featured_a2_artifact_weighted_frac'
print("Reading file %s ...\n" % infile)
gz_all = Table.read(infile)
print(" ... done.")
threshold = 0.02 # 1 - threshold is the %ile to plot a contour at
threshold_all = 0.02 # We will plot the outer <--- fraction of points
threshold_col = 0.01 # We will plot the outer <--- fraction of points
bins = 60
bins_all = 60
okdata = np.invert(np.isnan(gz_all['z_best'])) & np.invert(np.isnan(gz_all['UX_rest'])) & np.invert(np.isnan(gz_all['V_rest'])) & np.invert(np.isnan(gz_all['J_rest']))
gz = gz_all[okdata]
not_artifact = gz[ffeatured] < 0.5
Vzlims = (gz['z_best'] > 0.0) & (gz['z_best'] < 4.1) & (gz['V_rest'] > -25.5) & (gz['V_rest'] < -9.5) & not_artifact
# so that we can plot points without overplotting duplicates
smooth04 = (gz[fsmooth] >= 0.4) & (gz[fsmooth] < 0.5)
smooth05 = (gz[fsmooth] >= 0.5) & (gz[fsmooth] < 0.6)
smooth06 = (gz[fsmooth] >= 0.6) & (gz[fsmooth] < 0.7)
smooth07 = (gz[fsmooth] >= 0.7) & (gz[fsmooth] < 0.8)
smooth08 = (gz[fsmooth] >= 0.8)
# for contour purposes
smooth04a = (gz[fsmooth] >= 0.4)
smooth05a = (gz[fsmooth] >= 0.5)
smooth06a = (gz[fsmooth] >= 0.6)
smooth07a = (gz[fsmooth] >= 0.7)
smooth08a = smooth08
featured04 = (gz[ffeatured] >= 0.4) & (gz[ffeatured] < 0.5)
featured05 = (gz[ffeatured] >= 0.5) & (gz[ffeatured] < 0.6)
featured06 = (gz[ffeatured] >= 0.6) & (gz[ffeatured] < 0.7)
featured07 = (gz[ffeatured] >= 0.7)
featured04a = (gz[ffeatured] >= 0.4)
featured05a = (gz[ffeatured] >= 0.5)
featured06a = (gz[ffeatured] >= 0.6)
featured07a = featured07
# get contours from KDE kernels
# all data
all_pts = np.vstack([np.array(gz['z_best'][Vzlims]), np.array(gz['V_rest'][Vzlims])])
kde_all = gaussian_kde(all_pts)
kde_all = gaussian_kde(all_pts, bw_method=kde_all.scotts_factor()/2.)
z_all = kde_all(all_pts)
x = np.ma.masked_where(z_all > threshold_all, np.array(gz['z_best'][Vzlims]))
y = np.ma.masked_where(z_all > threshold_all, np.array(gz['V_rest'][Vzlims]))
#smooth - Vz
smooth_pts04 = np.vstack([np.array(gz['z_best'][smooth04a]), np.array(gz['V_rest'][smooth04a])])
smooth_pts05 = np.vstack([np.array(gz['z_best'][smooth05a]), np.array(gz['V_rest'][smooth05a])])
smooth_pts06 = np.vstack([np.array(gz['z_best'][smooth06a]), np.array(gz['V_rest'][smooth06a])])
smooth_pts07 = np.vstack([np.array(gz['z_best'][smooth07a]), np.array(gz['V_rest'][smooth07a])])
smooth_pts08 = np.vstack([np.array(gz['z_best'][smooth08a]), np.array(gz['V_rest'][smooth08a])])
# Fit each KDE once to obtain the default Scott factor, then refit with half of
# it (mirrors the kde_all construction above; the original lines referenced the
# kde objects before defining them).
kde_s04 = gaussian_kde(smooth_pts04, bw_method=gaussian_kde(smooth_pts04).scotts_factor()/2.)
kde_s05 = gaussian_kde(smooth_pts05, bw_method=gaussian_kde(smooth_pts05).scotts_factor()/2.)
kde_s06 = gaussian_kde(smooth_pts06, bw_method=gaussian_kde(smooth_pts06).scotts_factor()/2.)
kde_s07 = gaussian_kde(smooth_pts07, bw_method=gaussian_kde(smooth_pts07).scotts_factor()/2.)
kde_s08 = gaussian_kde(smooth_pts08, bw_method=gaussian_kde(smooth_pts08).scotts_factor()/2.)
z_s04 = kde_s04(smooth_pts04)
z_s05 = kde_s05(smooth_pts05)
z_s06 = kde_s06(smooth_pts06)
z_s07 = kde_s07(smooth_pts07)
z_s08 = kde_s08(smooth_pts08)
# mask points above density threshold
x_s04 = np.ma.masked_where(z_s04 > threshold, np.array(gz['z_best'][smooth04a]))
y_s04 = np.ma.masked_where(z_s04 > threshold, np.array(gz['V_rest'][smooth04a]))
x_s05 = np.ma.masked_where(z_s05 > threshold, np.array(gz['z_best'][smooth05a]))
y_s05 = np.ma.masked_where(z_s05 > threshold, np.array(gz['V_rest'][smooth05a]))
x_s06 = np.ma.masked_where(z_s06 > threshold, np.array(gz['z_best'][smooth06a]))
y_s06 = np.ma.masked_where(z_s06 > threshold, np.array(gz['V_rest'][smooth06a]))
x_s07 = np.ma.masked_where(z_s07 > threshold, np.array(gz['z_best'][smooth07a]))
y_s07 = np.ma.masked_where(z_s07 > threshold, np.array(gz['V_rest'][smooth07a]))
x_s08 = np.ma.masked_where(z_s08 > threshold, np.array(gz['z_best'][smooth08a]))
y_s08 = np.ma.masked_where(z_s08 > threshold, np.array(gz['V_rest'][smooth08a]))
# featured - Vz
featured_ptf04 = np.vstack([np.array(gz['z_best'][featured04a]), np.array(gz['V_rest'][featured04a])])
featured_ptf05 = np.vstack([np.array(gz['z_best'][featured05a]), np.array(gz['V_rest'][featured05a])])
featured_ptf06 = np.vstack([np.array(gz['z_best'][featured06a]), np.array(gz['V_rest'][featured06a])])
featured_ptf07 = np.vstack([np.array(gz['z_best'][featured07a]), np.array(gz['V_rest'][featured07a])])
# Same initial-fit trick as above, with a 0.8x Scott bandwidth for featured
kde_f04 = gaussian_kde(featured_ptf04, bw_method=gaussian_kde(featured_ptf04).scotts_factor()*.8)
kde_f05 = gaussian_kde(featured_ptf05, bw_method=gaussian_kde(featured_ptf05).scotts_factor()*.8)
kde_f06 = gaussian_kde(featured_ptf06, bw_method=gaussian_kde(featured_ptf06).scotts_factor()*.8)
kde_f07 = gaussian_kde(featured_ptf07, bw_method=gaussian_kde(featured_ptf07).scotts_factor()*.8)
z_f04 = kde_f04(featured_ptf04)
z_f05 = kde_f05(featured_ptf05)
z_f06 = kde_f06(featured_ptf06)
z_f07 = kde_f07(featured_ptf07)
# mask points above density threshold
x_f04 = np.ma.masked_where(z_f04 > threshold, np.array(gz['z_best'][featured04a]))
y_f04 = np.ma.masked_where(z_f04 > threshold, np.array(gz['V_rest'][featured04a]))
x_f05 = np.ma.masked_where(z_f05 > threshold, np.array(gz['z_best'][featured05a]))
y_f05 = np.ma.masked_where(z_f05 > threshold, np.array(gz['V_rest'][featured05a]))
x_f06 = np.ma.masked_where(z_f06 > threshold, np.array(gz['z_best'][featured06a]))
y_f06 = np.ma.masked_where(z_f06 > threshold, np.array(gz['V_rest'][featured06a]))
x_f07 = np.ma.masked_where(z_f07 > threshold, np.array(gz['z_best'][featured07a]))
y_f07 = np.ma.masked_where(z_f07 > threshold, np.array(gz['V_rest'][featured07a]))
colall = '#AAAAAA'
col04 = '#006e35'
col05 = '#4455CC'
col06 = '#30a0ca'
#col07 = '#00ccaE'
col07 = "#ac0e30"
colclean = "#ac0e30"
sty04 = 'dashed'
sty05 = 'dotted'
sty06 = 'dashdot'
sty07 = 'solid'
styclean = 'solid'
fig = plt.figure(figsize=(10, 4))
gs = gridspec.GridSpec(1,2)
#gs.update(hspace=0.25, wspace=0.001)
zaxis = (0.0, 4.)
Vaxis = (-24.2, -10.)
ax1 = fig.add_subplot(gs[0,0])
ax1.set_xlim(zaxis)
ax1.set_ylim(Vaxis)
ax1.invert_yaxis()
# it should be ax1 not plt below but if I do that I can't get the colorbar to work
#plt.hexbin(gz['z_best'][Vzlims], gz['V_rest'][Vzlims], gridsize=25, bins='log', cmap='Greys', label='_nolegend_')
# plot unmasked points
ax1.scatter(x, y, c=colall, marker='.', edgecolor='None')
# get bounds from axes
# this is a bit silly as we've already defined them above, but just in case
# you need this for some other purpose later you'll maybe find this in a search
xmin, xmax = ax1.get_xlim()
ymin, ymax = ax1.get_ylim()
#xmin = -.2
# prepare grid for density map
xedges = np.linspace(xmin, xmax, bins)
yedges = np.linspace(ymin, ymax, bins)
xx, yy = np.meshgrid(xedges, yedges)
gridpoints = np.array([xx.ravel(), yy.ravel()])
# compute density maps
zz = np.reshape(kde_all(gridpoints), xx.shape)
zz_s04 = np.reshape(kde_s04(gridpoints), xx.shape)
zz_s05 = np.reshape(kde_s05(gridpoints), xx.shape)
zz_s06 = np.reshape(kde_s06(gridpoints), xx.shape)
zz_s07 = np.reshape(kde_s07(gridpoints), xx.shape)
zz_s08 = np.reshape(kde_s08(gridpoints), xx.shape)
# plot density map
im1 = ax1.imshow(zz, cmap='Greys', interpolation='nearest', origin='lower', extent=[xmin, xmax, ymin, ymax], aspect='auto')
ax1.contour(xx, yy, zz, levels=[threshold_all], colors=colall, linestyles='solid', label = '_nolegend_')
# plot threshold contour
#ax1.contour(xx, yy, zz_s04, levels=[threshold], colors=col04, linestyles=sty04, label = '$f_{\\rm smooth} \\geq 0.4$', lineweights=2)
cs04 = ax1.contour(xx, yy, zz_s04, levels=[threshold], colors=col04, linestyles=sty04, label = '$f_{\\rm smooth} \\geq 0.4$', lineweights=2)
cs05 = ax1.contour(xx, yy, zz_s05, levels=[threshold], colors=col05, linestyles=sty05, label = '$f_{\\rm smooth} \\geq 0.5$', lineweights=2)
cs06 = ax1.contour(xx, yy, zz_s06, levels=[threshold], colors=col06, linestyles=
|
Outernet-Project/bottle-streamline
|
tests/sample_apps/base_only.py
|
Python
|
bsd-2-clause
| 488
| 0
|
from streamline import RouteBase
class MyRoute(RouteBase):
path = '/'
def get(self):
self.response.headers['foo'] = 'bar'
return 'Hello world!'
class MyOtherRoute(RouteBase):
path = '/other'
def post(self):
return 'Posted'
def delete(self):
return 'Deleted'
def patch(self):
        return 'Patched'
def main():
MyRoute.route()
MyOtherRoute.route()
MyRoute.bottle.run()
if __name__ == '__main__':
main()
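# Example interaction (illustrative; bottle's default bind is 127.0.0.1:8080):
#   curl http://127.0.0.1:8080/              -> "Hello world!" (with a foo: bar header)
#   curl -X POST http://127.0.0.1:8080/other -> "Posted"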
|
mmuzila/errata-tool
|
errata_tool/tests/conftest.py
|
Python
|
mit
| 2,567
| 0
|
import json
import os
from errata_tool import ErrataConnector, Erratum
from errata_tool.products import ProductList
import requests
import pytest
TESTS_DIR = os.path.dirname(os.path.abspath(__file__))
FIXTURES_DIR = os.path.join(TESTS_DIR, 'fixtures')
class MockResponse(object):
status_code = 200
encoding = 'utf-8'
headers = {'content-type': 'application/json; charset=utf-8'}
def raise_for_status(self):
pass
@property
def _fixture(self):
""" Return pa
|
th to our static fixture file. """
return self.url.replace('https://errata.devel.redhat.com/',
os.path.join(FIXTURES_DIR,
'errata.devel.redhat.com/'))
def json(self):
try:
with open(self._fixture) as fp:
return json.load(fp)
except IOError:
print('Try ./new-fixture.sh %s' % self.url)
raise
@property
def text(self):
""" Return contents of our static fixture file. """
try:
with open(self._fixture) as fp:
return fp.read()
except IOError:
print('Try ./new-fixture.sh %s' % self.url)
raise
class RequestRecorder(object):
""" Record args to requests.get() or requests.post() """
def __call__(self, url, **kwargs):
""" mocking requests.get() or requests.post() """
self.response = MockResponse()
self.response.url = url
self.kwargs = kwargs
return self.response
@pytest.fixture
def mock_get():
return RequestRecorder()
@pytest.fixture
def mock_post():
return RequestRecorder()
@pytest.fixture
def mock_put():
return RequestRecorder()
@pytest.fixture
def advisory(monkeypatch, mock_get):
monkeypatch.delattr('requests.sessions.Session.request')
monkeypatch.setattr(ErrataConnector, '_auth', None)
monkeypatch.setattr(requests, 'get', mock_get)
return Erratum(errata_id=26175)
@pytest.fixture
def rhsa(monkeypatch, mock_get):
""" Like the advisory() fixture above, but an RHSA. """
monkeypatch.delattr('requests.sessions.Session.request')
monkeypatch.setattr(ErrataConnector, '_auth', None)
monkeypatch.setattr(requests, 'get', mock_get)
return Erratum(errata_id=25856)
@pytest.fixture
def productlist(monkeypatch, mock_get):
monkeypatch.delattr('requests.sessions.Session.request')
monkeypatch.setattr(ErrataConnector, '_auth', None)
monkeypatch.setattr(requests, 'get', mock_get)
return ProductList()
|
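A sketch of how these fixtures might be consumed in a test; the errata_id attribute on the Erratum object is an assumption about errata_tool's API, not something this conftest guarantees:

# Hypothetical test module sitting next to this conftest.
def test_advisory_built_from_fixtures(advisory, mock_get):
    # errata_id is assumed to be exposed by errata_tool.Erratum.
    assert advisory.errata_id == 26175
    # RequestRecorder keeps the last URL passed to requests.get().
    assert mock_get.response.url.startswith('https://errata.devel.redhat.com/')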
weixsong/algorithm
|
leetcode/137.py
|
Python
|
mit
| 614
| 0.008143
|
# -*- encoding: utf-8 -*-
'''
Given an array of integers, every element appears three times except for one. Find that single one.
Note:
Your algorithm should have a linear runtime complexity. Could you implement it without using extra memory?
'''
class Solution(object):
def singleNumber(self, nums):
"""
:type nums: List[int]
:rtype: int
"""
count = {}
for num in nums:
if num not in count:
count[num] = 0
count[num] += 1
for key in count:
if count[key] != 3:
return key
|
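The dictionary above costs O(n) extra memory, which the problem statement explicitly asks to avoid. A sketch of the standard constant-space alternative, counting set bits modulo 3; the 32-bit two's-complement handling assumes LeetCode-style bounded integers:

def single_number_constant_space(nums):
    result = 0
    for bit in range(32):
        # Bits of the unique element occur a non-multiple-of-3 number of times.
        if sum((n >> bit) & 1 for n in nums) % 3:
            result |= 1 << bit
    # Reinterpret as a signed 32-bit value for negative inputs.
    return result - 2 ** 32 if result >= 2 ** 31 else result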
fearthecowboy/pygments
|
Pygments/pygments-lib/pygments/lexers/_mapping.py
|
Python
|
bsd-2-clause
| 45,831
| 0.007571
|
# -*- coding: utf-8 -*-
"""
pygments.lexers._mapping
~~~~~~~~~~~~~~~~~~~~~~~~
    Lexer mapping definitions. This file is generated by itself. Every time
you change something on a builtin lexer definition, run this script from
the lexers folder to update it.
Do not alter the LEXERS dictionary by hand.
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from __future__ import print_function
LEXERS = {
'ABAPLexer': ('pygments.lexers.business', 'ABAP', ('abap',), ('*.abap',), ('text/x-abap',)),
'APLLexer': ('pygments.lexers.apl', 'APL', ('apl',), ('*.apl',), ()),
'ActionScript3Lexer': ('pygments.lexers.actionscript', 'ActionScript 3', ('as3', 'actionscript3'), ('*.as',), ('application/x-actionscript3', 'text/x-actionscript3', 'text/actionscript3')),
'ActionScriptLexer': ('pygments.lexers.actionscript', 'ActionScript', ('as', 'actionscript'), ('*.as',), ('application/x-actionscript', 'text/x-actionscript', 'text/actionscript')),
'AdaLexer': ('pygments.lexers.pascal', 'Ada', ('ada', 'ada95', 'ada2005'), ('*.adb', '*.ads', '*.ada'), ('text/x-ada',)),
'AgdaLexer': ('pygments.lexers.haskell', 'Agda', ('agda',), ('*.agda',), ('text/x-agda',)),
'AlloyLexer': ('pygments.lexers.dsls', 'Alloy', ('alloy',), ('*.als',), ('text/x-alloy',)),
'AmbientTalkLexer': ('pygments.lexers.ambient', 'AmbientTalk', ('at', 'ambienttalk', 'ambienttalk/2'), ('*.at',), ('text/x-ambienttalk',)),
'AntlrActionScriptLexer': ('pygments.lexers.parsers', 'ANTLR With ActionScript Target', ('antlr-as', 'antlr-actionscript'), ('*.G', '*.g'), ()),
'AntlrCSharpLexer': ('pygments.lexers.parsers', 'ANTLR With C# Target', ('antlr-csharp', 'antlr-c#'), ('*.G', '*.g'), ()),
'AntlrCppLexer': ('pygments.lexers.parsers', 'ANTLR With CPP Target', ('antlr-cpp',), ('*.G', '*.g'), ()),
'AntlrJavaLexer': ('pygments.lexers.parsers', 'ANTLR With Java Target', ('antlr-java',), ('*.G', '*.g'), ()),
'AntlrLexer': ('pygments.lexers.parsers', 'ANTLR', ('antlr',), (), ()),
'AntlrObjectiveCLexer': ('pygments.lexers.parsers', 'ANTLR With ObjectiveC Target', ('antlr-objc',), ('*.G', '*.g'), ()),
'AntlrPerlLexer': ('pygments.lexers.parsers', 'ANTLR With Perl Target', ('antlr-perl',), ('*.G', '*.g'), ()),
'AntlrPythonLexer': ('pygments.lexers.parsers', 'ANTLR With Python Target', ('antlr-python',), ('*.G', '*.g'), ()),
'AntlrRubyLexer': ('pygments.lexers.parsers', 'ANTLR With Ruby Target', ('antlr-ruby', 'antlr-rb'), ('*.G', '*.g'), ()),
'ApacheConfLexer': ('pygments.lexers.configs', 'ApacheConf', ('apacheconf', 'aconf', 'apache'), ('.htaccess', 'apache.conf', 'apache2.conf'), ('text/x-apacheconf',)),
'AppleScriptLexer': ('pygments.lexers.scripting', 'AppleScript', ('applescript',), ('*.applescript',), ()),
'ArduinoLexer': ('pygments.lexers.c_like', 'Arduino', ('arduino',), ('*.ino',), ('text/x-arduino',)),
'AspectJLexer': ('pygments.lexers.jvm', 'AspectJ', ('aspectj',), ('*.aj',), ('text/x-aspectj',)),
'AsymptoteLexer': ('pygments.lexers.graphics', 'Asymptote', ('asy', 'asymptote'), ('*.asy',), ('text/x-asymptote',)),
'AutoItLexer': ('pygments.lexers.automation', 'AutoIt', ('autoit',), ('*.au3',), ('text/x-autoit',)),
'AutohotkeyLexer': ('pygments.lexers.automation', 'autohotkey', ('ahk', 'autohotkey'), ('*.ahk', '*.ahkl'), ('text/x-autohotkey',)),
'AwkLexer': ('pygments.lexers.textedit', 'Awk', ('awk', 'gawk', 'mawk', 'nawk'), ('*.awk',), ('application/x-awk',)),
'BBCodeLexer': ('pygments.lexers.markup', 'BBCode', ('bbcode',), (), ('text/x-bbcode',)),
'BaseMakefileLexer': ('pygments.lexers.make', 'Base Makefile', ('basemake',), (), ()),
'BashLexer': ('pygments.lexers.shell', 'Bash', ('bash', 'sh', 'ksh', 'shell'), ('*.sh', '*.ksh', '*.bash', '*.ebuild', '*.eclass', '.bashrc', 'bashrc', '.bash_*', 'bash_*', 'PKGBUILD'), ('application/x-sh', 'application/x-shellscript')),
'BashSessionLexer': ('pygments.lexers.shell', 'Bash Session', ('console',), ('*.sh-session',), ('application/x-shell-session',)),
'BatchLexer': ('pygments.lexers.shell', 'Batchfile', ('bat', 'batch', 'dosbatch', 'winbatch'), ('*.bat', '*.cmd'), ('application/x-dos-batch',)),
'BefungeLexer': ('pygments.lexers.esoteric', 'Befunge', ('befunge',), ('*.befunge',), ('application/x-befunge',)),
'BlitzBasicLexer': ('pygments.lexers.basic', 'BlitzBasic', ('blitzbasic', 'b3d', 'bplus'), ('*.bb', '*.decls'), ('text/x-bb',)),
'BlitzMaxLexer': ('pygments.lexers.basic', 'BlitzMax', ('blitzmax', 'bmax'), ('*.bmx',), ('text/x-bmx',)),
'BooLexer': ('pygments.lexers.dotnet', 'Boo', ('boo',), ('*.boo',), ('text/x-boo',)),
'BrainfuckLexer': ('pygments.lexers.esoteric', 'Brainfuck', ('brainfuck', 'bf'), ('*.bf', '*.b'), ('application/x-brainfuck',)),
'BroLexer': ('pygments.lexers.dsls', 'Bro', ('bro',), ('*.bro',), ()),
'BugsLexer': ('pygments.lexers.modeling', 'BUGS', ('bugs', 'winbugs', 'openbugs'), ('*.bug',), ()),
'CLexer': ('pygments.lexers.c_cpp', 'C', ('c',), ('*.c', '*.h', '*.idc'), ('text/x-chdr', 'text/x-csrc')),
'CMakeLexer': ('pygments.lexers.make', 'CMake', ('cmake',), ('*.cmake', 'CMakeLists.txt'), ('text/x-cmake',)),
'CObjdumpLexer': ('pygments.lexers.asm', 'c-objdump', ('c-objdump',), ('*.c-objdump',), ('text/x-c-objdump',)),
    'CSharpAspxLexer': ('pygments.lexers.dotnet', 'aspx-cs', ('aspx-cs',), ('*.aspx', '*.asax', '*.ascx', '*.ashx', '*.asmx', '*.axd'), ()),
'CSharpLexer': ('pygments.lexers.dotnet', 'C#', ('csharp', 'c#'), ('*.cs',), ('text/x-csharp',)),
'Ca65Lexer': ('pygments.lexers.asm', 'ca65 assembler', ('ca65',), ('*.s',), ()),
'CbmBasicV2Lexer': ('pygments.lexers.basic', 'CBM BASIC V2', ('cbmbas',), ('*.bas',), ()),
'CeylonLexer': ('pygments.lexers.jvm', 'Ceylon', ('ceylon',), ('*.ceylon',), ('text/x-ceylon',)),
'Cfengine3Lexer': ('pygments.lexers.configs', 'CFEngine3', ('cfengine3', 'cf3'), ('*.cf',), ()),
'ChaiscriptLexer': ('pygments.lexers.scripting', 'ChaiScript', ('chai', 'chaiscript'), ('*.chai',), ('text/x-chaiscript', 'application/x-chaiscript')),
'ChapelLexer': ('pygments.lexers.chapel', 'Chapel', ('chapel', 'chpl'), ('*.chpl',), ()),
'CheetahHtmlLexer': ('pygments.lexers.templates', 'HTML+Cheetah', ('html+cheetah', 'html+spitfire', 'htmlcheetah'), (), ('text/html+cheetah', 'text/html+spitfire')),
'CheetahJavascriptLexer': ('pygments.lexers.templates', 'JavaScript+Cheetah', ('js+cheetah', 'javascript+cheetah', 'js+spitfire', 'javascript+spitfire'), (), ('application/x-javascript+cheetah', 'text/x-javascript+cheetah', 'text/javascript+cheetah', 'application/x-javascript+spitfire', 'text/x-javascript+spitfire', 'text/javascript+spitfire')),
'CheetahLexer': ('pygments.lexers.templates', 'Cheetah', ('cheetah', 'spitfire'), ('*.tmpl', '*.spt'), ('application/x-cheetah', 'application/x-spitfire')),
'CheetahXmlLexer': ('pygments.lexers.templates', 'XML+Cheetah', ('xml+cheetah', 'xml+spitfire'), (), ('application/xml+cheetah', 'application/xml+spitfire')),
'CirruLexer': ('pygments.lexers.webmisc', 'Cirru', ('cirru',), ('*.cirru',), ('text/x-cirru',)),
'ClayLexer': ('pygments.lexers.c_like', 'Clay', ('clay',), ('*.clay',), ('text/x-clay',)),
'ClojureLexer': ('pygments.lexers.jvm', 'Clojure', ('clojure', 'clj'), ('*.clj',), ('text/x-clojure', 'application/x-clojure')),
'ClojureScriptLexer': ('pygments.lexers.jvm', 'ClojureScript', ('clojurescript', 'cljs'), ('*.cljs',), ('text/x-clojurescript', 'application/x-clojurescript')),
'CobolFreeformatLexer': ('pygments.lexers.business', 'COBOLFree', ('cobolfree',), ('*.cbl', '*.CBL'), ()),
'CobolLexer': ('pygments.lexers.business', 'COBOL', ('cobol',), ('*.cob', '*.COB', '*.cpy', '*.CPY'), ('text/x-cobol',)),
'CoffeeScriptLexer': ('pygments.lexers.javascript', 'CoffeeScript', ('coffee-script', 'coffeescript', 'coffee'), ('*.coffee',), ('text/coffeescript',)),
'ColdfusionCFCLexer': ('pygments.lexers.templates', 'Coldfusion CFC', ('cfc',), ('*.cfc',), ()),
'ColdfusionHtmlLexer': ('pygments.lexers.templates', 'Coldfusion HTML', ('cfm',), ('*.cfm', '*.cfml
|
yancz1989/lr_semi
|
common/util.py
|
Python
|
mit
| 1,092
| 0.016484
|
import subprocess
import os
import errno
def download_file(url, local_fname=None, force_write=False):
# requests is not default installed
import requests
if local_fname is None:
local_fname = url.split('/')[-1]
    if not force_write and os.path.exists(local_fname):
return local_fname
dir_name = os.path.dirname(local_fname)
if dir_name != "":
if not os.path.exists(dir_name):
            try:  # try to create the directory if it doesn't exist
os.makedirs(dir_name)
except OSError as exc:
                if exc.errno != errno.EEXIST:
raise
r = requests.get(url, stream=True)
assert r.status_code == 200, "failed to open %s" % url
with open(local_fname, 'wb') as f:
for chunk in r.iter_content(chunk_size=1024):
if chunk: # filter out keep-alive new chunks
f.write(chunk)
return local_fname
def get_gpus():
"""
return a list of GPUs
"""
try:
re = subprocess.check_output(["nvidia-smi", "-L"], universal_newlines=True)
except OSError:
return []
return range(len([i for i in re.split('\n') if 'GPU' in i]))
|
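A short usage sketch for the helpers above; the URL and target path are placeholders:

# Hypothetical usage: download once into ./data/, reuse the file on later runs.
path = download_file('https://example.com/model/weights.params',
                     local_fname='data/weights.params')
print('saved to', path)
print('visible GPUs:', list(get_gpus()))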
vzer/ToughRADIUS
|
toughradius/radiusd/plugins/auth_user_filter.py
|
Python
|
agpl-3.0
| 785
| 0.017834
|
#!/usr/bin/env python
#coding=utf-8
from toughradius.radiusd.plugins import error_auth
from toughradius.radiusd import utils
def process(req=None,resp=None,user=None,radiusd=None,**kwargs):
store = radiusd.store
if store.is_white_roster(req.get_mac_addr()):
return resp
if not user:
        return error_auth(resp,'user %s not exists'%req.get_user_name())
if store.get_param("radiusd_bypass") == '1':
if not req.is_valid_pwd(utils.decrypt(user['password'])):
return error_auth(resp, 'user password not match')
if user['status'] == 4:
resp['Framed-Pool'] = store.get_param("expire_addrpool")
return resp
if user['status'] in (0,2,3):
return error_auth(resp,'user status not ok')
return resp
|
codingsnippets/Gooey
|
gooey/gui/containers/application.py
|
Python
|
mit
| 8,890
| 0.002362
|
"""
Primary orchestration and control point for Gooey.
"""
import sys
from itertools import chain
import wx
from gooey.gui import events
from gooey.gui.components.header import FrameHeader
from gooey.gui.components.footer import Footer
from gooey.gui.util import wx_util
from gooey.gui.components.config import ConfigPage, TabbedConfigPage
from gooey.gui.components.sidebar import Sidebar
from gooey.gui.components.tabbar import Tabbar
from gooey.util.functional import getin, assoc, flatmap, compact
from gooey.python_bindings import constants
from gooey.gui.pubsub import pub
from gooey.gui import cli
from gooey.gui.components.console import Console
from gooey.gui.lang.i18n import _
from gooey.gui.processor import ProcessController
from gooey.gui.util.wx_util import transactUI
from gooey.gui.components import modals
from gooey.gui import seeder
class GooeyApplication(wx.Frame):
"""
Main window for Gooey.
"""
def __init__(self, buildSpec, *args, **kwargs):
super(GooeyApplication, self).__init__(None, *args, **kwargs)
self._state = {}
self.buildSpec = buildSpec
self.header = FrameHeader(self, buildSpec)
self.configs = self.buildConfigPanels(self)
self.navbar = self.buildNavigation()
self.footer = Footer(self, buildSpec)
self.console = Console(self, buildSpec)
self.layoutComponent()
self.clientRunner = ProcessController(
self.buildSpec.get('progress_regex'),
self.buildSpec.get('progress_expr'),
self.buildSpec.get('encoding')
)
pub.subscribe(events.WINDOW_START, self.onStart)
pub.subscribe(events.WINDOW_RESTART, self.onStart)
pub.subscribe(events.WINDOW_STOP, self.onStopExecution)
pub.subscribe(events.WINDOW_CLOSE, self.onClose)
pub.subscribe(events.WINDOW_CANCEL, self.onCancel)
pub.subscribe(events.WINDOW_EDIT, self.onEdit)
pub.subscribe(events.CONSOLE_UPDATE, self.console.logOutput)
pub.subscribe(events.EXECUTION_COMPLETE, self.onComplete)
pub.subscribe(events.PROGRESS_UPDATE, self.footer.updateProgressBar)
# Top level wx close event
self.Bind(wx.EVT_CLOSE, self.onClose)
if self.buildSpec['poll_external_updates']:
self.fetchExternalUpdates()
if self.buildSpec.get('auto_start', False):
self.onStart()
    def onStart(self, *args, **kwargs):
"""
Verify user input and kick off the client's program if valid
"""
with transactUI(self):
config = self.navbar.getActiveConfig()
config.resetErrors()
if config.isValid():
self.clientRunner.run(self.buildCliString())
self.showConsole()
else:
config.displayErrors()
self.Layout()
def onEdit(self):
"""Return the user to the settings screen for further editing"""
with transactUI(self):
if self.buildSpec['poll_external_updates']:
self.fetchExternalUpdates()
self.showSettings()
def buildCliString(self):
"""
Collect all of the required information from the config screen and
build a CLI string which can be used to invoke the client program
"""
config = self.navbar.getActiveConfig()
group = self.buildSpec['widgets'][self.navbar.getSelectedGroup()]
positional = config.getPositionalArgs()
optional = config.getOptionalArgs()
print(cli.buildCliString(
self.buildSpec['target'],
group['command'],
positional,
optional
))
return cli.buildCliString(
self.buildSpec['target'],
group['command'],
positional,
optional
)
def onComplete(self, *args, **kwargs):
"""
Display the appropriate screen based on the success/fail of the
host program
"""
with transactUI(self):
if self.clientRunner.was_success():
if self.buildSpec.get('return_to_config', False):
self.showSettings()
else:
self.showSuccess()
if self.buildSpec.get('show_success_modal', True):
wx.CallAfter(modals.showSuccess)
else:
if self.clientRunner.wasForcefullyStopped:
self.showForceStopped()
else:
self.showError()
wx.CallAfter(modals.showFailure)
def onStopExecution(self):
"""Displays a scary message and then force-quits the executing
client code if the user accepts"""
if self.buildSpec['show_stop_warning'] and modals.confirmForceStop():
self.clientRunner.stop()
def fetchExternalUpdates(self):
"""
!Experimental!
Calls out to the client code requesting seed values to use in the UI
!Experimental!
"""
seeds = seeder.fetchDynamicProperties(
self.buildSpec['target'],
self.buildSpec['encoding']
)
for config in self.configs:
config.seedUI(seeds)
def onCancel(self):
"""Close the program after confirming"""
if modals.confirmExit():
self.onClose()
def onClose(self, *args, **kwargs):
"""Cleanup the top level WxFrame and shutdown the process"""
self.Destroy()
sys.exit()
def layoutComponent(self):
sizer = wx.BoxSizer(wx.VERTICAL)
sizer.Add(self.header, 0, wx.EXPAND)
sizer.Add(wx_util.horizontal_rule(self), 0, wx.EXPAND)
sizer.Add(self.navbar, 1, wx.EXPAND)
sizer.Add(self.console, 1, wx.EXPAND)
sizer.Add(wx_util.horizontal_rule(self), 0, wx.EXPAND)
sizer.Add(self.footer, 0, wx.EXPAND)
self.SetMinSize((400, 300))
self.SetSize(self.buildSpec['default_size'])
self.SetSizer(sizer)
self.console.Hide()
self.Layout()
self.SetIcon(wx.Icon(self.buildSpec['images']['programIcon'], wx.BITMAP_TYPE_ICO))
def buildNavigation(self):
"""
Chooses the appropriate layout navigation component based on user prefs
"""
if self.buildSpec['navigation'] == constants.TABBED:
navigation = Tabbar(self, self.buildSpec, self.configs)
else:
navigation = Sidebar(self, self.buildSpec, self.configs)
if self.buildSpec['navigation'] == constants.HIDDEN:
navigation.Hide()
return navigation
def buildConfigPanels(self, parent):
page_class = TabbedConfigPage if self.buildSpec['tabbed_groups'] else ConfigPage
return [page_class(parent, widgets)
for widgets in self.buildSpec['widgets'].values()]
def showSettings(self):
self.navbar.Show(True)
self.console.Show(False)
self.header.setImage('settings_img')
self.header.setTitle(_("settings_title"))
self.header.setSubtitle(self.buildSpec['program_description'])
self.footer.showButtons('cancel_button', 'start_button')
self.footer.progress_bar.Show(False)
def showConsole(self):
self.navbar.Show(False)
self.console.Show(True)
self.header.setImage('running_img')
self.header.setTitle(_("running_title"))
self.header.setSubtitle(_('running_msg'))
self.footer.showButtons('stop_button')
self.footer.progress_bar.Show(True)
if not self.buildSpec['progress_regex']:
self.footer.progress_bar.Pulse()
def showComplete(self):
self.navbar.Show(False)
self.console.Show(True)
self.footer.showButtons('edit_button', 'restart_button', 'close_button')
self.footer.progress_bar.Show(False)
|
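Almost all of the control flow above runs through publish/subscribe: components fire named events and GooeyApplication registers handlers for them. A minimal sketch of that pattern with a hypothetical dict-based dispatcher (Gooey's real pub module differs in detail):

class Pub(object):
    """Toy event bus: subscribe handlers to string event names."""
    def __init__(self):
        self._subscribers = {}

    def subscribe(self, event, handler):
        self._subscribers.setdefault(event, []).append(handler)

    def send_message(self, event, **payload):
        for handler in self._subscribers.get(event, []):
            handler(**payload)

pub = Pub()
pub.subscribe('window.start', lambda **kw: print('starting with', kw))
pub.send_message('window.start', config='default')  # -> starting with {'config': 'default'}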
Tritlo/Stundatafla
|
StundatofluUtil.py
|
Python
|
gpl-2.0
| 246
| 0.00813
|
def getLocations(listrep, checkFunc):
t = listrep
locs = []
for i in range(len(t)):
for j in range(len(t[0])):
locs.append((i,j))
places = filter(lambda (i,j): checkFunc(listrep[i][j]), locs)
    return places
|
tiberiucorbu/av-website
|
main/model/__init__.py
|
Python
|
mit
| 314
| 0
|
# coding: utf-8
from .base import Base
from .config_auth import ConfigAuth
from .config import Config
from .user import User
from .resource import Resource
from .common import VisibilityFlags
from .meta import ItemMeta
from .meta import PageMeta
from .story import Story
from .module_config import ModuleConfig
|
xguse/outspline
|
src/outspline/info/extensions/organism_alarms.py
|
Python
|
gpl-3.0
| 1,392
| 0.000718
|
# Outspline - A highly modular and extensible outliner.
# Copyright (C) 2011-2014 Dario Giovannetti <dev@dariogiovannetti.net>
#
# This file is part of Outspline.
#
# Outspline is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Outspline is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Outspline. If not, see <http://www.gnu.org/licenses/>.
authors = ("Dario Giovannetti <dev@dariogiovannetti.net>", )
version = "1.3"
description = "Adds the backend for managing alarm events."
website = "ht
|
tps://kynikos.github.io/outspline/"
affects_database = True
provides_tables = ("AlarmsProperties", "Alarms", "CopyAlarms", "AlarmsOffLog")
dependencies = (("core", 4), ("extensions.organism", 2),
("extens
|
ions.organism_timer", 1))
optional_dependencies = (("extensions.copypaste", 2), )
database_dependency_group_1 = (("core", 4), ("extensions.organism", 2),
("extensions.organism_timer", 1), ("extensions.organism_alarms", 1))
|
acsone/margin-analysis
|
product_cost_incl_costs_with_bom/product.py
|
Python
|
agpl-3.0
| 2,373
| 0.00295
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright 2012 Camptocamp SA
# Copyright 2012 Endian Solutions BV
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv.orm import Model
from openerp.osv import fields
import decimal_precision as dp
class product_product(Model):
_inherit = 'product.product'
def _cost_price(self, cr, uid, ids, field_name, arg, context=None):
if context is None:
context = {}
product_uom = context.get('product_uom')
bom_properties = context.get('properties')
res = self._compute_purchase_price(cr, uid, ids, product_uom,
bom_properties, context=context)
for self_obj in self.browse(cr, uid, ids, context=context):
res[self_obj.id] = res[self_obj.id] + self_obj.fixed_cost_price
return res
_columns = {
'fixed_cost_price': fields.float(
'Fixed Cost Price', digits_compute = dp.get_precision('Sale Price')),
'cost_price': fields.function(_cost_price,
string='Cost Price (incl. BoM)',
digits_compute=dp.get_precision('Sale Price'),
help="The cost price is the standard price or, if the product has a BoM, "
"the sum of all standard prices of its components. It also takes care of the "
"BoM costing like cost per cylce.")
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
cyandterry/Python-Study
|
Ninja/Concept_Implement/OOD.py
|
Python
|
mit
| 134
| 0
|
# CC150 8.1
# Design the data structure for a generic deck of cards.
class suit():
def __init__(self, v):
self.value = v
|
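The record above stops at a bare suit wrapper. One possible fuller answer to the CC150 prompt, sketched as plain classes (the rank encoding and method names are design choices, not the book's reference solution):

import random

class Suit(object):
    CLUB, DIAMOND, HEART, SPADE = range(4)

class Card(object):
    def __init__(self, rank, suit):
        self.rank = rank   # 1 (ace) through 13 (king)
        self.suit = suit   # one of the Suit constants

class Deck(object):
    def __init__(self):
        self.cards = [Card(r, s) for s in range(4) for r in range(1, 14)]
        self.dealt = 0     # index of the next card to deal

    def shuffle(self):
        random.shuffle(self.cards)
        self.dealt = 0

    def deal(self):
        if self.dealt >= len(self.cards):
            return None    # deck exhausted
        card = self.cards[self.dealt]
        self.dealt += 1
        return card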
vgrem/Office365-REST-Python-Client
|
office365/sharepoint/webs/web_collection.py
|
Python
|
mit
| 1,240
| 0.004032
|
from office365.runtime.queries.service_operation_query import ServiceOperationQuery
from office365.sharepoint.base_entity_collection import BaseEntityCollection
from office365.sharepoint.webs.web import Web
class WebCollection(BaseEntityCollection):
"""Web collection"""
def __init__(self, context, resource_path=None, parent_web=None):
"""
:type parent_web: Web
"""
super(WebCollection, self).__init__(context, Web, resource_path, parent_web)
def add(self, web_creation_information):
"""
Create WebSite
:type web_creation_information: office365.sharepoint.webs.web_creation_information.WebCreationInformation
"""
        target_web = Web(self.context)
self.add_child(target_web)
qry = ServiceOperationQuery(self, "add", None, web_creation_information, "parameters", target_web)
self.context.add_query(qry)
return target_web
@property
def resource_url(self):
val = super(WebCollection, self).resource_url
        parent_web_url = self._parent.get_property("Url")
if parent_web_url is not None:
val = val.replace(self.context.service_root_url(), parent_web_url + '/_api')
return val
|
alonsopg/AuthorProfiling
|
src/ef_list_punctuation.py
|
Python
|
gpl-2.0
| 3,045
| 0.018421
|
#!/usr/bin/env python
# -*- coding: utf-8
from __future__ import print_function
import argparse
import codecs
import cPickle as pickle
import numpy as np
import os
from load_tweets import load_tweets
NAME='ef_list_punctuation'
prefix='list_punctuation'
if __name__ == "__main__":
    # Command-line options
p = argparse.ArgumentParser(NAME)
p.add_argument("DIR",default=None,
action="store", help="Directory with corpus")
p.add_argument("LIST1",default=None,
action="store", help="File with list of emoticons")
p.add_argument("-d", "--dir",
action="store", dest="dir",default="feats",
help="Default directory for features [feats]")
p.add_argument("-p", "--pref",
action="store", dest="pref",default=prefix,
help="Prefix to save the file of features %s"%prefix)
p.add_argument("--mix",
action="store_true", dest="mix",default=True,
help="Mix tweets into pefiles")
p.add_argument("--format",
action="store_true", dest="format",default="pan15",
help="Change to pan14 to use format from 2015 [feats]")
p.add_argument("-v", "--verbose",
action="store_true", dest="verbose",
help="Verbose mode [Off]")
p.add_argument("--stopwords", default=None,
action="store", dest="stopwords",
help="List of stop words [data/stopwords.txt]")
opts = p.parse_args()
if opts.verbose:
def verbose(*args):
print(*args)
else:
verbose = lambda *a: None
    # Collect the tweets and their identifiers (tweet id and user id)
tweets,ids=load_tweets(opts.DIR,opts.format,mix=opts.mix)
    # Print some information about the tweets
if opts.verbose:
for i,tweet in enumerate(tweets[:10]):
verbose('Tweet example',i+1,tweet[:100])
verbose("Total tweets : ",len(tweets))
try:
verbose("Total usuarios : ",len(set([id for x,id in ids])))
except ValueError:
verbose("Total usuarios : ",len(ids))
    # Compute the features
    # - Load word list one
list_of_words1 = [line.strip() for line in codecs.open(opts.LIST1,encoding='utf-8') if
len(line.strip())>0]
counts=[]
for usuario in tweets:
usuario=usuario
vec1=[usuario.count(item) for item in list_of_words1]
vec=vec1
counts.append(vec)
    # - Count the words in the tweets
feats = np.asarray(counts)
    # Save the feature matrix
with open(os.path.join(opts.dir,opts.pref+'.dat'),'wb') as idxf:
pickle.dump(feats, idxf, pickle.HIGHEST_PROTOCOL)
    # Print information about the matrix
verbose("Total de features :",feats.shape[1])
verbose("Total de renglones:",feats.shape[0])
    # Save the row indices of the matrix (user or tweet, user)
with open(os.path.join(opts.dir,opts.pref+'.idx'),'wb') as idxf:
pickle.dump(ids, idxf, pickle.HIGHEST_PROTOCOL)
|
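One subtlety in the counting loop above: str.count matches raw substrings, which is what you want for punctuation marks but over-counts when the list holds words ('a' also matches inside 'and'). A hedged token-based alternative for word lists:

import re

def count_tokens(text, vocabulary):
    # Count whole tokens rather than raw substrings.
    tokens = re.findall(r'\w+', text.lower(), re.UNICODE)
    return [tokens.count(word) for word in vocabulary]

# counts = [count_tokens(usuario, list_of_words1) for usuario in tweets]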
novel/lc-tools
|
test/__init__.py
|
Python
|
apache-2.0
| 212
| 0.004717
|
"""Test package"""
import os.path
import sys
def setup_package():
# insert top-level dir to python path
# so we could easily import stuff from
# tests
sys.path.insert(0, os.path.abspath("../"))
|
sdu14SoftwareEngineering/GameOfLife_WEB
|
game/tool/room_tool.py
|
Python
|
apache-2.0
| 1,016
| 0
|
from game.models import User
# All rooms kept in memory
rooms_list = []
class Room(object):
    id = int  # room id
    name = str  # room name
    owner = int  # owner (user) id
    users = []  # member ids
    users_status = {}  # member ready statuses
    status = False  # room status
def __init__(self, id, name):
self.id = id
self.name = name
# Get all rooms
def get_all_rooms():
return rooms_list
# Get a room by id
def get_room_by_id(id):
    for room in rooms_list:
        if room.id == id:
            return room
return None
# Get a room by name
def get_room_by_name(name):
for room in rooms_list:
if room.name == name:
return room
return None
# Add a room
def add_room(new_room):
rooms_list.append(new_room)
return 1
# Delete a room
def del_room(id):
for room in rooms_list:
if room.id == id:
rooms_list.remove(room)
return 1
return -1
|
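The lookup helpers above scan rooms_list on every call. A sketch of a dict-backed registry with the same operations and O(1) lookups, assuming room ids are unique:

# Alternative registry keyed by room id (ids assumed unique).
rooms_by_id = {}

def add_room(new_room):
    rooms_by_id[new_room.id] = new_room
    return 1

def get_room_by_id(id):
    return rooms_by_id.get(id)

def del_room(id):
    return 1 if rooms_by_id.pop(id, None) is not None else -1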