repo_name
stringlengths 5
100
| path
stringlengths 4
231
| language
stringclasses 1
value | license
stringclasses 15
values | size
int64 6
947k
| score
float64 0
0.34
| prefix
stringlengths 0
8.16k
| middle
stringlengths 3
512
| suffix
stringlengths 0
8.17k
|
|---|---|---|---|---|---|---|---|---|
itsWeller/SteamCategorizer
|
categorizer.py
|
Python
|
mit
| 2,724
| 0.008811
|
import vdf
import urllib2
import json
import time
import shelve
filters = ['basic','genres']
URL_BASE = 'http://store.steampowered.com/api/appdetails?appids='
FILTERS = '&filters=' + ','.join(filters)
OWNED_GAMES_URL = 'http://api.steampowered.com/IPlayerService/GetOwnedGames/v0001/?key='
ERR_STR = '\t'*4 + 'Error: '
KEY = 'E84A669BBFC93FD2DDBE9DDC4859CB53'
STEAM_ID = '76561197972789961'
shelve = shelve.open('genre_db')
def find_key(d,k):
for key in d:
if key == k:
return d[key]
return find_key(d[key],k)
def fetch_genres(app_id):
app = str(app_id)
app_genres = []
app_id = str(app_id)
if app_id in shelve:
print app_id + ' exists in DB. Using local copy.'
return shelve[app_id]
time.sleep(1)
try:
res = urllib2.urlopen(URL_BASE+str(app_id)+FILTERS)
data = json.load(res)
except:
print ERR_STR + 'Unable to fetch app: ' + app_id
#continue
return None
if not data[app_id]['success']:
print ERR_STR + 'Response reports failure: ' + app_id
return None
app_name = data[app]['data']
|
['name']
if data[app]['data']['type'] != "game" and data[app]['data']['type'] != "advertising":
print ERR_STR + 'App ' + app_name + ' n
|
ot of type game: ' + app_id + ' ' + data[app]['data']['type']
return None
if 'genres' not in data[app]['data']:
print ERR_STR + 'App ' + app_name + ' - Genres unavailble for app: ' + app_id
return None
for entry in data[app]['data']['genres']:
for field in entry:
if field == 'description':
app_genres += [entry[field]]
print app_name +': ' + app_id + '\n\t' + "New Tags: " + ', '.join(app_genres)
shelve[app_id] = app_genres
return app_genres
def fetch_game_ids():
id_list = []
try:
res = urllib2.urlopen(OWNED_GAMES_URL + KEY + '&steamid=' + STEAM_ID + '&format=json')
data = json.load(res)
except:
print ERR_STR + 'Unable to fetch game list: ' + 'N/A'
for entry in data['response']['games']:
id_list.append(entry['appid'])
return id_list
f = vdf.parse(open('sharedconfig.test.vdf'))
d = find_key(f,'apps')
game_list = fetch_game_ids()
for app in game_list:
built_tags_dict = {}
genre_list = fetch_genres(app)
if not genre_list: continue
for idx,genre in enumerate(genre_list):
built_tags_dict[str(idx)] = str(genre)
if app not in d:
d[app] = {}
d[app]['tags'] = built_tags_dict
f['UserLocalConfigStore']['Software']['Valve']['Steam']['apps'] = d
s = vdf.dump(f,pretty=True)
new_vdf = open('sharedconfig.vdf','w')
new_vdf.write(s)
shelve.close()
|
tedle/acdb
|
acdb/acdb/urls.py
|
Python
|
mit
| 163
| 0
|
from django.conf.urls import patterns, include, url
urlpatterns = patterns(
'',
url(r'^api/', include('api.urls')),
url(r'^', include('base.urls'))
)
| ||
jfozard/pyvol
|
pyvol/mesh/algo.py
|
Python
|
mit
| 602
| 0.026578
|
import numpy as np
import scipy.linalg as la
def calculate_vertex_normals(verts,
|
tris):
v_array = np.array(verts)
tri_array = np.array(tris, dtype=int)
tri_pts = v_array[tri_array]
n =
|
np.cross( tri_pts[:,1] - tri_pts[:,0],
tri_pts[:,2] - tri_pts[:,0])
v_normals = np.zeros(v_array.shape)
for i in range(tri_array.shape[0]):
for j in tris[i]:
v_normals[j,:] += n[i,:]
nrms = np.sqrt(v_normals[:,0]**2 + v_normals[:,1]**2 + v_normals[:,2]**2)
v_normals = v_normals / nrms.reshape((-1,1))
return v_normals
|
k1nk33/NukeBox2000
|
docs/conf.py
|
Python
|
mit
| 8,447
| 0.005327
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# nukebox2000 documentation build configuration file, created by
# sphinx-quickstart on Tue Jul 9 22:26:36 2013.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# If extensions (or modules to document with autodoc) are in another
# directory, add these directories to sys.path here. If the directory is
# relative to the documentation root, use os.path.abspath to make it
# absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# Get the project root dir, which is the parent dir of this
cwd = os.getcwd()
project_root = os.path.dirname(cwd)
# Insert the project root dir as the first element in the PYTHONPATH.
# This lets us ensure that the source package is imported, and that its
# version is used.
sys.path.insert(0, project_root)
import nukebox2000
# -- General configuration ---------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'NukeBox2000'
copyright = u"2016, Darren Dowdall"
# The version info for the project you're documenting, acts as replacement
# for |version| and |release|, also used in various other places throughout
# the built documents.
#
# The short X.Y version.
version = nukebox2000.__version__
# The full version, including alpha/beta/rc tags.
release = nukebox2000.__version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to
# some non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built
# documents.
#keep_warnings = False
# -- Options for HTML output -------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a
# theme further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as
# html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the
# top of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon
# of the docs. This file should be a Windows icon file (.ico) being
# 16x16 or 32x32 pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets)
# here, relative to this directory. They are copied after the builtin
# static files, so a file named "default.css" will overwrite the builtin
# "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page
# bottom, using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names
# to template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer.
# Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer.
# Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages
# will contain a <link> tag referring to it. The value of this option
# must be the base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'nukebox2000doc'
# -- Options for LaTeX output -----------
|
-------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the d
|
ocument tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass
# [howto/manual]).
latex_documents = [
('index', 'nukebox2000.tex',
u'NukeBox2000 Documentation',
u'Darren Dowdall', 'manual'),
]
# The name of an image file (relative to this directory) to place at
# the top of the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings
# are parts, not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'nukebox2000',
u'NukeBox2000 Documentation',
[u'Darren Dowdall'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ----------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'nukebox2000',
u'NukeBox2000 Documentation',
u'Darren Dowdall',
'nukebox2000',
'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
#
|
lmazuel/azure-sdk-for-python
|
azure-mgmt-compute/azure/mgmt/compute/v2017_12_01/models/disk_encryption_settings.py
|
Python
|
mit
| 1,716
| 0.001166
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class DiskEncryptionSettings(Model):
"""Describes a Encryption Settings for a Disk.
:param disk_encryption_key: Specifies the location of the disk encryption
key, which is a Key Vault Secret.
:type disk_encryption_key:
~azure.mgmt.compute.v2017_12_01.models.KeyVaultSecretReference
:param key_encryption_key: Specifies the location of the key encrypt
|
ion
key in Key Vault.
:type key_encryption_key:
~azure.mgmt.compute.v2017_12_01.models.KeyVaultKeyReference
:param enabled: Specifies whether disk encryption should be enabled on the
virtual machine.
:type enabled: bool
"""
_attribute_map = {
'disk_encryption_key': {'key': 'diskEncryptionKey', 'type': 'KeyVaultSecretReference'},
'key_encryption_ke
|
y': {'key': 'keyEncryptionKey', 'type': 'KeyVaultKeyReference'},
'enabled': {'key': 'enabled', 'type': 'bool'},
}
def __init__(self, **kwargs):
super(DiskEncryptionSettings, self).__init__(**kwargs)
self.disk_encryption_key = kwargs.get('disk_encryption_key', None)
self.key_encryption_key = kwargs.get('key_encryption_key', None)
self.enabled = kwargs.get('enabled', None)
|
marrow/web.component.page
|
web/component/page/render.py
|
Python
|
mit
| 3,059
| 0.037267
|
# encoding: cinje
: import traceback
: from marrow.package.canonical import name
: log = __import__('logging').getLogger(__name__)
: def render_page_panel context, page, wrap
: from web.component.asset.render import render_asset_panel
: using render_asset_panel context, page, True
<li class="list-group-item" id="wc-blocks">
<h4>
Block Structure
<a href="#" style="display: inline-block; margin: -10px;"><sup style="display: inline-block; padding: 10px 10px 0;"><i class="fa fa-question-circle small"></i></sup></a>
</h4>
<menu class="" id="wc-page-blocks" data-asset="${context.asset.path}">
: for block in page.content
: use block._block_list_item
: end
</menu>
</li>
: end
: end
: def render_block context, page, block
: try
: use block.__html_stream__ context
: except
: log.exception("Error processing block: " + repr(block), extra=dict(block=block.id, page=page.id))
: if __debug__
<pre class="text-error"><code>${traceback.format_exc()}</code></pre>
: else
<b class="text-error">An unknown error occurred.</b>
: end
: end
: end
: def render_page_content context, page
# Load page content if not already loaded.
: content = page.content if page.content else page.__class__.objects.scalar('content').get(id=page.id)
: columns = False
: width = 12
: for block in page.content
: size = block.properties.get('
|
width', 12)
: width -= size
: if width and not columns
: columns = True
<div class="container row-fluid clearfix">
: end
: use render_block context, page, block
: if width <= 0
: width = 12
|
: if columns
: columns = False
</div>
: end
: end
: end
: if columns
</div>
: end
: end
: end
: def render_page context, asset
# First, we work out what the title should look like.
: title = [str(asset), str(context.croot)]
: if context.croot.properties.get('direction', 'rtl') == 'ltr'
: title.reverse()
: end
: title = context.croot.properties.get('separator', ' - ').join(title)
: title = title.upper() if context.croot.properties.get('titlecase', 'normal') == 'upper' else title
: classes = set()
: classes.update(context.croot.properties.get('cls', '').split())
: classes.update(asset.properties.get('cls', '').split())
: using context.theme context, title=title, styles=['/public/css/site.css'], scripts=['/public/js/site.js'], lang=context.lang, class_=classes
<article data-theme="${name(context.theme)}">
: flush
: for chunk in asset.__html_stream__(context)
: yield chunk
: end
</article>
: if not __debug__ and 'ua' in context.croot.properties
<script>
(function(i,s,o,g,r,a,m){i['GoogleAnalyticsObject']=r;i[r]=i[r]||function(){
(i[r].q=i[r].q||[]).push(arguments)},i[r].l=1*new Date();a=s.createElement(o),
m=s.getElementsByTagName(o)[0];a.async=1;a.src=g;m.parentNode.insertBefore(a,m)
})(window,document,'script','https://www.google-analytics.com/analytics.js','ga');
ga('create', '${context.croot.properties.ua}', 'auto');
ga('send', 'pageview');
</script>
: end
: end
: flush
: end
|
MTG/essentia
|
test/src/unittests/stats/test_minmax.py
|
Python
|
agpl-3.0
| 2,805
| 0.001783
|
#!/usr/bin/env python
# Copyright (C) 2006-2021 Music Technology Group - Universitat Pompeu Fabra
#
# This file is part of Essentia
#
# Essentia is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation (FSF), either version 3 of the License, or (at your
# option) any later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the Affero GNU General Public License
# version 3 along with this program. If not, see http://www.gnu.org/licenses/
from essentia.standard import MinMax
from essentia_test import TestCase, TextTestRunner, allTests
class TestMinMax(TestCase):
def testDefaultIsMin(self):
"""Making the Algorithm with no type defaults to min"""
value, index = MinMax()([3,1,2])
self.assertEqual(value, 1.0)
self.assertEqual(index, 1)
def testEmpty(self):
"""An empty input causes an exception"""
self.assertComputeFails(MinMax(), [])
self.assertComputeFails(MinMax(type="max"), [])
def testAllSame(self):
value, index = MinMax()([0]*10)
self.assertEqual(value, 0.0)
self.assertEqual(index, 0)
def testMultipleValues(self):
"""If the minimum or maximum value appears more than once in the list, return the first one"""
value, index = MinMax()([5, 1, 8, 4, 1, 9, 1])
self.assertEqual(value, 1.0)
self.assertEqual(index, 1)
value, index = MinMax(type="max")([9, 6, 5, 9, 8, 4, 9])
self.assertEqual(value, 9.0)
self.assertEqual(index, 0)
def testOne(self):
"""An array of a single value"""
value, index = MinMax()([100])
self.assertEqual(value, 100.0)
self.assertEqual(index, 0)
def testMin(self):
"""An array of normal numbers, finding minimum"""
value, index = MinMax()([5, 8, 4, 9, 1])
self.assertEqual(value, 1.0)
self.assertEqual(index, 4)
def testNegatives(self):
value, index = MinMax()([3, 7,
|
-45, 2, -1, 0])
self.assertEqual(value, -45.0)
self.assertEqual(index, 2)
def testMixedTypes(self):
value, index = MinMax()([4, 5, 3.3])
self.ass
|
ertAlmostEqual(value, 3.3)
self.assertEqual(index, 2)
def testMax(self):
value, index = MinMax(type="max")([3, 7, -45, 2, -1, 0])
self.assertEqual(value, 7)
self.assertEqual(index, 1)
suite = allTests(TestMinMax)
if __name__ == '__main__':
TextTestRunner(verbosity=2).run(suite)
|
soybean217/lora-python
|
UServer/userver/user/models.py
|
Python
|
mit
| 4,041
| 0.002475
|
from database.db_sql import db_sql as db
from sqlalchemy import PrimaryKeyConstraint
from itsdangerous import (TimedJSONWebSignatureSerializer as Serializer,
BadSignature, SignatureExpired)
from flask import current_app
from . import passwords
import time
from sqlalchemy.orm import relationship
class User(db.Model):
__bind_key__ = 'lorawan'
__table_args__ = {'schema': 'lorawan'}
__tablename__ = 'user'
id = db.Column(db.Integer, primary_key=True)
# User email information
email = db.Column(db.String(255), nullable=False, unique=True)
# User authentication information
password = db.Column(db.String(255), nullable=False, server_default='')
reset_password_token = db.Column(db.String(100), nullable=True)
confirmed_at = db.Column(db.DateTime())
confirm_email_token = db.Column(db.String(100), nullable=True)
# User information
active = db.Column('is_active', db.Boolean(), nullable=False, server_default='0')
first_name = d
|
b.Column(db.String(100), nullab
|
le=True)
last_name = db.Column(db.String(100), nullable=True)
# Relationships
roles = relationship('Role', secondary='lorawan.user_roles')
apps = relationship('Application')
gateways = relationship('Gateway')
def generate_auth_token(self, expiration=600):
s = Serializer(current_app.config['SECRET_KEY'], expires_in=expiration)
return s.dumps({'id': self.id})
@staticmethod
def verify_auth_token(token):
s = Serializer(current_app.config['SECRET_KEY'])
try:
data = s.loads(token)
except SignatureExpired:
return None # valid token, but expired
except BadSignature:
return None # invalid token
user = User.query.get(data['id'])
return user
def verify_password(self, password):
"""
Make it backward compatible to legacy password hash.
In addition, if such password were found, update the user's password field.
"""
hashed_password = self.password
verified = passwords.verify_password(current_app.user_manager, password, hashed_password)
return verified
def role_name_list(self):
name_list = []
for role in self.roles:
name_list.append(role.name)
return name_list
def obj_to_dict(self):
return {
'id': self.id,
'email': self.email,
'roles': self.role_name_list(),
'confirmed_at': str(self.confirmed_at),
'active': str(self.active),
'app_num': self.apps.__len__(),
'gateway_num': self.gateways.__len__()
}
# Define the Role data model
class Role(db.Model):
__bind_key__ = 'lorawan'
__table_args__ = {'schema': 'lorawan'}
__tablename__ = 'role'
id = db.Column(db.Integer(), primary_key=True)
name = db.Column(db.String(50), unique=True)
# Define the UserRoles data model
class UserRoles(db.Model):
__bind_key__ = 'lorawan'
__tablename__ = 'user_roles'
__table_args__ = {'schema': 'lorawan'}
db.Column(db.Integer(), primary_key=True)
user_id = db.Column(db.Integer(), db.ForeignKey('lorawan.user.id', ondelete='CASCADE'))
role_id = db.Column(db.Integer(), db.ForeignKey('lorawan.role.id', ondelete='CASCADE'))
PrimaryKeyConstraint(user_id, role_id)
class UserInvitation(db.Model):
__bind_key__ = 'lorawan'
__tablename__ = 'user_invite'
__table_args__ = {'schema': 'lorawan'}
id = db.Column(db.Integer, primary_key=True)
email = db.Column(db.String(255))
# save the user of the invitee
invited_by_user_id = db.Column(db.Integer, db.ForeignKey('lorawan.user.id'))
# token used for registration page to identify user registering
token = db.Column(db.String(100), nullable=False, server_default='')
used_by = db.Column(db.Integer, db.ForeignKey('lorawan.user.id'))
create_at = db.Column(db.DateTime(), nullable=False)
expired = db.Column(db.Boolean())
|
foxcarlos/pyganso
|
cliente_socket.py
|
Python
|
gpl-3.0
| 350
| 0.005714
|
import socket
import android
droid = android.Android()
servidor = droid.dialogGetInput('Ser
|
vidor', 'Ingrese el Servidor').result
server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
#'foxcarlos.no-ip.biz'
server.connect((servidor, 8000))
mensaje = droid.dialogGetInput('Mensaje', 'Ingrese el Mensaje a Enviar').result
server.send(mensaje)
| |
Winand/pandas
|
pandas/core/indexes/datetimes.py
|
Python
|
bsd-3-clause
| 79,180
| 0.000025
|
# pylint: disable=E1101
from __future__ import division
import operator
import warnings
from datetime import time, datetime
from datetime import timedelta
import numpy as np
from pandas.core.base import _shared_docs
from pandas.core.dtypes.common import (
_NS_DTYPE, _INT64_DTYPE,
is_object_dtype, is_datetime64_dtype,
is_datetimetz, is_dtype_equal,
is_integer, is_float,
is_integer_dtype,
is_datetime64_ns_dtype,
is_period_dtype,
is_bool_dtype,
is_string_dtype,
is_list_like,
is_scalar,
pandas_dtype,
_ensure_int64)
from pandas.core.dtypes.generic import ABCSeries
from pandas.core.dtypes.dtypes import DatetimeTZDtype
from pandas.core.dtypes.missing import isna
import pandas.core.dtypes.concat as _concat
from pandas.errors import PerformanceWarning
from pandas.core.common import _values_from_object, _maybe_box
from pandas.core.indexes.base import Index, _index_shared_docs
from pandas.core.indexes.numeric import Int64Index, Float64Index
import pandas.compat as compat
from pandas.tseries.frequencies import (
to_offset, get_period_alias,
Resolution)
from pandas.core.indexes.datetimelike import (
DatelikeOps, TimelikeOps, DatetimeIndexO
|
psMixin)
from pandas.tseries.offsets import DateOffset, generate_range, Tick, CDay
from pandas.core.tools.datetimes import (
parse_time_string, normalize_date, to_time)
from pandas.core.tools.timedeltas import to_timedelta
from pandas.util._decorators import (Appender, cache_readonly,
deprecate_kwarg, Substitution)
import pan
|
das.core.common as com
import pandas.tseries.offsets as offsets
import pandas.core.tools.datetimes as tools
from pandas._libs import (lib, index as libindex, tslib as libts,
algos as libalgos, join as libjoin,
Timestamp, period as libperiod)
from pandas._libs.tslibs import timezones
def _utc():
import pytz
return pytz.utc
# -------- some conversion wrapper functions
def _field_accessor(name, field, docstring=None):
def f(self):
values = self.asi8
if self.tz is not None:
utc = _utc()
if self.tz is not utc:
values = self._local_timestamps()
if field in self._bool_ops:
if field in ['is_month_start', 'is_month_end',
'is_quarter_start', 'is_quarter_end',
'is_year_start', 'is_year_end']:
month_kw = (self.freq.kwds.get('startingMonth',
self.freq.kwds.get('month', 12))
if self.freq else 12)
result = libts.get_start_end_field(values, field, self.freqstr,
month_kw)
else:
result = libts.get_date_field(values, field)
# these return a boolean by-definition
return result
if field in self._object_ops:
result = libts.get_date_name_field(values, field)
result = self._maybe_mask_results(result)
else:
result = libts.get_date_field(values, field)
result = self._maybe_mask_results(result, convert='float64')
return Index(result, name=self.name)
f.__name__ = name
f.__doc__ = docstring
return property(f)
def _dt_index_cmp(opname, nat_result=False):
"""
Wrap comparison operations to convert datetime-like to datetime64
"""
def wrapper(self, other):
func = getattr(super(DatetimeIndex, self), opname)
if (isinstance(other, datetime) or
isinstance(other, compat.string_types)):
other = _to_m8(other, tz=self.tz)
result = func(other)
if isna(other):
result.fill(nat_result)
else:
if isinstance(other, list):
other = DatetimeIndex(other)
elif not isinstance(other, (np.ndarray, Index, ABCSeries)):
other = _ensure_datetime64(other)
result = func(np.asarray(other))
result = _values_from_object(result)
if isinstance(other, Index):
o_mask = other.values.view('i8') == libts.iNaT
else:
o_mask = other.view('i8') == libts.iNaT
if o_mask.any():
result[o_mask] = nat_result
if self.hasnans:
result[self._isnan] = nat_result
# support of bool dtype indexers
if is_bool_dtype(result):
return result
return Index(result)
return wrapper
def _ensure_datetime64(other):
if isinstance(other, np.datetime64):
return other
raise TypeError('%s type object %s' % (type(other), str(other)))
_midnight = time(0, 0)
def _new_DatetimeIndex(cls, d):
""" This is called upon unpickling, rather than the default which doesn't
have arguments and breaks __new__ """
# data are already in UTC
# so need to localize
tz = d.pop('tz', None)
result = cls.__new__(cls, verify_integrity=False, **d)
if tz is not None:
result = result.tz_localize('UTC').tz_convert(tz)
return result
class DatetimeIndex(DatelikeOps, TimelikeOps, DatetimeIndexOpsMixin,
Int64Index):
"""
Immutable ndarray of datetime64 data, represented internally as int64, and
which can be boxed to Timestamp objects that are subclasses of datetime and
carry metadata such as frequency information.
Parameters
----------
data : array-like (1-dimensional), optional
Optional datetime-like data to construct index with
copy : bool
Make a copy of input ndarray
freq : string or pandas offset object, optional
One of pandas date offset strings or corresponding objects
start : starting value, datetime-like, optional
If data is None, start is used as the start point in generating regular
timestamp data.
periods : int, optional, > 0
Number of periods to generate, if generating index. Takes precedence
over end argument
end : end time, datetime-like, optional
If periods is none, generated index will extend to first conforming
time on or just past end argument
closed : string or None, default None
Make the interval closed with respect to the given frequency to
the 'left', 'right', or both sides (None)
tz : pytz.timezone or dateutil.tz.tzfile
ambiguous : 'infer', bool-ndarray, 'NaT', default 'raise'
- 'infer' will attempt to infer fall dst-transition hours based on
order
- bool-ndarray where True signifies a DST time, False signifies a
non-DST time (note that this flag is only applicable for ambiguous
times)
- 'NaT' will return NaT where there are ambiguous times
- 'raise' will raise an AmbiguousTimeError if there are ambiguous times
infer_dst : boolean, default False
.. deprecated:: 0.15.0
Attempt to infer fall dst-transition hours based on order
name : object
Name to be stored in the index
Notes
-----
To learn more about the frequency strings, please see `this link
<http://pandas.pydata.org/pandas-docs/stable/timeseries.html#offset-aliases>`__.
"""
_typ = 'datetimeindex'
_join_precedence = 10
def _join_i8_wrapper(joinf, **kwargs):
return DatetimeIndexOpsMixin._join_i8_wrapper(joinf, dtype='M8[ns]',
**kwargs)
_inner_indexer = _join_i8_wrapper(libjoin.inner_join_indexer_int64)
_outer_indexer = _join_i8_wrapper(libjoin.outer_join_indexer_int64)
_left_indexer = _join_i8_wrapper(libjoin.left_join_indexer_int64)
_left_indexer_unique = _join_i8_wrapper(
libjoin.left_join_indexer_unique_int64, with_indexers=False)
_arrmap = None
__eq__ = _dt_index_cmp('__eq__')
__ne__ = _dt_index_cmp('__ne__', nat_result=True)
__lt__ = _dt_index_cmp('__lt__')
__gt__ = _dt_index_cmp('__gt__')
__le__ = _dt_index_cmp('__le__')
__ge__ = _dt_inde
|
chryswoods/Sire
|
corelib/build/svnmvall.py
|
Python
|
gpl-2.0
| 200
| 0.01
|
#!/bin/env python
import s
|
ys
import os
args = sys.argv[1:]
files = args[0:-1]
newdir = args[-1]
for file in files:
cmd = "svn mv %s %s/" % (file,newdir)
print cmd
os.s
|
ystem(cmd)
|
floooh/fips
|
verbs/unset.py
|
Python
|
mit
| 705
| 0.01844
|
"""unset a default setting
unset config
unset target
"""
from mod import log, settings
#---------------------------------------------------------
|
----------------------
def run(fips_dir, proj_dir, args) :
"""run the 'unset' verb"""
if len(args) > 0 :
noun = args[0]
settings.unset(proj_dir, noun)
else :
log.error("expected noun: {}".format(', '.join(valid_nouns)))
#----------------------------------------------
|
---------------------------------
def help() :
"""print 'unset' help"""
log.info(log.YELLOW +
"fips unset [{}]\n" .format('|'.join(settings.valid_settings)) + log.DEF +
" unset currently active config or make-target")
|
txiner/db-xiner
|
hustle/core/pipeline.py
|
Python
|
mit
| 19,643
| 0.002851
|
from disco.core import Job
from disco.worker.task_io import task_input_stream
import hustle
import hustle.core
import hustle.core.marble
from hustle.core.marble import Marble, Column, Aggregation
from functools import partial
from hustle.core.pipeworker import HustleStage
import sys
SPLIT = "split"
GROUP_ALL = "group_all"
GROUP_LABEL = "group_label"
GROUP_LABEL_NODE = "group_node_label"
GROUP_NODE = "group_node"
# default number of partitions, users can set this in the settings.yaml
_NPART = 16
def hustle_output_stream(stream, partition, url, params, result_table):
    """Disco output-stream factory that writes task output into a fresh
    Hustle marble (an LMDB file) and copies it to *url* when done.

    Disco calls add() once per (key, value) pair and close() at task end.
    """
    class HustleOutputStream(object):
        def __init__(self, stream, url, params, **kwargs):
            import tempfile
            from wtrie import Trie
            self.result_table = result_table
            self.result_columns = result_table._field_names
            # scratch LMDB file; copied to its final 'url' location in close()
            tmpdir = getattr(params, 'tmpdir', '/tmp')
            self.filename = tempfile.mktemp(prefix="hustle", dir=tmpdir)
            maxsize = getattr(params, 'maxsize', 100 * 1024 * 1024)
            self.env, self.txn, self.dbs, self.meta = self.result_table._open(self.filename, maxsize, write=True, lru_size=10000)
            # next row id; starts at 1, so after N add() calls it is N + 1
            # (that value is what gets stored below as '_total_rows')
            self.autoinc = 1
            self.url = url
            # tries used to intern repeated string values (vid / vid16 dbs)
            self.vid_trie = Trie()
            self.vid16_trie = Trie()

        def add(self, k, v):
            """Insert one row; k and v together cover result_columns in order."""
            from hustle.core.marble import _insert_row
            data = dict(zip(self.result_columns, list(k) + list(v)))
            #print "BOZAK! adding %s %s %s" % (self.result_columns, k, v)
            _insert_row(data,
                        self.txn,
                        self.dbs,
                        self.autoinc,
                        self.vid_trie,
                        self.vid16_trie)
            self.autoinc += 1

        def close(self):
            """Write metadata, flush indexes, commit, upload and clean up."""
            import os
            import ujson
            self.meta.put(self.txn, '_total_rows', str(self.autoinc))
            # serialize both value tries and store their raw buffers in meta
            vid_nodes, vid_kids, _ = self.vid_trie.serialize()
            vid16_nodes, vid16_kids, _ = self.vid16_trie.serialize()
            vn_ptr, vn_len = vid_nodes.buffer_info()
            vk_ptr, vk_len = vid_kids.buffer_info()
            vn16_ptr, vn16_len = vid16_nodes.buffer_info()
            vk16_ptr, vk16_len = vid16_kids.buffer_info()
            self.meta.put_raw(self.txn, '_vid_nodes', vn_ptr, vn_len)
            self.meta.put_raw(self.txn, '_vid_kids', vk_ptr, vk_len)
            self.meta.put_raw(self.txn, '_vid16_nodes', vn16_ptr, vn16_len)
            self.meta.put_raw(self.txn, '_vid16_kids', vk16_ptr, vk16_len)
            self.meta.put(self.txn, 'name', ujson.dumps(self.result_table._name))
            self.meta.put(self.txn, 'fields', ujson.dumps(self.result_table._fields))
            self.meta.put(self.txn, 'partition', ujson.dumps(self.result_table._partition))
            for index, (subdb, subindexdb, bitmap_dict, column) in self.dbs.iteritems():
                if subindexdb:
                    # process all values for this bitmap index
                    if column.index_indicator == 2:
                        bitmap_dict.evictAll()
                    else:
                        for val, bitmap in bitmap_dict.iteritems():
                            subindexdb.put(self.txn, val, bitmap.dumps())
            self.txn.commit()
            try:
                # env.copy() ships the finished marble to its destination
                self.env.copy(self.url)
                print "Dumped result to %s" % self.url
            except Exception as e:
                print "Copy error: %s" % e
                self.txn.abort()
                raise e
            self.env.close()
            os.unlink(self.filename)
    return HustleOutputStream(stream, url, params)
def hustle_input_stream(fd, size, url, params, wheres, gen_where_index, key_names):
    """Disco input stream yielding rows from a locally materialized marble.

    For every where-clause that targets this marble, evaluate it (plain
    tables and partition expressions select all rows) and yield one
    (key_tuple, ()) pair per matching row.  When *gen_where_index* is true
    the index of the where-clause is prepended to each key tuple.
    """
    from disco import util
    from hustle.core.marble import Expr, MarbleStream
    from itertools import izip, repeat
    empty = ()
    try:
        scheme, netloc, rest = util.urlsplit(url)
    except Exception as e:
        print "Error handling hustle_input_stream for %s. %s" % (url, e)
        raise e
    # translate the ddfs/disco url into a local file path
    fle = util.localize(rest, disco_data=params._task.disco_data, ddfs_data=params._task.ddfs_data)
    # print "FLOGLE: %s %s" % (url, fle)
    otab = None
    try:
        # import sys
        # sys.path.append('/Library/Python/2.7/site-packages/pycharm-debug.egg')
        # import pydevd
        # pydevd.settrace('localhost', port=12999, stdoutToServer=True, stderrToServer=True)
        otab = MarbleStream(fle)
        bitmaps = {}
        for index, where in enumerate(wheres):
            # do not process where clauses that have nothing to do with this marble
            if where._name == otab.marble._name:
                if type(where) is Expr and not where.is_partition:
                    bm = where(otab)
                    bitmaps[index] = (bm, len(bm))
                else:
                    # it is either the table itself, or a partition expression. either way,
                    # return the entire table
                    bitmaps[index] = (otab.iter_all(), otab.number_rows)
        for index, (bitmap, blen) in bitmaps.iteritems():
            # optionally tag every row with the index of its where-clause
            prefix_gen = [repeat(index, blen)] if gen_where_index else []
            # one column iterator per requested key name; a None key means
            # "column not in this table" and is filled with None placeholders
            row_iter = prefix_gen + [otab.mget(col, bitmap) if col is not None else repeat(None, blen)
                                     for col in key_names[index]]
            for row in izip(*row_iter):
                yield row, empty
    finally:
        if otab:
            otab.close()
class SelectPipe(Job):
# profile = True
required_modules = [
('hustle', hustle.__file__),
('hustle.core', hustle.core.__file__),
('hustle.core.pipeline', __file__),
('hustle.core.marble', hustle.core.marble.__file__)]
def get_result_schema(self, project):
import random
from hustle import Table
if self.output_table:
return self.output_table
fields = []
for col_spec in project:
col = col_spec.column
if col.name not in fields:
fields.append(col.schema_string())
name = '-'.join([w._name for w in self.wheres])[:64]
# append a 3-digit random suffix to avoid name collision
self.output_table = Table(name="sub-%s-%03d" % (name, random.randint(0, 999)),
fields=fields)
return self.output_table
def _get_table(self, obj):
"""If obj is a table return its name otherwise figure out what it is and return the tablename"""
if isinstance(obj, Marble):
return obj
else:
return obj.table
def _resolve(self, cols, check, types=(Column, Aggregation)):
rval = []
for i, col in enumerate(cols):
if isinstance(col, types):
rval.append(col)
elif isinstance(col, basestring):
selectcol = next((c for c in check if c.name == col or c.fullname == col), None)
if selectcol:
rval.append(selectcol)
elif isinstance(col, int):
if col < len(check):
rval.append(check[col])
return rval
    def _get_key_names(self, project, join):
        """Return, per where-table, the list of key column names.

        For a join the join column comes first; then one entry per
        projected column -- its name when the column belongs to that
        table, else None (placeholder so every table yields keys of the
        same length).
        """
        result = []
        for where in self.wheres:
            table_name = self._get_table(where)._name
            rval = []
            if join:
                # exactly one join column per table is expected here
                join_column = next(c.name for c in join if c.table._name == table_name)
                rval.append(join_column)
            rval += tuple(c.column.name if c.table and c.table._name == table_name else None for c in project)
            result.append(rval)
        return result
def __init__(self,
master,
wheres,
project=(),
order_by=(),
join=(),
distinct=False,
desc=False,
limit=0,
partition=0,
nest=False,
pre_order_stage=()):
from hustle.core.pipeworker import Worker
super(SelectPipe, self).__init__(master=master, worker=Worker())
self.wheres = wheres
|
jocave/snapcraft
|
snapcraft/tests/test_commands_cleanbuild.py
|
Python
|
gpl-3.0
| 3,951
| 0
|
# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
#
# Copyright (C) 2016 Canonical Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import logging
import os
import tarfile
from unittest import mock
import fixtures
from snapcraft.main import main
from snapcraft import tests
class CleanBuildCommandTestCase(tests.TestCase):
    """Tests for the 'snapcraft cleanbuild' command (lxd is mocked out)."""

    # Minimal project definition written to snapcraft.yaml by each test.
    yaml_template = """name: snap-test
version: 1.0
summary: test cleanbuild
description: if snap is succesful a snap package will be available
architectures: ['amd64']
confinement: strict
parts:
    part1:
        plugin: nil
"""

    def make_snapcraft_yaml(self, n=1):
        # n is unused here but kept for interface parity with the base class.
        super().make_snapcraft_yaml(self.yaml_template)
        self.state_dir = os.path.join(self.parts_dir, 'part1', 'state')

    @mock.patch('snapcraft.internal.lxd.check_call')
    @mock.patch('snapcraft.internal.repo.is_package_installed')
    def test_cleanbuild(self, mock_installed, mock_call):
        """cleanbuild tars only source files, never build artifacts."""
        mock_installed.return_value = True

        fake_logger = fixtures.FakeLogger(level=logging.INFO)
        self.useFixture(fake_logger)
        self.make_snapcraft_yaml()

        # simulate build artifacts
        dirs = [
            os.path.join(self.parts_dir, 'part1', 'src'),
            self.stage_dir,
            self.snap_dir,
            os.path.join(self.parts_dir, 'plugins'),
        ]
        # part of the source tree: must end up inside the tarball
        files_tar = [
            os.path.join(self.parts_dir, 'plugins', 'x-plugin.py'),
            'main.c',
        ]
        # build/stage output and previous results: must be excluded
        files_no_tar = [
            os.path.join(self.stage_dir, 'binary'),
            os.path.join(self.snap_dir, 'binary'),
            'snap-test.snap',
            'snap-test_1.0_source.tar.bz2',
        ]
        for d in dirs:
            os.makedirs(d)
        for f in files_tar + files_no_tar:
            open(f, 'w').close()

        main(['cleanbuild', '--debug'])

        self.assertIn(
            'Setting up container with project assets\n'
            'Waiting for a network connection...\n'
            'Network connection established\n'
            'Retrieved snap-test_1.0_amd64.snap\n',
            fake_logger.output)

        with tarfile.open('snap-test_1.0_source.tar.bz2') as tar:
            tar_members = tar.getnames()

        for f in files_no_tar:
            f = os.path.relpath(f)
            self.assertFalse('./{}'.format(f) in tar_members,
                             '{} should not be in {}'.format(f, tar_members))

        for f in files_tar:
            f = os.path.relpath(f)
            self.assertTrue('./{}'.format(f) in tar_members,
                            '{} should be in {}'.format(f, tar_members))

    @mock.patch('snapcraft.internal.repo.is_package_installed')
    def test_no_lxd(self, mock_installed):
        """When lxd is not installed, exit with code 1 and a helpful error."""
        fake_logger = fixtures.FakeLogger(level=logging.ERROR)
        self.useFixture(fake_logger)
        mock_installed.return_value = False

        with self.assertRaises(SystemExit) as raised:
            main(['cleanbuild'])

        self.maxDiff = None
        self.assertEqual(1, raised.exception.code)
        self.assertEqual(
            fake_logger.output,
            'The lxd package is not installed, in order to use `cleanbuild` '
            'you must install lxd onto your system. Refer to the '
            '"Ubuntu Desktop and Ubuntu Server" section on '
            'https://linuxcontainers.org/lxd/getting-started-cli/'
            '#ubuntu-desktop-and-ubuntu-server to enable a proper setup.\n')
|
openstack/python-aodhclient
|
aodhclient/osc.py
|
Python
|
apache-2.0
| 1,844
| 0
|
# Copyright 2014 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""OpenStackClient plugin for Telemetry Alarming service."""
from osc_lib import utils
DEFAULT_ALARMING_API_VERSION = '2'
API_VERSION_OPTION = 'os_alarming_api_version'
API_NAME = "alarming"
API_VERSIONS = {
"2": "aodhclient.v2.client.Client",
}
def make_client(instance):
    """Return a versioned client for the Telemetry Alarming (aodh) service."""
    raw_version = instance._api_version[API_NAME]
    try:
        version = int(raw_version)
    except ValueError:
        # microversion style, e.g. "2.1"
        version = float(raw_version)
    client_class = utils.get_client_class(API_NAME, version, API_VERSIONS)
    # NOTE(sileht): ensure setup of the session is done
    instance.setup_auth()
    return client_class(session=instance.session,
                        interface=instance.interface,
                        region_name=instance.region_name)
def build_option_parser(parser):
    """Hook to add global options."""
    default_version = utils.env('OS_ALARMING_API_VERSION',
                                default=DEFAULT_ALARMING_API_VERSION)
    help_text = ('Queues API version, default=' +
                 DEFAULT_ALARMING_API_VERSION +
                 ' (Env: OS_ALARMING_API_VERSION)')
    parser.add_argument(
        '--os-alarming-api-version',
        metavar='<alarming-api-version>',
        default=default_version,
        help=help_text)
    return parser
|
zhkzyth/a-super-fast-crawler
|
logmanager.py
|
Python
|
mit
| 783
| 0.003995
|
#coding:utf8
"""
yet another logging wrapper
- log level
logging.CRITICAL,
logging.ERROR,
logging.WARNING,
logging.INFO,
logging.DEBUG
"""
import logging
from
|
config import PROJECT_ROOT
def configLogger(logFile="spider.log", logLevel=logging.DEBUG, logTree=""):
    """Configure and return a logger writing to PROJECT_ROOT/log/<logFile>.

    (Translated from the original Chinese note, which was also misplaced
    after the first statement and therefore not a real docstring.)

    :param logFile:  file name (not a path) of the log file
    :param logLevel: level set on the returned logger
    :param logTree:  dotted logger name; "" selects the root logger
    """
    logFile = PROJECT_ROOT+"/log/"+ logFile
    logger = logging.getLogger(logTree)
    formatter = logging.Formatter(
        '%(asctime)s %(threadName)s %(levelname)s %(message)s')
    try:
        fileHandler = logging.FileHandler(logFile)
    except IOError:
        # BUGFIX: re-raise the original exception.  'raise IOError' threw a
        # brand new, message-less IOError and discarded the real cause
        # (missing directory, permissions, ...).
        raise
    else:
        fileHandler.setFormatter(formatter)
        logger.addHandler(fileHandler)
        logger.setLevel(logLevel)
    return logger
|
ballotify/django-backend
|
ballotify/apps/api_v1/questions/serializers.py
|
Python
|
agpl-3.0
| 4,502
| 0.001333
|
from django.db import transaction
from rest_framework import serializers
from questions.models import Question, Choice
from votes.models import Vote, VoteChoice
from streams.models import Stream
from ..accounts.serializers import AccountSerializer
class VoteChoiceUserSerializer(serializers.ModelSerializer):
    """Read-only view of a VoteChoice: who voted and when."""
    user = AccountSerializer(read_only=True)

    class Meta:
        model = VoteChoice
        fields = ('user', 'created')
class VoteChoiceSerializer(serializers.ModelSerializer):
    """Write-side serializer for a single selected choice within a vote."""
    class Meta:
        model = VoteChoice
        fields = ('choice',)
class VoteSerializer(serializers.ModelSerializer):
    """A user's vote on a question, with its nested choice selections."""
    choices = VoteChoiceSerializer(many=True)

    class Meta:
        model = Vote
        fields = ('choices', 'user_agent', 'ip',)
        read_only_fields = ('user_agent', 'ip',)

    def validate(self, attrs):
        # Enforce one vote per user per question.
        question = self.context['view'].get_question()
        user = self.context['request'].user
        if Vote.objects.filter(user=user, question=question).exists():
            raise serializers.ValidationError('Current user already voted for this question.')
        return attrs

    def create(self, validated_data):
        """
        Custom create method. Support nested multiple vote choices creation.
        """
        assert "choices" in validated_data
        choices_data = validated_data.pop("choices")
        vote = Vote(**validated_data)
        vote.save()
        self.create_choices(vote, choices_data)
        return vote

    def create_choices(self, vote, choices_data):
        # Bulk insert; user_id is denormalized from the vote onto each row.
        VoteChoice.objects.bulk_create([VoteChoice(
            vote=vote,
            user_id=vote.user_id,
            **choice_data
        ) for choice_data in choices_data])
class ChoiceListSerializer(serializers.ListSerializer):
    """List serializer that shuffles choices for 'randomized' questions."""
    def to_representation(self, data):
        iterable = data.all()
        view = self.context['view']
        # Only detail-style views expose get_question(); shuffle only when
        # the question opted into randomized choice ordering.
        if hasattr(view, 'get_question') and self.context['view'].get_question().is_randomized:
            iterable = iterable.order_by('?')
        return [
            self.child.to_representation(item) for item in iterable
        ]
class ChoiceSerializer(serializers.ModelSerializer):
    """A choice plus a preview of its voters and the total vote count."""
    vote_choices = serializers.SerializerMethodField()
    votes_count = serializers.SerializerMethodField()

    class Meta:
        model = Choice
        list_serializer_class = ChoiceListSerializer
        fields = ('id', 'title', 'vote_choices', 'votes_count')
        read_only_fields = ('id',)

    def get_vote_choices(self, choice):
        # Preview only: capped at the first 14 vote choices.
        serializer = VoteChoiceUserSerializer(choice.vote_choices.all()[:14], many=True)
        return serializer.data

    def get_votes_count(self, choice):
        # CLEANUP: .count() issues COUNT(*) directly; the intermediate
        # .all() was a redundant queryset clone with identical results.
        return choice.vote_choices.count()
class QuestionSerializer(serializers.ModelSerializer):
    """Full question representation with nested choices and viewer flags."""
    stream = serializers.SlugRelatedField(queryset=Stream.objects.all(), slug_field='slug', required=False)
    slug = serializers.CharField(required=False)
    # per-request flags computed for the authenticated viewer
    is_voted = serializers.SerializerMethodField()
    is_owner = serializers.SerializerMethodField()
    choices = ChoiceSerializer(many=True)

    class Meta:
        model = Question
        fields = (
            'stream', 'title', 'slug', 'choices', 'modified', 'created', 'is_voted', 'is_owner', 'is_anonymous',
            'is_multiple', 'is_private', 'is_randomized'
        )
        read_only_fields = ('slug', 'modified', 'created', )

    @transaction.atomic
    def create(self, validated_data):
        """
        Custom create method. Prepare and create nested choices.
        """
        choices_data = validated_data.pop("choices", None)
        question = Question(**validated_data)
        question.save()
        self.create_choices(question, choices_data)
        return question

    def create_choices(self, question, choices_data):
        Choice.objects.bulk_create(
            [Choice(question=question, **choice_data) for choice_data in choices_data]
        )

    def get_is_voted(self, question):
        # Anonymous visitors never count as having voted.
        request = self.context.get("request")
        if not request.user.is_authenticated():
            return False
        return question.votes.filter(user=request.user).exists()

    def get_is_owner(self, question):
        request = self.context.get("request")
        if not request.user.is_authenticated():
            return False
        return question.user == request.user
class QuestionDetailSerializer(QuestionSerializer):
    """Detail variant: the stream cannot be changed after creation."""
    class Meta(QuestionSerializer.Meta):
        read_only_fields = ('stream',)
|
AgainFaster/django-wombat-authenticator
|
wombat_authenticator/migrations/0001_initial.py
|
Python
|
bsd-3-clause
| 5,385
| 0.008357
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """Initial South migration: create the WombatToken table with a unique
    (store, token) constraint and a one-to-one link to auth.User."""

    def forwards(self, orm):
        # Adding model 'WombatToken'
        db.create_table('wombat_authenticator_wombattoken', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('store', self.gf('django.db.models.fields.CharField')(max_length=50)),
            ('token', self.gf('django.db.models.fields.CharField')(max_length=50)),
            ('user', self.gf('django.db.models.fields.related.OneToOneField')(related_name='api_auth_token', unique=True, to=orm['auth.User'])),
            ('created', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
            ('modified', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)),
        ))
        db.send_create_signal('wombat_authenticator', ['WombatToken'])

        # Adding unique constraint on 'WombatToken', fields ['store', 'token']
        db.create_unique('wombat_authenticator_wombattoken', ['store', 'token'])

    def backwards(self, orm):
        # Removing unique constraint on 'WombatToken', fields ['store', 'token']
        db.delete_unique('wombat_authenticator_wombattoken', ['store', 'token'])

        # Deleting model 'WombatToken'
        db.delete_table('wombat_authenticator_wombattoken')

    # Frozen ORM snapshot used by South; auto-generated, do not hand-edit.
    models = {
        'auth.group': {
            'Meta': {'object_name': 'Group'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        'auth.permission': {
            'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '72'})
        },
        'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        'wombat_authenticator.wombattoken': {
            'Meta': {'unique_together': "(('store', 'token'),)", 'object_name': 'WombatToken'},
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'store': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
            'token': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
            'user': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'api_auth_token'", 'unique': 'True', 'to': "orm['auth.User']"})
        }
    }

    complete_apps = ['wombat_authenticator']
|
pexip/os-pytest
|
src/_pytest/_version.py
|
Python
|
mit
| 142
| 0
|
# coding: utf-8
# file generated by setuptools_scm
|
# don't change, don't track in version control
version
|
= '6.2.5'
version_tuple = (6, 2, 5)
|
manojklm/pywinauto-x64
|
pywinauto/win32structures.py
|
Python
|
lgpl-2.1
| 32,760
| 0.004701
|
# GUI Application automation and testing library
# Copyright (C) 2006 Mark Mc Mahon
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public License
# as published by the Free Software Foundation; either version 2.1
# of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc.,
# 59 Temple Place,
# Suite 330,
# Boston, MA 02111-1307 USA
"Definition of Windows structures"
from __future__ import absolute_import
__revision__ = "$Revisio
|
n: 560 $"
from .win32defines import LF_FACESIZE, NM
|
TTDISPINFOW_V1_SIZE, HDITEMW_V1_SIZE
import sys
import ctypes
from ctypes import \
c_int, c_uint, c_long, c_ulong, c_void_p, c_wchar, c_char, \
c_ubyte, c_ushort, c_wchar_p, \
POINTER, sizeof, alignment, Union, c_ulonglong, c_longlong, c_size_t
def is_x64():
    "Return True when running in a 64 bit process (size_t is 8 bytes)"
    pointer_bytes = sizeof(c_size_t)
    return pointer_bytes == 8
class Structure(ctypes.Structure):
    """Override the Structure class from ctypes to add printing and comparison.

    __eq__ accepts either another ctypes.Structure (compared field by field)
    or a list/tuple of values in _fields_ order.

    NOTE(review): defining __eq__ without __hash__ makes instances
    unhashable on Python 3 -- confirm structures are never used as dict
    keys before relying on that.
    """
    #----------------------------------------------------------------
    def __str__(self):
        """Return one '<name>\\t<value>' line per field of the structure."""
        lines = []
        for f in self._fields_:
            name = f[0]
            lines.append("%20s\t%s"% (name, getattr(self, name)))
        return "\n".join(lines)

    #----------------------------------------------------------------
    def __eq__(self, other_struct):
        "return true if the two structures have the same coordinates"

        if isinstance(other_struct, ctypes.Structure):
            try:
                # pretend they are two structures - check that they both
                # have the same value for all fields
                are_equal = True
                for field in self._fields_:
                    name = field[0]
                    if getattr(self, name) != getattr(other_struct, name):
                        are_equal = False
                        break
                return are_equal
            except AttributeError:
                return False

        if isinstance(other_struct, (list, tuple)):
            # Now try to see if we have been passed in a list or tuple
            try:
                are_equal = True
                for i, field in enumerate(self._fields_):
                    name = field[0]
                    if getattr(self, name) != other_struct[i]:
                        are_equal = False
                        break
                return are_equal
            # BUGFIX: was a bare 'except:' which also swallowed
            # KeyboardInterrupt/SystemExit; an IndexError (shorter sequence)
            # or TypeError (non-comparable values) is what can occur here.
            except Exception:
                return False

        return False
##====================================================================
#def PrintCtypesStruct(struct, exceptList = []):
# """Print out the fields of the ctypes Structure
#
# fields in exceptList will not be printed"""
# for f in struct._fields_:
# name = f[0]
# if name in exceptList:
# continue
# print("%20s "% name, getattr(struct, name))
# allow ctypes structures to be pickled
# set struct.__reduce__ = _reduce
# e.g. RECT.__reduce__ = _reduce
def _construct(typ, buf):
#print "construct", (typ, buf)
obj = typ.__new__(typ)
ctypes.memmove(ctypes.addressof(obj), buf, len(buf))
return obj
def _reduce(self):
    """__reduce__ implementation so ctypes structures can be pickled.

    Returns (_construct, (cls, raw_bytes)); _construct memmoves the bytes
    back into a fresh instance when unpickling.
    """
    try:
        # Python 2 path (original code)
        raw = str(buffer(self))
    except NameError:
        # BUGFIX: buffer() does not exist on Python 3, although this module
        # otherwise supports it (see RECT.__init__); memoryview + bytes is
        # the equivalent raw copy.
        raw = bytes(memoryview(self))
    return (_construct, (self.__class__, raw))
#LPTTTOOLINFOW = POINTER(tagTOOLINFOW)
#PTOOLINFOW = POINTER(tagTOOLINFOW)
# Basic Win32 type names mapped onto ctypes primitives.
BOOL = c_int
BYTE = c_ubyte
CHAR = c_char
DWORD = c_ulong
HANDLE = c_void_p
HBITMAP = c_long
LONG = c_long
LPVOID = c_void_p
PVOID = c_void_p
UINT = c_uint
WCHAR = c_wchar
WORD = c_ushort

COLORREF = DWORD

LPBYTE = POINTER(BYTE)
LPWSTR = c_size_t #POINTER(WCHAR)

# Pointer-sized integer types: 8 bytes on x64, 4 bytes on x86.
DWORD_PTR = UINT_PTR = ULONG_PTR = c_size_t
if is_x64():
    INT_PTR = LONG_PTR = c_longlong
else:
    INT_PTR = LONG_PTR = c_long

# Handle types are pointer sized (the '#LONG' comments mark the previous,
# 32-bit-only definitions).
HBITMAP = LONG_PTR #LONG
HINSTANCE = LONG_PTR #LONG
HMENU = LONG_PTR #LONG
HBRUSH = LONG_PTR #LONG
HTREEITEM = LONG_PTR #LONG
HWND = LONG_PTR #LONG

LPARAM = LONG_PTR
WPARAM = UINT_PTR
class POINT(Structure):
    "Win32 POINT structure: a 2-D screen coordinate."
    _fields_ = [
        # C:/PROGRA~1/MIAF9D~1/VC98/Include/windef.h 307
        ('x', LONG),
        ('y', LONG),
    ]
# Sanity-check the layout matches the Win32 ABI.
assert sizeof(POINT) == 8, sizeof(POINT)
assert alignment(POINT) == 4, alignment(POINT)
#====================================================================
class RECT(Structure):
"Wrap the RECT structure and add extra functionality"
_fields_ = [
# C:/PROGRA~1/MIAF9D~1/VC98/Include/windef.h 287
('left', LONG),
('top', LONG),
('right', LONG),
('bottom', LONG),
]
#----------------------------------------------------------------
    def __init__(self, otherRect_or_left = 0, top = 0, right = 0, bottom = 0):
        """Provide a constructor for RECT structures

        A RECT can be constructed by:
        - Another RECT (each value will be copied)
        - Values for left, top, right and bottom

        e.g. my_rect = RECT(otherRect)
        or   my_rect = RECT(10, 20, 34, 100)
        """
        if isinstance(otherRect_or_left, RECT):
            # copy constructor: duplicate the other rectangle's coordinates
            self.left = otherRect_or_left.left
            self.right = otherRect_or_left.right
            self.top = otherRect_or_left.top
            self.bottom = otherRect_or_left.bottom
        else:
            #if not isinstance(otherRect_or_left, (int, long)):
            #    print type(self), type(otherRect_or_left), otherRect_or_left
            # sys.version[0] == '3' distinguishes Python 3 (no 'long'
            # builtin) from Python 2, where values are coerced to long.
            if sys.version[0] == '3':
                self.left = otherRect_or_left
                self.right = right
                self.top = top
                self.bottom = bottom
            else:
                self.left = long(otherRect_or_left)
                self.right = long(right)
                self.top = long(top)
                self.bottom = long(bottom)
# #----------------------------------------------------------------
# def __eq__(self, otherRect):
# "return true if the two rectangles have the same coordinates"
#
# try:
# return \
# self.left == otherRect.left and \
# self.top == otherRect.top and \
# self.right == otherRect.right and \
# self.bottom == otherRect.bottom
# except AttributeError:
# return False
#----------------------------------------------------------------
def __str__(self):
"Return a string representation of the RECT"
return "(L%d, T%d, R%d, B%d)" % (
self.left, self.top, self.right, self.bottom)
#----------------------------------------------------------------
def __repr__(self):
"Return some representation of the RECT"
return "<RECT L%d, T%d, R%d, B%d>" % (
self.left, self.top, self.right, self.bottom)
#----------------------------------------------------------------
def __sub__(self, other):
"Return a new rectangle which is offset from the one passed in"
newRect = RECT()
newRect.left = self.left - other.left
newRect.right = self.right - other.left
newRect.top = self.top - other.top
newRect.bottom = self.bottom - other.top
return newRect
#----------------------------------------------------------------
def __add__(self, other):
"Allow two rects to be added using +"
newRect = RECT()
newRect.left = self.left + other.left
newRect.right = self.right + other.left
newRect.top = self.top + other.top
newRect.bottom = self.bottom + other.top
|
vaidap/zulip
|
zerver/views/events_register.py
|
Python
|
apache-2.0
| 2,368
| 0.005912
|
from __future__ import absolute_import
from django.http import HttpRequest, HttpResponse
from typing import Iterable, Optional, Sequence, Text
fro
|
m zerver.lib.events import do_events_register
from zerver.lib.request import REQ, has_request_variables
from zerver.lib.response import json_success
from zerver.lib.validator import check_string, check_list, check_bool
from zerver.models import Stream, UserProfile
def _default_all_public_streams(user_profile, all_public_streams):
# type: (UserProfile, Optional[bool]) -> bool
if all_public_streams is not None:
return all_public_streams
else:
return
|
user_profile.default_all_public_streams
def _default_narrow(user_profile, narrow):
# type: (UserProfile, Iterable[Sequence[Text]]) -> Iterable[Sequence[Text]]
default_stream = user_profile.default_events_register_stream # type: Optional[Stream]
if not narrow and default_stream is not None:
narrow = [['stream', default_stream.name]]
return narrow
# Endpoint: register an event queue for the calling user.  All parameters
# are extracted and validated by the REQ machinery in the decorator
# defaults; per-user defaults fill in anything the client omitted.
@has_request_variables
def events_register_backend(request, user_profile,
                            apply_markdown=REQ(default=False, validator=check_bool),
                            all_public_streams=REQ(default=None, validator=check_bool),
                            include_subscribers=REQ(default=False, validator=check_bool),
                            event_types=REQ(validator=check_list(check_string), default=None),
                            fetch_event_types=REQ(validator=check_list(check_string), default=None),
                            narrow=REQ(validator=check_list(check_list(check_string, length=2)), default=[]),
                            queue_lifespan_secs=REQ(converter=int, default=0)):
    # type: (HttpRequest, UserProfile, bool, Optional[bool], bool, Optional[Iterable[str]], Optional[Iterable[str]], Iterable[Sequence[Text]], int) -> HttpResponse
    # Resolve user-preference fallbacks before delegating to the events layer.
    all_public_streams = _default_all_public_streams(user_profile, all_public_streams)
    narrow = _default_narrow(user_profile, narrow)

    ret = do_events_register(user_profile, request.client, apply_markdown,
                             event_types, queue_lifespan_secs, all_public_streams,
                             narrow=narrow, include_subscribers=include_subscribers,
                             fetch_event_types=fetch_event_types)
    return json_success(ret)
|
helfertool/helfertool
|
src/news/migrations/0002_person_token.py
|
Python
|
agpl-3.0
| 478
| 0.002092
|
# -*- coding: utf-8 -*-
# Generat
|
ed by Django 1.10.6 on 2017-03-26 10:54
from __future__ import unicode_literals
from django.db import migrations, models
import uuid
class Migration(migrations.Migration):
    """Add the nullable 'token' UUID field to news.Person."""

    dependencies = [
        ('news', '0001_initial'),
    ]

    operations = [
        migrations.AddField(
            model_name='person',
            name='token',
            # null=True lets existing rows migrate without a value; new rows
            # get a random UUID from the default.
            field=models.UUIDField(default=uuid.uuid4, editable=False, null=True),
        ),
    ]
|
Azure/azure-sdk-for-python
|
sdk/containerregistry/azure-mgmt-containerregistry/azure/mgmt/containerregistry/v2018_02_01_preview/_configuration.py
|
Python
|
mit
| 3,281
| 0.004572
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, TYPE_CHECKING
from azure.core.configuration import Configuration
from azure.core.pipeline import policies
from azure.mgmt.core.policies import ARMChallengeAuthenticationPolicy, ARMHttpLoggingPolicy
from ._version import VERSION
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from azure.core.credenti
|
als import TokenCredential
class ContainerRegistryManagementClientConfiguration(Configuration):
    """Configuration for ContainerRegistryManagementClient.

    Note that all parameters used to create this instance are saved as instance
    attributes.

    :param credential: Credential needed for the client to connect to Azure.
    :type credential: ~azure.core.credentials.TokenCredential
    :param subscription_id: The Microsoft Azure subscription ID.
    :type subscription_id: str
    """

    def __init__(
        self,
        credential: "TokenCredential",
        subscription_id: str,
        **kwargs: Any
    ) -> None:
        super(ContainerRegistryManagementClientConfiguration, self).__init__(**kwargs)
        # Both values are required by every operation; fail fast.
        if credential is None:
            raise ValueError("Parameter 'credential' must not be None.")
        if subscription_id is None:
            raise ValueError("Parameter 'subscription_id' must not be None.")

        self.credential = credential
        self.subscription_id = subscription_id
        self.api_version = "2018-02-01-preview"
        # Default scope targets public-cloud ARM; callers can pass
        # credential_scopes for sovereign clouds.
        self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default'])
        kwargs.setdefault('sdk_moniker', 'mgmt-containerregistry/{}'.format(VERSION))
        self._configure(**kwargs)

    def _configure(
        self,
        **kwargs  # type: Any
    ):
        # type: (...) -> None
        # Standard ARM pipeline policies; each one can be overridden by
        # passing the corresponding keyword argument.
        self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs)
        self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs)
        self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs)
        self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs)
        self.http_logging_policy = kwargs.get('http_logging_policy') or ARMHttpLoggingPolicy(**kwargs)
        self.retry_policy = kwargs.get('retry_policy') or policies.RetryPolicy(**kwargs)
        self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs)
        self.redirect_policy = kwargs.get('redirect_policy') or policies.RedirectPolicy(**kwargs)
        self.authentication_policy = kwargs.get('authentication_policy')
        # Only build the default auth policy when one was not supplied.
        if self.credential and not self.authentication_policy:
            self.authentication_policy = ARMChallengeAuthenticationPolicy(self.credential, *self.credential_scopes, **kwargs)
narfman0/challenges
|
algorithms/tests/sorting/test_merge.py
|
Python
|
gpl-3.0
| 1,709
| 0.00117
|
from unittest import TestCase
from sorting.merge import merge_list, merge_sort, merge_sort_recursive
class TestSortingMerge(TestCase):
    """Unit tests for merge_list, merge_sort and merge_sort_recursive."""

    def test_merge_list(self):
        # Merging two sorted lists of equal length yields 1..6 in order.
        result = merge_list([1, 4, 6], [2, 3, 5])
        self.assertEqual(list(range(1, 7)), list(result))

        # Lists of unequal length must also merge correctly.
        result = merge_list([1, 4, 6, 7, 8, 9], [2, 3, 5])
        self.assertEqual(list(range(1, 10)), list(result))

    def test_merge_sort(self):
        # assertEqual (assertEquals is a deprecated alias); compare whole list.
        result = merge_sort([1, 7, 5, 4, 6, 8, 5, 3, 9])
        self.assertEqual([1, 3, 4, 5, 5, 6, 7, 8, 9], list(result))

    def test_merge_sort_recursive(self):
        # Input contains duplicates (5 and 8) to check stability of the merge.
        result = merge_sort_recursive([1, 7, 5, 4, 6, 8, 5, 3, 9, 8])
        self.assertEqual([1, 3, 4, 5, 5, 6, 7, 8, 8, 9], list(result))
|
ingadhoc/product
|
product_management_group/__manifest__.py
|
Python
|
agpl-3.0
| 1,296
| 0
|
##############################################################################
#
# Copyright (C) 2015 ADHOC SA (http://www.adhoc.com.ar)
# All Rights Reserved.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
    'name': 'Products Management Group',
    'version': '13.0.1.0.0',
    'category': 'base.module_category_knowledge_management',
    'author': 'ADHOC SA',
    'website': 'www.adhoc.com.ar',
    'license': 'AGPL-3',
    'depends': [
        'sale',
    ],
    'data': [
        'security/product_management_security.xml',
    ],
    # Kept in the repository but excluded from the apps list (not migrated).
    'installable': False,
}
|
rabipanda/tensorflow
|
tensorflow/python/tools/saved_model_cli.py
|
Python
|
apache-2.0
| 29,293
| 0.006111
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Command-line interface to inspect and execute a graph in a SavedModel.
For detailed usages and examples, please refer to:
https://www.tensorflow.org/programmers_guide/saved_model_cli
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import argparse
import os
import re
import sys
import warnings

import numpy as np
from six import integer_types

from tensorflow.contrib.saved_model.python.saved_model import reader
from tensorflow.contrib.saved_model.python.saved_model import signature_def_utils
from tensorflow.core.example import example_pb2
from tensorflow.core.framework import types_pb2
from tensorflow.python.client import session
from tensorflow.python.debug.wrappers import local_cli_wrapper
from tensorflow.python.framework import ops as ops_lib
from tensorflow.python.platform import app
from tensorflow.python.saved_model import loader
from tensorflow.python.tools import saved_model_utils
def _show_tag_sets(saved_model_dir):
  """Prints the tag-sets stored in a SavedModel directory.

  Prints all the tag-sets for MetaGraphs stored in SavedModel directory.

  Args:
    saved_model_dir: Directory containing the SavedModel to inspect.
  """
  print('The given SavedModel contains the following tag-sets:')
  for tags in sorted(reader.get_saved_model_tag_sets(saved_model_dir)):
    print(', '.join(sorted(tags)))
def _show_signature_def_map_keys(saved_model_dir, tag_set):
  """Prints the keys for each SignatureDef in the SignatureDef map.

  Prints the list of SignatureDef keys from the SignatureDef map specified by
  the given tag-set and SavedModel directory.

  Args:
    saved_model_dir: Directory containing the SavedModel to inspect.
    tag_set: Group of tag(s) of the MetaGraphDef to get SignatureDef map from,
        in string format, separated by ','. For tag-set contains multiple tags,
        all tags must be passed in.
  """
  print('The given SavedModel MetaGraphDef contains SignatureDefs with the '
        'following keys:')
  # Iterating the dict directly yields its keys.
  for signature_def_key in sorted(get_signature_def_map(saved_model_dir,
                                                        tag_set)):
    print('SignatureDef key: \"%s\"' % signature_def_key)
def _get_inputs_tensor_info_from_meta_graph_def(meta_graph_def,
                                                signature_def_key):
  """Gets TensorInfo for all inputs of the SignatureDef.

  Returns a dictionary that maps each input key to its TensorInfo for the given
  signature_def_key in the meta_graph_def.

  Args:
    meta_graph_def: MetaGraphDef protocol buffer with the SignatureDef map to
        look up SignatureDef key.
    signature_def_key: A SignatureDef key string.

  Returns:
    A dictionary that maps input tensor keys to TensorInfos.
  """
  signature_def = signature_def_utils.get_signature_def_by_key(
      meta_graph_def, signature_def_key)
  return signature_def.inputs
def _get_outputs_tensor_info_from_meta_graph_def(meta_graph_def,
                                                 signature_def_key):
  """Gets TensorInfos for all outputs of the SignatureDef.

  Returns a dictionary that maps each output key to its TensorInfo for the given
  signature_def_key in the meta_graph_def.

  Args:
    meta_graph_def: MetaGraphDef protocol buffer with the SignatureDef map to
        look up signature_def_key.
    signature_def_key: A SignatureDef key string.

  Returns:
    A dictionary that maps output tensor keys to TensorInfos.
  """
  signature_def = signature_def_utils.get_signature_def_by_key(
      meta_graph_def, signature_def_key)
  return signature_def.outputs
def _show_inputs_outputs(saved_model_dir, tag_set, signature_def_key):
  """Prints input and output TensorInfos.

  Prints the details of input and output TensorInfos for the SignatureDef mapped
  by the given signature_def_key.

  Args:
    saved_model_dir: Directory containing the SavedModel to inspect.
    tag_set: Group of tag(s) of the MetaGraphDef, in string format, separated by
        ','. For tag-set contains multiple tags, all tags must be passed in.
    signature_def_key: A SignatureDef key string.
  """
  meta_graph_def = saved_model_utils.get_meta_graph_def(saved_model_dir,
                                                        tag_set)
  inputs = _get_inputs_tensor_info_from_meta_graph_def(
      meta_graph_def, signature_def_key)
  outputs = _get_outputs_tensor_info_from_meta_graph_def(
      meta_graph_def, signature_def_key)

  print('The given SavedModel SignatureDef contains the following input(s):')
  for key, tensor_info in sorted(inputs.items()):
    print('inputs[\'%s\'] tensor_info:' % key)
    _print_tensor_info(tensor_info)

  print('The given SavedModel SignatureDef contains the following output(s):')
  for key, tensor_info in sorted(outputs.items()):
    print('outputs[\'%s\'] tensor_info:' % key)
    _print_tensor_info(tensor_info)

  print('Method name is: %s' %
        meta_graph_def.signature_def[signature_def_key].method_name)
def _print_tensor_info(tensor_info):
  """Prints details of the given tensor_info.

  Args:
    tensor_info: TensorInfo object to be printed.
  """
  # DataType.Name resolves the enum value directly via the generated protobuf
  # API, instead of building an inverse {value: name} dict on every call.
  print(' dtype: ' + types_pb2.DataType.Name(tensor_info.dtype))
  # Display shape as tuple.
  if tensor_info.tensor_shape.unknown_rank:
    shape = 'unknown_rank'
  else:
    dims = [str(dim.size) for dim in tensor_info.tensor_shape.dim]
    shape = ', '.join(dims)
    shape = '(' + shape + ')'
  print(' shape: ' + shape)
  print(' name: ' + tensor_info.name)
def _show_all(saved_model_dir):
  """Prints tag-set, SignatureDef and Inputs/Outputs information in SavedModel.

  Prints all tag-set, SignatureDef and Inputs/Outputs information stored in
  SavedModel directory.

  Args:
    saved_model_dir: Directory containing the SavedModel to inspect.
  """
  for tag_set in sorted(reader.get_saved_model_tag_sets(saved_model_dir)):
    tag_set = ', '.join(tag_set)
    print('\nMetaGraphDef with tag-set: \'' + tag_set +
          '\' contains the following SignatureDefs:')
    signature_def_map = get_signature_def_map(saved_model_dir, tag_set)
    for signature_def_key in sorted(signature_def_map.keys()):
      print('\nsignature_def[\'' + signature_def_key + '\']:')
      _show_inputs_outputs(saved_model_dir, tag_set, signature_def_key)
def get_meta_graph_def(saved_model_dir, tag_set):
  """DEPRECATED: Use saved_model_utils.get_meta_graph_def instead.

  Gets MetaGraphDef from SavedModel. Returns the MetaGraphDef for the given
  tag-set and SavedModel directory.

  Args:
    saved_model_dir: Directory containing the SavedModel to inspect or execute.
    tag_set: Group of tag(s) of the MetaGraphDef to load, in string format,
        separated by ','. For tag-set contains multiple tags, all tags must be
        passed in.

  Raises:
    RuntimeError: An error when the given tag-set does not exist in the
        SavedModel.

  Returns:
    A MetaGraphDef corresponding to the tag-set.
  """
  # Backwards-compatibility shim: delegates directly to the supported helper.
  return saved_model_utils.get_meta_graph_def(saved_model_dir, tag_set)
def get_signature_def_map(saved_model_dir, tag_set):
"""Gets SignatureDef map from a MetaGraphDef in a SavedModel.
Returns the SignatureDef map for the given tag-set in the SavedModel
directory.
Args:
s
|
filipecn/lazycf
|
tests/Test_LazyCF.py
|
Python
|
mit
| 1,628
| 0.001229
|
#!/usr/bin/py
import os
import sys
import shutil
import unittest
sys.path.append(os.path.abspath('..'))
from sample.LazyCF import LazyCF
from sample.CodeForces import CodeForces
class TestLazyCF(unittest.TestCase):
    """Integration tests for LazyCF against the Codeforces API (contest 768)."""

    def test__init__(self):
        lazy_test = LazyCF()
        self.assertEqual(str(lazy_test.__class__), "sample.LazyCF.LazyCF")

    def test_get_contest(self):
        cf = LazyCF()
        cf.get_contest(768)
        self.assertEqual(cf.contest.id, 768)

    def test_create_folder(self):
        # One directory per problem index must be created under the cwd.
        cf = CodeForces()
        contest_test = cf.get_contest(768)
        lazy = LazyCF()
        lazy.create_folder(contest_test)
        path = os.path.abspath('.')
        for folder in contest_test.problem_list:
            folder_path = os.path.join(path, folder.index)
            self.assertTrue(os.path.isdir(folder_path))
            shutil.rmtree(folder_path)  # clean up after ourselves

    def test_create_file(self):
        # input_N / output_N files must be written for every sample test case.
        cf = CodeForces()
        contest_test = cf.get_contest(768)
        lazy = LazyCF()
        lazy.create_folder(contest_test)
        path = os.path.abspath('.')
        for folder in contest_test.problem_list:
            path_folder = os.path.join(path, folder.index)
            self.assertTrue(os.path.isdir(path_folder))
            for i, cases in enumerate(folder.test, start=1):
                lazy.create_file(cases.input_text, "input_" + str(i), path_folder)
                lazy.create_file(cases.output_text, "output_" + str(i), path_folder)
            shutil.rmtree(path_folder)
if __name__ == "__main__":
    # Allow running this test module directly: python Test_LazyCF.py
    unittest.main()
|
m-mix/djangocms-bootstrap3-grid
|
setup.py
|
Python
|
bsd-2-clause
| 1,149
| 0
|
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages

from djangocms_bootstrap3 import __version__

# No hard runtime requirements beyond django CMS itself.
INSTALL_REQUIRES = []

# Trove classifiers describing the package on PyPI.
CLASSIFIERS = [
    'Development Status :: 5 - Production/Stable',
    'Environment :: Web Environment',
    'Framework :: Django',
    'Intended Audience :: Developers',
    'License :: OSI Approved :: BSD License',
    'Operating System :: OS Independent',
    'Programming Language :: Python',
    'Topic :: Communications',
    'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
    'Programming Language :: Python :: 2.6',
    'Programming Language :: Python :: 2.7',
]

setup(
    name='djangocms-bootstrap3-grid',
    version=__version__,
    description='Bootstrap3 grid system plugin for django CMS',
    author='Maidakov Mikhail',
    author_email='m-email@inbox.com',
    url='https://github.com/m-mix/djangocms-bootstrap3-grid',
    packages=find_packages(exclude=[]),
    install_requires=INSTALL_REQUIRES,
    license='LICENSE.txt',
    platforms=['OS Independent'],
    classifiers=CLASSIFIERS,
    long_description=open('README.rst').read(),
    include_package_data=True,
    zip_safe=False
)
|
gangadhar-kadam/powapp
|
selling/doctype/device_group/device_group.py
|
Python
|
agpl-3.0
| 1,071
| 0.035481
|
# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
# For license information, please see license.txt
from __future__ import unicode_literals
import webnotes
from webnotes import msgprint, _
from webnotes.utils import cint,cstr
class DocType:
    """Controller for the Device Group doctype (legacy webnotes API)."""

    def __init__(self, d, dl):
        self.doc, self.doclist = d, dl

    def autoname(self):
        # Records are named "<account_id>_<group_id>"; both parts are required.
        if self.doc.account_id and self.doc.group_id:
            self.doc.name = self.doc.account_id + "_" + self.doc.group_id
        else:
            msgprint("Please Create Device Group", raise_exception=1)

    def on_update(self):
        # Normalise the identifiers to lower case before mirroring the record.
        self.doc.account_id = self.doc.account_id.lower()
        self.doc.group_id = self.doc.group_id.lower()
        self.doc.name = self.doc.name.lower()
        self.doc.group_id = self.doc.name
        # Parameterised query (%s placeholders): never interpolate
        # user-supplied values into SQL — the previous string concatenation
        # was an injection vector.
        qry = "insert into DeviceGroup (accountID, groupID) values (%s, %s)"
        webnotes.conn.sql(qry, (cstr(self.doc.account_id), cstr(self.doc.group_id)))
        webnotes.errprint(qry)
|
urisimchoni/samba
|
third_party/pep8/testsuite/W29.py
|
Python
|
gpl-3.0
| 367
| 0.016438
|
#: Okay
# 情
#: W291:1:6
print
#: W293:2:1
class Foo(object):
bang = 12
#: W291:2:35
'''multiline
string with trailing whitespace'''
#: W292:1:36 noeol
# This line doesn't have a linefeed
#: W292:1:5 E225:1:2 noeol
1+ 1
#: W292:1:27 E261:1:12 noeol
import this # no line feed
#: W292:3:22 noeol
class Test(object):
def __repr__(self):
return 'test'
|
haum/hms_irc
|
hms_irc/commands/tests/test_agenda.py
|
Python
|
gpl-3.0
| 4,323
| 0.000232
|
import pytest
from hms_irc.commands.agenda import get_instance
from hms_irc.commands.tests import build_command
@pytest.fixture
def instance(irc_server, irc_chan, rabbit):
    # Agenda command instance wired to the irc_server/irc_chan/rabbit fixtures.
    return get_instance(irc_server, irc_chan, rabbit)
# Misc
def test_commands_available(instance):
    """Test that all required subcommands are available."""
    # Subcommand handlers are exposed with a "cmd_" prefix.
    required = ["add_seance", "add", "modify", "remove", "all", "help"]
    present = list(instance.subcommand_names())
    for item in required:
        assert "cmd_" + item in present
# Basic argument checking
def test_invalid_argument(instance):
    """Test to call the agenda command with an invalid argument."""
    # Unknown subcommands must not reach the message bus.
    instance.handle(build_command("agenda lolilol"))
    instance.rabbit.publish.assert_not_called()
def test_bad_argument(instance):
    """Test to call a valid command with a bad format."""
    # NOTE(review): other tests prefix commands with "agenda " — confirm the
    # bare "remove toto" form is intentional here.
    instance.handle(build_command("remove toto", voiced=True))
    instance.rabbit.publish.assert_not_called()
def test_no_arguments(instance):
    """Test to call the agenda command without any argument."""
    # A bare "agenda" defaults to a plain list query.
    instance.handle(build_command("agenda"))
    instance.rabbit.publish.assert_called_with('agenda.query', {
        'command': 'list',
        'source': 'irc'})
# Non-voiced commands
def test_list_all(instance):
    """Test list all the events in the agenda."""
    # "all" adds an arguments payload with the all flag set.
    instance.handle(build_command("agenda all"))
    instance.rabbit.publish.assert_called_with('agenda.query', {
        'command': 'list',
        'arguments': {'all': True},
        'source': 'irc'})
def test_help(instance):
    """Try to call the help command of agenda."""
    # Help is handled locally; nothing is published to rabbit.
    instance.handle(build_command("agenda help"))
    instance.rabbit.publish.assert_not_called()
# Test that unvoiced user cannot call voiced commands
def test_add_not_voiced(instance):
    """Test to execute a voiced command as non-voiced user."""
    # Non-voiced users must not trigger any publication.
    instance.handle(build_command("add 42"))
    instance.rabbit.publish.assert_not_called()
def test_add_seance_not_voiced(instance):
    """Test to execute a voiced command as non-voiced user."""
    # Non-voiced users must not trigger any publication.
    instance.handle(build_command("add_seance 42"))
    instance.rabbit.publish.assert_not_called()
def test_modify_not_voiced(instance):
    """Test to execute a voiced command as non-voiced user."""
    # Non-voiced users must not trigger any publication.
    instance.handle(build_command("modify 42"))
    instance.rabbit.publish.assert_not_called()
def test_remove_not_voiced(instance):
    """Test to execute a voiced command as non-voiced user."""
    # Non-voiced users must not trigger any publication.
    instance.handle(build_command("remove 42"))
    instance.rabbit.publish.assert_not_called()
# Voiced commands
def test_add(instance):
    """Try to add an event to the agenda."""
    # date, location and title are quoted positional args; the remaining
    # words become the free-text description.
    args = ("add 10/11/2017 17:45 \"Local du HAUM\" \"Test débile\" Un "
            "super test complètement débile")
    instance.handle(build_command("agenda " + args, voiced=True))
    instance.rabbit.publish.assert_called_with('agenda.query', {
        'command': 'add',
        'source': 'irc',
        'arguments': {
            'date': '10/11/2017 17:45',
            'location': 'Local du HAUM',
            'title': 'Test débile',
            'desc': 'Un super test complètement débile'}})
def test_add_seance(instance):
    """Try to add a seance to the agenda."""
    # A seance only needs a date.
    args = "add_seance 10/11/2017 11:42"
    instance.handle(build_command("agenda " + args, voiced=True))
    instance.rabbit.publish.assert_called_with('agenda.query', {
        'command': 'add_seance',
        'source': 'irc',
        'arguments': {
            'date': '10/11/2017 11:42'}})
def test_modify(instance):
    """Try to modify an event already in the agenda."""
    # Everything after the field name is joined into the new value.
    args = "modify 42 titre Un super nouveau titre"
    instance.handle(build_command("agenda " + args, voiced=True))
    instance.rabbit.publish.assert_called_with('agenda.query', {
        'command': 'modify',
        'source': 'irc',
        'arguments': {
            'id': 42,
            'field': 'titre',
            'new_value': 'Un super nouveau titre'}})
def test_remove(instance):
    """Try to remove an event already in the agenda."""
    # The numeric id is parsed to an int before publication.
    args = "remove 42"
    instance.handle(build_command("agenda " + args, voiced=True))
    instance.rabbit.publish.assert_called_with('agenda.query', {
        'command': 'remove',
        'source': 'irc',
        'arguments': {
            'id': 42}})
|
rolandgeider/wger
|
wger/utils/generic_views.py
|
Python
|
agpl-3.0
| 10,530
| 0.00095
|
# -*- coding: utf-8 -*-
# This file is part of wger Workout Manager.
#
# wger Workout Manager is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# wger Workout Manager is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# Standard Library
import logging

# Django
from django.contrib import messages
from django.contrib.auth.mixins import PermissionRequiredMixin
from django.http import (
    HttpResponseForbidden,
    HttpResponseRedirect
)
from django.urls import reverse_lazy
from django.utils.text import slugify
from django.utils.translation import ugettext_lazy
from django.views.generic import TemplateView
from django.views.generic.edit import ModelFormMixin

# Third Party
import bleach
from crispy_forms.helper import FormHelper
from crispy_forms.layout import (
    ButtonHolder,
    Layout,
    Submit
)

# wger
from wger.utils.constants import (
    HTML_ATTRIBUTES_WHITELIST,
    HTML_STYLES_WHITELIST,
    HTML_TAG_WHITELIST
)

logger = logging.getLogger(__name__)
class WgerMultiplePermissionRequiredMixin(PermissionRequiredMixin):
    """
    A PermissionRequiredMixin that checks that the user has at least one
    permission instead of all of them.
    """

    def has_permission(self):
        # OR semantics (the stock mixin requires *all* permissions): the
        # first matching permission grants access; any() short-circuits.
        user = self.request.user
        return any(user.has_perm(perm) for perm in self.get_permission_required())
class WgerPermissionMixin(object):
    """
    Custom permission mixin

    This simply checks that the user has the given permissions to access a
    resource and makes writing customized generic views easier.
    """
    permission_required = False
    """
    The name of the permission required to access this class.
    This can be a string or a tuple, in the latter case having any of the permissions
    listed is enough to access the resource
    """
    login_required = False
    """
    Set to True to restrict view to logged in users
    """

    def dispatch(self, request, *args, **kwargs):
        """
        Check permissions and dispatch
        """
        if self.login_required or self.permission_required:
            # Anonymous users are redirected to login, preserving the target URL.
            if not request.user.is_authenticated:
                return HttpResponseRedirect(reverse_lazy('core:user:login')
                                            + '?next={0}'.format(request.path))

            if self.permission_required:
                has_permission = False
                # A tuple is treated with OR semantics: any one of the listed
                # permissions grants access.
                if isinstance(self.permission_required, tuple):
                    for permission in self.permission_required:
                        if request.user.has_perm(permission):
                            has_permission = True
                elif request.user.has_perm(self.permission_required):
                    has_permission = True

                if not has_permission:
                    return HttpResponseForbidden('You are not allowed to access this object')

        # Dispatch normally
        return super(WgerPermissionMixin, self).dispatch(request, *args, **kwargs)
# , PermissionRequiredMixin
class WgerFormMixin(ModelFormMixin):
    """
    Mixin shared by the project's form views: crispy-forms helper setup,
    ownership checks, HTML sanitising and success messages.
    """
    template_name = 'form.html'

    custom_js = ''
    """
    Custom javascript to be executed.
    """

    form_action = ''
    form_action_urlname = ''

    sidebar = ''
    """
    Name of a template that will be included in the sidebar
    """

    title = ''
    """
    Title used in the form
    """

    owner_object = False
    """
    The object that holds the owner information. This only needs to be set if
    the model doesn't provide a get_owner_object() method
    """

    submit_text = ugettext_lazy('Save')
    """
    Text used in the submit button, default _('save')
    """

    clean_html = ()
    """
    List of form fields that should be passed to bleach to clean the html
    """

    messages = ''
    """
    A message to display on success
    """

    def get_context_data(self, **kwargs):
        """
        Set context data
        """
        context = super(WgerFormMixin, self).get_context_data(**kwargs)

        context['sidebar'] = self.sidebar
        context['title'] = self.title

        # Custom JS code on form (autocompleter, editor, etc.)
        context['custom_js'] = self.custom_js

        return context

    def dispatch(self, request, *args, **kwargs):
        """
        Custom dispatch method.

        This basically only checks for ownerships of editable/deletable
        objects and return a HttpResponseForbidden response if the user
        is not the owner.
        """
        # These seem to be necessary for calling get_object
        self.kwargs = kwargs
        self.request = request

        # For new objects, we have to manually load the owner object
        if self.owner_object:
            owner_object = self.owner_object['class'].objects.get(
                pk=kwargs[self.owner_object['pk']])
        else:
            # On CreateViews we don't have an object, so just ignore it
            try:
                owner_object = self.get_object().get_owner_object()
            except AttributeError:
                owner_object = False

        # Nothing to see, please move along
        if owner_object and owner_object.user != self.request.user:
            return HttpResponseForbidden('You are not allowed to access this object')

        # Dispatch normally
        return super(WgerFormMixin, self).dispatch(request, *args, **kwargs)

    def get_messages(self):
        """
        Getter for success message. Can be overwritten to e.g. to provide the
        name of the object.
        """
        return self.messages

    def get_form(self, form_class=None):
        """Return an instance of the form to be used in this view."""
        form = super(WgerFormMixin, self).get_form(form_class)
        # Only build a default crispy helper when the form does not already
        # define one of its own.
        if not hasattr(form, "helper"):
            form.helper = FormHelper()
            form.helper.form_id = slugify(self.request.path)
            form.helper.form_method = 'post'
            form.helper.form_action = self.request.path
            form.helper.add_input(Submit('submit', self.submit_text, css_class='btn-success btn-block'))
            form.helper.form_class = 'wger-form'
        return form

    def form_invalid(self, form):
        """
        Log form errors to the console
        """
        logger.debug(form.errors)
        return super(WgerFormMixin, self).form_invalid(form)

    def form_valid(self, form):
        """
        Pre-process the form, cleaning up the HTML code found in the fields
        given in clean_html. All HTML tags, attributes and styles not in the
        whitelists are stripped from the output, leaving only the text content:

        <table><tr><td>foo</td></tr></table> simply becomes 'foo'
        """
        for field in self.clean_html:
            setattr(form.instance, field, bleach.clean(getattr(form.instance, field),
                                                       tags=HTML_TAG_WHITELIST,
                                                       attributes=HTML_ATTRIBUTES_WHITELIST,
                                                       styles=HTML_STYLES_WHITELIST,
                                                       strip=True))

        if self.get_messages():
            messages.success(self.request, self.get_messages())
        return super(WgerFormMixin, self).form_valid(form)
class WgerDeleteMixin(ModelFormMixin):
form_action = ''
form_action_urlname = ''
title = ''
delete_message_extra = ''
delete_message = ugettext_lazy('Yes, delete')
template_name = 'delete.html'
messages = ''
def get_context_data(self, **kwargs):
"""
Set necessary template data to correctly render the form
"""
# Call the base implementation first to get a context
|
wk8/Brive
|
backend.py
|
Python
|
unlicense
| 8,783
| 0
|
# -*- coding: utf-8 -*-
import os
import errno
import time
import tarfile
import shutil
import re
from utils import *
import configuration
# a helper class for actual backends
class BaseBackend(object):
    """Common plumbing shared by the concrete backup backends.

    Subclasses override need_to_fetch_contents/save/close_user/clean_up.
    """

    def __init__(self, keep_dirs):
        # Root directory for all backups; mandatory configuration key.
        self._root_dir = configuration.Configuration.get(
            'backend_root_dir', not_null=True
        )
        self._keep_dirs = keep_dirs
        # Session names are UTC timestamps (see _generate_session_name).
        self._session_name = self._generate_session_name()
        Log.verbose(u'Current session: {}'.format(self._session_name))

    # can be overriden for more elaborate backends
    def need_to_fetch_contents(self, user, document):
        return True

    # equivalent to *nix's _mkdir -p
    def _mkdir(self, path=''):
        try:
            os.makedirs(os.path.join(self._root_dir, path))
        except OSError as ex:
            # only "already exists" is fine; anything else is re-raised
            if ex.errno != errno.EEXIST:
                raise

    # equivalent to *nix's rm -rf
    def _delete(self, name):
        path = os.path.join(self._root_dir, name)
        if os.path.isdir(path):
            shutil.rmtree(path)
        elif os.path.isfile(path):
            os.remove(path)

    def finalize(self):
        # NOTE(review): "Finalazing" typo kept — it is a runtime log string.
        Log.verbose(u'Finalazing session: {}'.format(self._session_name))

    # called when this user is done
    def close_user(self, user):
        pass

    # called to save that doc for that user
    def save(self, user, document):
        pass

    # called to clean up if there was an exception halfway through
    def clean_up(self):
        pass

    # should return the backup dir of file name for that login
    def _get_backup_name_for_user(self, login):
        return login

    # should return the login from a backup dir or file name
    # reverse of _get_backup_name_for_user
    def _get_login_from_name(self, name):
        return name

    # UTC ISO-8601 time
    _UTC_TIME_PATTERN = r'%Y-%m-%dT%H%M%SZ'

    @staticmethod
    def _generate_session_name():
        # e.g. 2014-01-31T235959Z: lexicographic order == chronological order
        return time.strftime(BaseBackend._UTC_TIME_PATTERN, time.gmtime())

    @staticmethod
    def _date_from_session_name(name):
        return time.strptime(name, BaseBackend._UTC_TIME_PATTERN)
# doens't do anything, just say it was asked to save
# mainly for debugging purposes
class DummyBackend(BaseBackend):
    """No-op backend: only reports save requests (debugging aid)."""

    def save(self, user, document):
        print u'Backend save for user {}: {}'.format(user, repr(document))
# simplest backend possible: just download everything
class SimpleBackend(BaseBackend):
    """Simplest backend possible: just download everything to disk."""

    def __init__(self, keep_dirs):
        super(SimpleBackend, self).__init__(keep_dirs)
        # One sub-directory per session under the backend root.
        self._mkdir(self._session_name)
        self._current_dir = os.path.join(self._root_dir, self._session_name)
        Log.debug('SimpleBackend loaded')

    def save(self, user, document):
        path = self._get_path(user, document)
        self._mkdir(os.path.join(self._session_name, path))
        prefix = os.path.join(self._current_dir, path)
        for document_content in document.contents:
            path = os.path.join(prefix, document_content.file_name)
            Log.debug(u'Writing {}\'s {} to {}'.format(
                user.login, document.title, path
            ))
            # "with" guarantees the handle is closed even if the write fails
            # (the previous open/write/close leaked the handle on error).
            with open(path, 'w') as f:
                document_content.write_to_file(f)

    def clean_up(self):
        Log.verbose(u'Unexpected shutdown, deleting {} folder'
                    .format(self._current_dir))
        self._delete(self._session_name)

    def _get_path(self, user, document):
        # Keep the document's own sub-path only when requested.
        path = os.path.join(
            user.login, document.path if self._keep_dirs else ''
        )
        return path

    # returns None if the current name is not a backup dir
    # and the date for this backup if it is
    def _get_backup_date(self, name):
        if os.path.isdir(os.path.join(self._root_dir, name)):
            try:
                return self._date_from_session_name(name)
            except ValueError:
                # not a backup dir!
                pass
        return None

    _SECS_PER_DAY = 86400

    # returns true iff name is a backup dir older than
    # the prescribed # of days
    def _should_delete_old_saves(self, name, days):
        bckup_time = self._get_backup_date(name)
        if not bckup_time:
            return False
        current_session_date = self._date_from_session_name(self._session_name)
        diff = time.mktime(current_session_date) - time.mktime(bckup_time)
        return diff > days * self._SECS_PER_DAY

    def _delete_old_saves_in_session(self, session_name, logins):
        current_bckup = os.path.join(self._root_dir, session_name)
        Log.debug(u'Processing old session {}'.format(current_bckup))
        for name in os.listdir(current_bckup):
            login = self._get_login_from_name(name)
            if login and login in logins:
                path_to_del = os.path.join(session_name, name)
                Log.verbose(u'Deleting obsolete path {}'.format(path_to_del))
                self._delete(path_to_del)
        # delete the whole dir if there's nothing left
        try:
            os.rmdir(current_bckup)
            Log.verbose(u'Deleting empty backup dir {}'.format(current_bckup))
        except OSError as ex:
            # ignore it if it's just not empty
            if ex.errno != errno.ENOTEMPTY:
                raise

    # deletes previous saves for those logins,
    # dating back more than the provided # of days
    def _do_delete_old_saves(self, logins, days):
        Log.verbose(
            u'Deleting backups older than {} days for users {}'
            .format(days, logins)
        )
        for name in os.listdir(self._root_dir):
            if self._should_delete_old_saves(name, days):
                self._delete_old_saves_in_session(name, logins)

    # deletes previous saves for users successfully saved during the current
    # session, whose previous backup is older than the provided # of days
    def delete_old_saves(self, days):
        Log.debug('About to delete old backups...')
        self._do_delete_old_saves(
            [self._get_login_from_name(name)
             for name in os.listdir(self._current_dir)],
            days
        )
# also downloads everything, but compresses it
class TarBackend(SimpleBackend):
def __init__(self, keep_dirs):
super(TarBackend, self).__init__(keep_dirs)
# get the compression format
self._format = configuration.Configuration.get(
'backend_compression_format', not_null=True
)
if self._format not in ('gz', 'bz2'):
raise Exception(
'The compression format must be either gz or bz2, ' +
u'{} given'.format(format)
)
self._tar_files = dict()
Log.debug('TarBackend loaded')
# should return the backup dir of file name for that login
def _get_backup_name_for_user(self, login):
return login + '.tar.' + self._format
# pattern for <login>.tar.<gz|bz2>; reverse of _get_backup_name_for_user
_login_from_name_regex = re.compile(r'^(.*)\.tar\.(gz|bz2)$')
def _get_login_from_name(self, name):
    """Extract the login from a backup file name, or None if it does not
    look like one of our archives."""
    match = self._login_from_name_regex.match(name)
    if match is None:
        return None
    return match.group(1)
def _get_tarfile(self, user, create_if_doesnt_exist=True):
    """Return the tar archive for *user*, creating it on first use.

    Returns None when no archive exists yet and creation is disabled.
    """
    tar = self._tar_files.get(user.login)
    if tar is None:
        if not create_if_doesnt_exist:
            return None
        archive_path = os.path.join(
            self._current_dir, self._get_backup_name_for_user(user.login)
        )
        tar = tarfile.open(archive_path, 'w:' + self._format)
        self._tar_files[user.login] = tar
    return tar
def save(self, user, document):
tar_file = self._get_tarfile(user)
for document_content in document.contents:
path = self._get_path(user, document)
path = os.path.join(path, document_content.file_name)
Log.debug(u'Writing {}\'s {} to {}'.format(
user.login, document.title, path
))
|
yunfanz/ReionBub
|
Choud14/FZH04.py
|
Python
|
mit
| 5,526
| 0.056279
|
import numpy as n, matplotlib.pyplot as p, scipy.special
import cosmolopy.perturbation as pb
import cosmolopy.density as cd
from scipy.integrate import quad,tplquad
import itertools
from scipy.interpolate import interp1d
from scipy.interpolate import RectBivariateSpline as RBS
import optparse, sys
from sigmas import sig0
# command-line options: -d initial overdensity, -m mass multiplier, -z redshift
o = optparse.OptionParser()
o.add_option('-d','--del0', dest='del0', default=5.)
o.add_option('-m','--mul', dest='mul', default=1.)
o.add_option('-z','--red', dest='red', default=12.)
opts,args = o.parse_args(sys.argv[1:])
print opts, args
# Planck 2013 cosmological parameters (as used by cosmolopy below)
Om,sig8,ns,h,Ob = 0.315, 0.829, 0.96, 0.673, 0.0487
Planck13 = {'baryonic_effects':True,'omega_k_0':0,'omega_M_0':0.315, 'omega_b_0':0.0487, 'n':0.96, 'N_nu':0, 'omega_lambda_0':0.685,'omega_n_0':0., 'sigma_8':0.829,'h':0.673}
cosmo = Planck13
def m2R(m):
    """Lagrangian radius (Mpc) of the sphere enclosing mass m (Msun)."""
    mean_density = cd.cosmo_densities(**cosmo)[1]  # msun/Mpc
    return (3 * m / 4 / n.pi / mean_density) ** (1. / 3)
def m2V(m):
    """Comoving Lagrangian volume corresponding to mass m (Msun)."""
    mean_density = cd.cosmo_densities(**cosmo)[1]  # msun/Mpc
    return m / mean_density
def R2m(RL):
    """Mass (Msun) inside a Lagrangian sphere of radius RL; inverse of m2R."""
    mean_density = cd.cosmo_densities(**cosmo)[1]  # msun/Mpc
    return 4 * n.pi / 3 * mean_density * RL ** 3
def mmin(z, Tvir=1.E4):
    """Minimum (virial) halo mass able to host sources at redshift z,
    for a virial temperature threshold Tvir (K)."""
    return pb.virial_mass(Tvir, z, **cosmo)
# precomputed mass <-> variance lookup table loaded from m2S.npz
dmS = n.load('m2S.npz')
MLtemp,SLtemp = dmS['arr_0'],dmS['arr_1']
fs2m = interp1d(SLtemp,MLtemp,kind='cubic')  # cubic interpolator: S -> M
def S2M(S):
    """Invert the mass-variance relation via the cubic interpolator."""
    return fs2m(S)
def Deltac(z):
    """Critical collapse overdensity extrapolated to redshift z."""
    growth = pb.fgrowth(z, cosmo['omega_M_0'])  # = D(z)/D(0)
    return 1.686 / growth
	#return 1.686*fgrowth
######################## SIZE DISTRIBUTION #############################
####################### FZH04 ##############################
def fFZH(S, zeta, B0, B1):
    """FZH04 first-crossing distribution for the linear barrier
    B(S) = B0 + B1*S.  (`zeta` is unused here; kept for interface
    compatibility with callers.)"""
    gaussian_norm = B0 / n.sqrt(2 * n.pi * S ** 3)
    return gaussian_norm * n.exp(-B0 ** 2 / 2 / S - B0 * B1 - B1 ** 2 * S / 2)
def BFZH(S0, deltac, smin, K):
    """FZH04 ionization barrier as a function of the variance S0."""
    deficit = n.sqrt(2 * (smin - S0)) * K
    return deltac - deficit
def BFZHlin(S0, deltac, smin, K):
    """Linearized FZH04 barrier: intercept + slope * S0."""
    intercept = deltac - K * n.sqrt(2 * smin)
    slope = K / n.sqrt(2 * smin)
    return intercept + slope * S0
def dlnBFdlnS0(S0, deltac, smin, K, d=0.001):
    """Logarithmic derivative dlnB/dlnS0 of the FZH04 barrier,
    by central finite difference with step d."""
    Bp = BFZH(S0 + d, deltac, smin, K)
    Bo = BFZH(S0, deltac, smin, K)
    Bm = BFZH(S0 - d, deltac, smin, K)
    return S0 / Bo * (Bp - Bm) / 2 / d
def dlnBFlindlnS0(S0, deltac, smin, K, d=0.001):
    """Logarithmic derivative of the linearized FZH04 barrier,
    by central finite difference with step d."""
    Bp = BFZHlin(S0 + d, deltac, smin, K)
    Bo = BFZHlin(S0, deltac, smin, K)
    Bm = BFZHlin(S0 - d, deltac, smin, K)
    return S0 / Bo * (Bp - Bm) / 2 / d
##### m_min
# lookup table for the spherical-collapse parameter theta as a function
# of del0/(1+z), loaded from theta.npz
dDoZ = n.load('theta.npz')
thetal,DoZl = dDoZ['arr_0'],dDoZ['arr_1']
ftheta = interp1d(DoZl,thetal,kind='cubic')  # del0/(1+z) -> theta
def theta(z,del0):
    """Spherical-collapse phase angle for overdensity del0 at redshift z."""
    return ftheta(del0/(1+z))
def RphysoR0(del0, z):
    """Ratio of physical radius to initial Lagrangian radius for a
    region of overdensity del0 at redshift z (spherical collapse)."""
    angle = theta(z, del0)
    return 3. / 10 / del0 * (1 - n.cos(angle))
def RcovEul(del0, z):
    """Comoving Eulerian radius relative to the Lagrangian radius."""
    return RphysoR0(del0, z) * (1 + z)
def dlinSdlnR(lnR, d=0.001):
    """|dlnS/dlnR| evaluated by central finite difference in lnR."""
    upper = n.log(sig0(n.exp(lnR + d)))
    lower = n.log(sig0(n.exp(lnR - d)))
    return n.abs((upper - lower) / d / 2)
################################## MAIN ######################################
# Compute the FZH04 bubble size distribution and barrier for two redshifts.
# NOTE(review): indentation reconstructed — the nested defs close over the
# per-iteration z/deltac/smin, so they must live inside the loop; confirm
# p.show() sits after the loop in the original.
for z in [12., 16.]:
    PLOT = True
    zeta = 40.  # ionizing efficiency
    K = scipy.special.erfinv(1-1./zeta)
    Tvir = 1.E4
    #z = 12.
    deltac = Deltac(z)
    mm = mmin(z)
    M0min = zeta*mm  # smallest ionized region: zeta times the minimum halo mass
    RLmin,R0min = m2R(mm), m2R(M0min)
    print 'R',RLmin
    smin = sig0(RLmin)
    Rmin = R0min*RcovEul(deltac,z) #S0=smin, so del0=deltac; convertion from lagragian to comoving eulerian
    ####### FZH04 #######
    # linear-barrier coefficients B(S0) ~ bFZH0 + bFZH1*S0
    bFZH0 = deltac-K*n.sqrt(2*smin)
    bFZH1 = K/n.sqrt(2*smin)
    #bFZH = deltac-n.sqrt(2*(smin-S0))*K
    #bFZHlin = bFZH0+bFZH1*S0
    def dlnRdlnR0(lnR0,S0,del0):
        # Jacobian dlnR/dlnR0 (Eulerian vs Lagrangian log-radius);
        # note S0 and del0 parameters are recomputed locally
        S0 = sig0(n.exp(lnR0))
        del0 = BFZH(S0,deltac,smin,K)
        th = theta(z,del0)
        thfactor = 1-3./2*th*(th-n.sin(th))/(1-n.cos(th))**2
        res = 1-dlinSdlnR(lnR0)*dlnBFdlnS0(S0,deltac,smin,K)*thfactor
        return res
    def V0dndlnR0(lnR0):
        # Lagrangian volume-weighted size distribution
        S0 = sig0(n.exp(lnR0))
        return S0*fFZH(S0,zeta,bFZH0,bFZH1)*dlinSdlnR(lnR0)
    def VdndlnR0(lnR0):
        # Eulerian volume-weighted distribution in Lagrangian coordinate
        S0 = sig0(n.exp(lnR0))
        del0 = BFZHlin(S0,deltac,smin,K)
        #lnR0 = n.log(n.exp(lnR)/RcovEul(del0,z))
        VoV0 = (RcovEul(del0,z))**3
        #return VoV0/dlnRdlnR0(lnR0,S0,del0)*S0*fFZH(S0,zeta,bFZH0,bFZH1)*dlinSdlnR(lnR0)
        return VoV0*S0*fFZH(S0,zeta,bFZH0,bFZH1)*dlinSdlnR(lnR0)
    def VdndlnR(lnR0):
        # Eulerian volume-weighted distribution including the Jacobian
        S0 = sig0(n.exp(lnR0))
        del0 = BFZH(S0,deltac,smin,K)
        VoV0 = (RcovEul(del0,z))**3
        return VoV0/dlnRdlnR0(lnR0,S0,del0)*S0*fFZH(S0,zeta,bFZH0,bFZH1)*dlinSdlnR(lnR0)
    if True:
        # normalize and plot the size distribution
        print 'computing z=',z
        #Q = quad(lambda lnR: VdndlnR(lnR),n.log(Rmin),3.5) #integrated over eulerian coordinates
        Q = quad(lambda lnR0: VdndlnR0(lnR0),n.log(R0min),3.5) #integrated over eulerian coordinates
        print 'Q=',Q
        Q = Q[0]
        #######
        lnR0 = n.arange(n.log(R0min),3,0.03)
        S0list = []
        for lnr0 in lnR0: S0list.append(sig0(n.exp(lnr0)))
        S0list = n.array(S0list)
        #lnR = n.arange(n.log(Rmin),3,0.1)
        del0list = BFZH(S0list,deltac,smin,K)
        lnR = n.log(n.exp(lnR0)*RcovEul(del0list,z))
        normsize = []
        for lnr0 in lnR0:
            res = VdndlnR(lnr0)/Q
            print n.exp(lnr0),res
            normsize.append(res)
        p.figure(1)
        p.semilogx(n.exp(lnR),normsize,label=str(z))
        p.legend()
    if True:
        # plot the exact vs linearized barrier
        S0max = sig0(m2R(M0min))
        S0 = n.arange(0,S0max,0.2)
        bFZH = deltac-n.sqrt(2*(smin-S0))*K
        bFZHlin = bFZH0+bFZH1*S0
        p.figure(2)
        p.plot(S0,bFZH,'b', label=str(z))
        p.plot(S0,bFZHlin,'b.-')
        p.ylim([0,20])
        p.xlim([0,25])
        p.legend()
    if False: #for benchmark
        for i in range(1000):
            S0max = sig0(m2R(M0min))
            S0 = n.arange(0,S0max,0.2)
            bFZH = deltac-n.sqrt(2*(smin-S0))*K
            bFZHlin = bFZH0+bFZH1*S0
p.show()
################
# Z = float(opts.red)
# M0 = zeta*mmin(Z)*float(opts.mul)
# del0 = float(opts.del0)
###########################
# dlist = n.linspace(8,10,10)
# for del0 in dlist:
# res = fcoll_trapz_log(del0,M0,Z)
# print m2S(M0), res[0]
# if False:
# p.figure()
# p.plot(res[1],res[2])
# p.show()
#tplquad(All,mm,M0,lambda x: 0, lambda x: 5., lambda x,y: gam(m2R(x))*y,lambda x,y: 10.,args=(del0,M0,z))
|
apeschar/webwinkelkeur-virtuemart
|
test/test.py
|
Python
|
gpl-3.0
| 3,695
| 0.001083
|
#!/usr/bin/env python3
import subprocess
import sys
from os.path import abspath, dirname, join
from time import sleep, perf_counter
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
class Test(object):
def run(self, ip):
    """Open a Chrome session against the host at *ip* and run the
    Joomla/VirtueMart install flow."""
    browser = webdriver.Chrome()
    self.br = browser
    browser.get('http://' + ip)
    browser.implicitly_wait(1)
    self.install()
def install(self):
    """Drive the full Joomla install, then install and configure the
    WebwinkelKeur VirtueMart package, ending on the preview sidebar."""
    # Joomla installer: site configuration page
    self.fill('jform[site_name]', 'test')
    self.fill('jform[admin_email]', 'test@test.com')
    self.fill('jform[admin_user]', 'test')
    self.fill('jform[admin_password]', 'test')
    self.fill('jform[admin_password2]', 'test')
    self.click('a[title=Next]')
    # database configuration page
    self.fill('jform[db_host]', 'db')
    self.fill('jform[db_user]', 'root')
    self.fill('jform[db_name]', 'test')
    self.fill('jform[db_prefix]', 'test_')
    self.click('a[title=Next]')
    # first attempt fails (leftover _Joomla* files); remove them in the
    # container and retry
    self.br.find_element_by_css_selector('.alert.alert-error')
    subprocess.run(['bash', '-c', "docker exec ${COMPOSE_PROJECT_NAME}_www_1 bash -c 'rm -f /var/www/html/installation/_Joomla*'"], check=True)
    self.click('a[title=Next]')
    self.click('a[title=Install]')
    self.click('[value="Remove installation folder"]', wait=30)
    self.wait_for('[value="Installation folder successfully removed."]')
    # log in to the admin backend
    self.click('a[title=Administrator]')
    self.fill('username', 'test')
    self.fill('passwd', 'test')
    self.click('.login-button')
    # install VirtueMart, then upload our extension package
    self.click_text('Install VirtueMart with sample data')
    self.click('a.menu-install')
    self.br.execute_script('arguments[0].style.display = ""', self.wait_for('#legacy-uploader'))
    self.br.find_element_by_name('install_package').send_keys(
        abspath(join(dirname(__file__), '..', 'dist', 'pkg_webwinkelkeur.zip')))
    sleep(1)
    # configure the WebwinkelKeur component and preview the result
    self.click_text('Components')
    self.click_text('WebwinkelKeur')
    self.fill('webwinkelkeur_wwk_shop_id', '1')
    self.fill('webwinkelkeur_wwk_api_key', 'abcd')
    self.click('[name=webwinkelkeur_invite]')
    self.click('.button-apply')
    self.click('a[title="Preview test"]')
    self.focus_tab()
    self.wait_for('.wwk--sidebar')
def fill(self, name, value):
    """Set a form field by name and fire a synthetic 'change' event so
    framework listeners notice the update."""
    field = self.br.find_element_by_name(name)
    self.br.execute_script('''
        arguments[0].value = arguments[1]
        var e = document.createEvent('HTMLEvents')
        e.initEvent('change', false, true)
        arguments[0].dispatchEvent(e)
    ''', field, str(value))
def click(self, css_selector, **kwargs):
    # click the element matching a CSS selector
    self.click_by(By.CSS_SELECTOR, css_selector, **kwargs)
def click_text(self, text, **kwargs):
    # click the link whose text contains *text*
    self.click_by(By.PARTIAL_LINK_TEXT, text, **kwargs)
def wait_for(self, css_selector, wait=30):
    # wait up to *wait* seconds for the selector to match; return the element
    return self.wait_by(By.CSS_SELECTOR, css_selector, wait=wait)
def click_by(self, *by, wait=1):
    """Wait for the element located by *by* and click it via JS
    (avoids 'element not interactable' issues with overlays)."""
    target = self.wait_by(*by, wait=wait)
    self.br.execute_script('arguments[0].click()', target)
def wait_by(self, *by, wait=1):
    # block until the locator matches (raises TimeoutException otherwise)
    return WebDriverWait(self.br, wait).until(EC.presence_of_element_located(by))
def focus_tab(self):
    """Wait up to 5 seconds for a second browser tab to appear, then
    switch the driver to the newest one."""
    deadline = perf_counter() + 5
    while len(self.br.window_handles) < 2:
        if perf_counter() > deadline:
            raise RuntimeError("Waiting for a new tab, but none found")
        sleep(0.1)
    self.br.switch_to_window(self.br.window_handles[-1])
if __name__ == '__main__':
Test().run(*sys.argv[1:])
|
cgvarela/vitess
|
test/tablet_test.py
|
Python
|
bsd-3-clause
| 2,879
| 0.015283
|
#!/usr/bin/env python
# coding: utf-8
"""Unit tests for vtdb.tablet"""
import unittest
import mock
from net import gorpc
import utils
from vtdb import tablet
class TestRPCCallAndExtract(unittest.TestCase):
  """Tests rpc_call_and_extract_error is tolerant to various responses."""

  tablet_conn = tablet.TabletConnection(
      'addr', 'type', 'keyspace', 'shard', 30, caller_id='dev')

  # sentinel meaning "leave response.reply at its default value"
  _UNSET = object()

  def _rpc_with_reply(self, reply=_UNSET):
    """Run rpc_call_and_extract_error against a mocked client whose
    GoRpcResponse carries the given reply payload (DRY helper; the
    mock/response boilerplate was previously repeated in every test)."""
    with mock.patch.object(
        self.tablet_conn, 'client', autospec=True) as mock_client:
      response = gorpc.GoRpcResponse()
      if reply is not self._UNSET:
        response.reply = reply
      mock_client.call.return_value = response
      self.tablet_conn.rpc_call_and_extract_error('method', 'req')

  def test_reply_is_none(self):
    self._rpc_with_reply()

  def test_reply_is_empty_string(self):
    self._rpc_with_reply('')

  def test_reply_is_string(self):
    self._rpc_with_reply('foo')

  def test_reply_is_dict(self):
    self._rpc_with_reply({'foo': 'bar'})

  def test_reply_has_non_dict_err(self):
    with self.assertRaisesRegexp(gorpc.AppError, 'Missing error message'):
      self._rpc_with_reply({'Err': 'foo'})

  def test_reply_has_missing_err_message(self):
    with self.assertRaisesRegexp(gorpc.AppError, 'Missing error message'):
      self._rpc_with_reply({'Err': {'foo': 'bar'}})

  def test_reply_has_err_message(self):
    with self.assertRaisesRegexp(gorpc.AppError, "'bar', 'method'"):
      self._rpc_with_reply({'Err': {'Message': 'bar'}})
if __name__ == '__main__':
utils.main()
|
UCSD-E4E/qx100-interfacing
|
qx100.py
|
Python
|
gpl-2.0
| 3,536
| 0.019231
|
#!/usr/bin/env python
"""QX100 interfacing code for python"""
import json
import requests
import numpy as np
import cv2
import threading
from cmd import Cmd
class LiveviewThread(threading.Thread):
    """Background thread that pulls liveview JPEG frames from the camera
    and displays them until stop_running() is called."""

    # class-level default so `running` exists even before __init__
    running = True

    def __init__(self, url=None):
        # BUG FIX: this was misspelled `__init` so it never ran as a
        # constructor, never chained to Thread.__init__, and `url` was
        # never stored.  `url` is now optional for backward compatibility
        # with the existing no-argument call site; run() obtains the
        # liveview URL itself via start_liveview().
        threading.Thread.__init__(self)
        self.url = url
        self.running = True

    def run(self):
        s = start_liveview()
        data = open_stream(s)
        while self.running:
            jpg = decode_frame(data)
            show_img(jpg)
        data.raw.close()
        cv2.destroyWindow('liveview')

    def stop_running(self):
        # cooperative shutdown flag read by run()
        self.running = False
class MyPrompt(Cmd):
    """Interactive command prompt driving the QX100 camera."""
    # single shared liveview thread; note a threading.Thread can only be
    # started once, so do_start_liveview works a single time per run
    LVthread = LiveviewThread()
    def do_t(self, args):
        # 't': take one picture
        take_picture()
    def do_loop(self, args):
        # 'loop N': take N pictures, printing the index after each shot
        for i in range(int(args)):
            take_picture()
            print i
    def do_start_liveview(self, args):
        self.LVthread.start()
    def do_stop_liveview(self, args):
        self.LVthread.stop_running()
    def do_quit(self, args):
        # stop the liveview thread (safe even if never started) and exit
        self.do_stop_liveview([])
        raise SystemExit
def get_payload(method, params):
    """Build a Sony camera-API JSON-RPC payload for *method*/*params*."""
    payload = {
        "method": method,
        "params": params,
        "id": 1,
        "version": "1.0"
    }
    return payload
def take_picture():
    """Trigger a capture; return the postview image URL as a str."""
    payload = get_payload("actTakePicture", [])
    headers = {'Content-Type': 'application/json'}
    response = requests.post('http://10.0.0.1:10000/sony/camera',
                             data=json.dumps(payload), headers=headers)
    result = response.json()['result']
    return str(result[0][0])
def get_event():
    """Poll the camera's getEvent endpoint; return the raw response."""
    payload = get_payload("getEvent", [False])
    headers = {'Content-Type': 'application/json'}
    reply = requests.post('http://10.0.0.1:10000/sony/camera',
                          data=json.dumps(payload), headers=headers)
    return reply
def get_picture(url, filename):
    """Download *url* to *filename* in 1 KiB chunks."""
    response = requests.get(url)
    with open(filename, 'wb') as fd:
        for chunk in response.iter_content(1024):
            fd.write(chunk)
### LIVEVIEW STUFF
def start_liveview():
    """Ask the camera to start liveview; return the stream URL as a str."""
    payload = get_payload("startLiveview", [])
    headers = {'Content-Type': 'application/json'}
    response = requests.post('http://10.0.0.1:10000/sony/camera',
                             data=json.dumps(payload), headers=headers)
    result = response.json()['result']
    return str(result[0])
def open_stream(url):
    # streamed GET so frames can be read incrementally from response.raw
    return requests.get(url, stream=True)
def decode_frame(data):
# decode p
|
acket header
start = ord(data.raw.read(1))
if(start != 0xFF):
print 'bad start byte\nexpected 0xFF got %x'%start
return
pkt_type = ord(data.raw.read(1))
if(pkt_type
|
!= 0x01):
print 'not a liveview packet'
return
frameno = int(data.raw.read(2).encode('hex'), 16)
timestamp = int(data.raw.read(4).encode('hex'), 16)
# decode liveview header
start = int(data.raw.read(4).encode('hex'), 16)
if(start != 0x24356879):
print 'expected 0x24356879 got %x'%start
return
jpg_size = int(data.raw.read(3).encode('hex'), 16)
pad_size = ord(data.raw.read(1))
# read out the reserved header
data.raw.read(4)
fixed_byte = ord(data.raw.read(1))
if(fixed_byte is not 0x00):
print 'expected 0x00 got %x'%fixed_byte
return
data.raw.read(115)
# read out the jpg
jpg_data = data.raw.read(jpg_size)
data.raw.read(pad_size)
return jpg_data
def show_img(str_jpg):
    # decode the JPEG byte string into an image and display it
    # NOTE(review): uses legacy OpenCV constant names (cv2.CV_*) and
    # np.fromstring -- assumes an old cv2/numpy build; confirm before upgrading
    nparr = np.fromstring(str_jpg, np.uint8)
    img_np = cv2.imdecode(nparr, cv2.CV_LOAD_IMAGE_COLOR)
    cv2.namedWindow('liveview', flags=cv2.CV_WINDOW_AUTOSIZE)
    cv2.imshow('liveview', img_np)
    cv2.waitKey(1)
prompt = MyPrompt()
prompt.prompt = '> '
prompt.cmdloop('starting qx100 control')
|
atreal/atrealtheme.alderamin
|
src/atrealtheme/alderamin/tests/test_setup.py
|
Python
|
gpl-2.0
| 1,242
| 0.003221
|
# -*- coding: utf-8 -*-
"""Setup/installation tests for this package."""
from atrealtheme.alderamin.testing import IntegrationTestCase
from plone import api
class TestInstall(IntegrationTestCase):
    """Test installation of atrealtheme.alderamin into Plone."""

    def setUp(self):
        """Custom shared utility setup for tests."""
        self.portal = self.layer['portal']
        self.installer = api.portal.get_tool('portal_quickinstaller')

    def test_product_installed(self):
        """Test if atrealtheme.alderamin is installed with portal_quickinstaller."""
        self.assertTrue(self.installer.isProductInstalled('atrealtheme.alderamin'))

    def test_uninstall(self):
        """Test if atrealtheme.alderamin is cleanly uninstalled."""
        self.installer.uninstallProducts(['atrealtheme.alderamin'])
        self.assertFalse(self.installer.isProductInstalled('atrealtheme.alderamin'))

    # browserlayer.xml
    def test_browserlayer(self):
        """Test that IAtrealthemeAlderaminLayer is registered."""
        from atrealtheme.alderamin.interfaces import IAtrealthemeAlderaminLayer
        from plone.browserlayer import utils
        # assertTrue instead of the deprecated failUnless alias
        self.assertTrue(IAtrealthemeAlderaminLayer in utils.registered_layers())
|
duy/dhcpscapy
|
scripts/dhcpclientscapy.py
|
Python
|
agpl-3.0
| 11,570
| 0.005618
|
#! /usr/bin/env python
# vim:ts=4:sw=4:expandtab 2
# -*- coding: utf-8 -*-
'''
Based on https://github.com/mortnerDHCPv4v6
'''
# TODO:
# * refactor
# * read conf from dhclient.conf
# * daemonize
# * requests in loop
# * send renew according to renew time
# * implement release
# * implement nak case
# FIXME:
# * build package with most common BOOTP/DCHCP options
__author__ = "duy <duy at rhizoma dot tk>"
__copyright__ = "GPL v3"
from scapy.all import *
import random
import ipaddr
from time import sleep
import subprocess
import argparse
# for debugging
#CLIENT_PORT= 8001
#SERVER_PORT= 8000
CLIENT_PORT= 68
SERVER_PORT= 67
BROADCAST_ADDR = '255.255.255.255'
META_ADDR = '0.0.0.0'
BROADCAST_MAC = 'ff:ff:ff:ff:ff:ff'
MAX_DHCP_LEASE = 1500
LEASE_TIME = 43200 # 7776000
# "subnet_mask", "router", "name_server", "domain"
PARAM_REQ_LIST = '\x01\x03\x06\x0fw\xfc'# \x1c3
INIT_STATE = 0
BOUND_STATE = 1
RENEW_STATE = 2
REBIND_STATE = 3
REBOOT_STATE = 4
TIMEOUT_STATE = 5
RENEW_TIME_ON_LEASE = 1.0/2
REBIND_TIME_ON_LEASE = 7.0/8
class Limits:
    # inclusive bounds for randomly generated DHCP transaction IDs (xid)
    XID_MIN = 1
    XID_MAX = 900000000
def randomHostname(length=8, charset=None):
    """Return a random hostname of *length* characters from *charset*
    (default: uppercase letters + digits).

    FIX: import `string` explicitly instead of relying on scapy's
    star-import re-exporting it into this module's namespace.
    """
    import string
    charset = charset or string.ascii_uppercase + string.digits
    return ''.join(random.choice(charset) for x in range(length))
def genXid():
    # fresh random transaction id for a DHCP exchange
    return random.randint(Limits.XID_MIN, Limits.XID_MAX)
class DHCPv4Client(object):
def __init__(self, iface, server_port=None, client_port=None, server_ip=None,
             server_mac=None, hostname=None):
    """Build a DHCP client bound to *iface*.

    Unset endpoints default to the DHCP broadcast address/MAC and the
    standard client/server ports; the hostname defaults to a random one.
    """
    self.iface = iface
    self.state = INIT_STATE
    self.renew_time = 0
    self.rebind_time = 0
    self.server_port = server_port or SERVER_PORT
    self.client_port = client_port or CLIENT_PORT
    self.server_ip = server_ip or BROADCAST_ADDR
    self.server_mac = server_mac or BROADCAST_MAC
    self.client_ip = META_ADDR
    # raw hardware address of the interface (scapy helper)
    _, client_mac = get_if_raw_hwaddr(self.iface)
    self.client_mac = client_mac
    self.hostname = hostname or randomHostname()
    self.client_xid = genXid()
    # FIXME: when server xid is used?
    self.server_xid = None
    # fields below are filled in by parseOffer/parseACK
    self.server_id = None
    self.response_server_ip = None
    self.response_server_mac = None
    self.client_ip_offered = None
    self.subnet_mask = None
    self.offered_ip = None
    self.lease_time = None
    self.router = None
    self.name_server = None
    self.domain = None
    self.options = []
    self.callbacks = {}
    self.history = []
def __str__(self):
    # delegate to __repr__ so str() and repr() render identically
    return self.__repr__()
def __repr__(self):
return """DHCPv4 Client
Interface: %sp
Verbosity: %s
Client Configuration: | Server
-------------------------------------|------------------------------
IP = %-20s %-20s
HWAddr = %-20s %-20s
Hostname = %-20s
MASK = %-20s
xID = %-20s %-20s
DHCP Specific
--------------------
serverID = %-20s
Options = %-20s
Registered Callbacks
--------------------
%s
History
--------------------
%s
""" % (conf.iface, conf.verb,
self.client_ip,
self.server_ip,
self.client_mac,
self.server_mac,
self.hostname,
self.subnet_mask,
self.client_xid,
self.server_xid,
self.server_id,
repr(self.options),
|
self.callbacks,
self.history)
def register_callback(self, hook, func):
    # register func to be invoked when `hook` fires (see exec_callback)
    self.callbacks[hook] = func
def exec_callback(self, hook, args):
    """Record the hook in the history log and invoke its registered
    callback, if any.

    NOTE(review): `args` is accepted but never forwarded to the callback;
    kept as-is for interface compatibility with existing callers.
    """
    self.track_history("Hook:" + str(hook))
    # `in` instead of dict.has_key() (removed in Python 3)
    if hook in self.callbacks:
        self.callbacks[hook]()
def track_history(self, name=None):
    """Append an event name to the history log; defaults to the calling
    function's name (via frame inspection)."""
    from inspect import stack
    event = name or stack()[1][3]
    self.history.append(event)
def genDiscover(self):
    """Build a DHCPDISCOVER broadcast frame for this client."""
    discover_options = [
        ("message-type", "discover"),
        ("param_req_list", PARAM_REQ_LIST),
        ("max_dhcp_size", MAX_DHCP_LEASE),
        ("client_id", self.client_mac),
        ("lease_time", LEASE_TIME),
        ("hostname", self.hostname),
        "end"
    ]
    return (
        Ether(src=str2mac(self.client_mac), dst=self.server_mac) /
        IP(src=self.client_ip, dst=self.server_ip) /
        UDP(sport=self.client_port, dport=self.server_port) /
        BOOTP(chaddr=[self.client_mac], xid=self.client_xid) /
        DHCP(options=discover_options)
    )
def genRequest(self):
    """Build a DHCPREQUEST accepting the offer recorded by parseOffer."""
    dhcp_req = (
        Ether(src=str2mac(self.client_mac), dst=self.server_mac) /
        IP(src=self.client_ip, dst=self.server_ip) /
        UDP(sport=self.client_port, dport=self.server_port) /
        BOOTP(chaddr=[self.client_mac], xid=self.client_xid) /
        DHCP(options=[
            ("message-type", "request"),
            ("param_req_list", PARAM_REQ_LIST),
            ("max_dhcp_size", MAX_DHCP_LEASE),
            ("client_id", self.client_mac),
            ("requested_addr", self.client_ip_offered), # obtained from discover
            ("server_id", self.server_id), # obtained from discover
            ("hostname", self.hostname),
            "end"
        ])
    )
    return dhcp_req
def genRelease(self):
    """Build a DHCPRELEASE frame for the current lease."""
    dhcp_release = (
        Ether(src=str2mac(self.client_mac), dst=self.server_mac) /
        IP(src=self.client_ip, dst=self.server_ip) /
        UDP(sport=self.client_port, dport=self.server_port) /
        BOOTP(chaddr=[self.client_mac], xid=self.client_xid) /
        DHCP(options=[
            ("message-type", "release"),
            ("server_id", self.server_id), # obtained from discover
            ("client_id", self.client_mac),
            "end"
        ])
    )
    return dhcp_release
def parseOffer(self, packet):
print 'Parsing offer'
print packet.show()
self.response_server_ip = packet[IP].src
self.response_server_mac = packet[Ether].src
self.server_id = packet[BOOTP].siaddr
#FIXME: xid has to match the initial xid
# packet[BOOTP].xid
# FIXME: chaddr has to match client_mac
# str2mac(packet[BOOTP].chaddr)
# FIXME: check if yiaddr match current client ip or requested ip
self.client_ip_offered = packet[BOOTP].yiaddr
for option in packet[DHCP].options:
if type(option) == tuple:
if option[0] == 'subnet_mask':
self.subnet_mask = option[1]
if option[0] == 'router':
self.router = option[1]
if option[0] == 'domain':
self.domain = option[1]
if option[0] == 'name_server':
self.name_server = option[1]
if option[0] == 'lease_time':
self.lease_time = option[1]
def parseACK(self, packet):
print "Parsing ACK"
print packet.show()
# FIXME: check these fields match current ones?
#self.response_server_ip = packet[IP].src
#self.response_server_mac = packet[Ether].src
#self.server_id = packet[BOOTP].siaddr
#FIXME: xid has to match the initial xid
# packet[BOOTP].xid
# FIXME: chaddr has to match client_mac
# str2mac(packet[BOOTP].chaddr)
# FIXME: check if yiaddr match current client ip or requested ip
self.client_ip_offered = packet[BOOTP].yiaddr
#FIXME: check these options match offered ones?
for option in packet[DHCP].options:
if type(option) == tuple:
if option[0] == 'subnet_ma
|
simpleapples/light-blog
|
app/auth/forms/login_form.py
|
Python
|
mit
| 295
| 0.00678
|
from flask.ext.wtf import Form
from wtforms import StringField, PasswordField
|
from wtforms.validators import DataRequired, Length, Email
class LoginForm(Form):
email = StringField(validators=[DataRequired(), Length(1, 64), Email()])
password = PasswordField(v
|
alidators=[DataRequired()])
|
drestuart/delvelib
|
lib/pygcurse/pygcurse_old.py
|
Python
|
lgpl-3.0
| 110,209
| 0.006288
|
"""
Please forgive any typos or errors in the comments, I'll be cleaning them up as frequently as I can.
Pygcurse v0.1 alpha
Pygcurse (pronounced "pig curse") is a curses library emulator that runs on top of the Pygame framework. It provides an easy way to create text adventures, roguelikes, and console-style applications.
Unfortunately, the curses library that comes with the Python standard library does not work on Windows. The excellent Console module from effbot provides curses-like features, but it only runs on Windows and not Mac/Linux. By using Pygame, Pygcurse is able to run on all platforms.
Pygcurse provides several benefits over normal text-based stdio programs:
1) Color text and background.
2) The ability to move the cursor and print text anywhere in the console window.
3) The ability to make console apps that make use of the mouse.
4) The ability to have programs respond to individual key presses, instead of waiting for the user to type an entire string and press enter (as with input()/raw_input()).
5) Since the console window that Pygcurse uses is just a Pygame surface object, additional drawing and transformations can be applied to it. Multiple consoles can also be used in the same program.
Pygcurse requires Pygame to be installed. Pygame can be downloaded from http://pygame.org
Pygcurse was developed by Al Sweigart (al@inventwithpython.com)
https://github.com/asweigart/pygcurse
Simplified BSD License:
Copyright 2011 Al Sweigart. All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are
permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this list of
conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice, this list
of conditions and the following disclaimer in the documentation and/or other materials
provided with the distribution.
THIS SOFTWARE IS PROVIDED BY Al Sweigart ''AS IS'' AND ANY EXPRESS OR IMPLIED
WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL Al Sweigart OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
The views and conclusions contained in the software and documentation are those of the
authors and should not be interpreted as representing official policies, either expressed
or implied, of Al Sweigart.
"""
import copy
import time
import sys
import textwrap
import pygame
from pygame.locals import *
"""
Some nomenclature in this module's comments explained:
Cells:
The space for each character is called a cell in this module. Cells are all of an identical size, which is based on the font being used. (only a single font of a single size can be used in a PygcurseSurface object. Cell coordinates refer to the positions of characters on the surface. Pixel coordinates refer to the position of each pixel.
Scrolling:
The term "scrolling" refers to when a character is printed at the bottom right corner, which causes all the characters on the surface to be moved up and a blank row to be created at the bottom. The print() and write() functions causes scolls if it prints enough characters. The putchar() and putchars() functions do not.
Color parameters:
Several Pygcurse functions take colors for their parameters. These can almost always (there might be some exceptions) be:
1) A pygame.Color object.
2) An RGB tuple of three integers, 0 to 255 (like Pygame uses)
3) An RGBA tuple of four integers, 0 to 255 (like Pygame uses)
4) A string such as 'blue', 'lime', or 'gray' (or any of the strings listed in the colornames gloal dictionary. This dict can be updated with more colors if the user wants.)
5) None, which means use whatever color the cell already uses.
Region parameters:
A "region" defines an area of the surface. It can be the following formats:
1) Four-integer tuple (x, y, width, height)
2) Four-integer tuple (x, y, None, None) which means x,y and extending to the right & bottom edge of the surface
3) None or (None, None, None, None) which means the entire surface
4) pygame.Rect object
Note about flickering: If your program is experiencing a lot of flicker, than you should disable the self._autoupdate member. By default, this is enabled and the screen is redrawn after each method call that makes a change to the screen.
"""
DEFAULTFGCOLOR = pygame.Color(164, 164, 164, 255) # default foreground color is gray (must be a pygame.Color object)
DEFAULTBGCOLOR = pygame.Color(0, 0, 0, 255) # default background color is black (must be a pygame.Color object)
ERASECOLOR = pygame.Color(0, 0, 0, 0) # erase color has 0 alpha level (must be a pygame.Color object)
# Internally used constants:
_NEW_WINDOW = 'new_window'
FULLSCREEN = 'full_screen'
# Directional constants:
NORTH = 'N'
EAST = 'E'
SOUTH = 'S'
WEST = 'W'
NORTHEAST = 'NE'
NORTHWEST = 'NW'
SOUTHEAST = 'SE'
SOUTHWEST = 'SW'
# A mapping of strings to color objects.
# Users can add entries to make more names available to the color parameters.
colornames = {'white': pygame.Color(255, 255, 255),
              'yellow': pygame.Color(255, 255, 0),
              'fuchsia': pygame.Color(255, 0, 255),
              'red': pygame.Color(255, 0, 0),
              'silver': pygame.Color(192, 192, 192),
              'gray': pygame.Color(128, 128, 128),
              'olive': pygame.Color(128, 128, 0),
              'purple': pygame.Color(128, 0, 128),
              'maroon': pygame.Color(128, 0, 0),
              'aqua': pygame.Color( 0, 255, 255),
              'lime': pygame.Color( 0, 255, 0),
              'teal': pygame.Color( 0, 128, 128),
              'green': pygame.Color( 0, 128, 0),
              'blue': pygame.Color( 0, 0, 255),
              'navy': pygame.Color( 0, 0, 128),
              'black': pygame.Color( 0, 0, 0)}
class PygcurseSurface(object):
"""
A PygcurseSurface object is the ascii-based analog of Pygame's Surface objects. It represents a 2D field of ascii characters, exactly like a console terminal. Each cell can have a different characte
|
r, foreground color, background color, and RGB tint. The PygcurseSurface object also tracks the location of the cursor (where the print() and putchar() functions will output text) and the "input cursor" (the blinking cursor when the user is typing in characters.)
Each xy position on the surface is called a "cell". A cell can hold one and only one character.
The PygcurseSurface object contains a pygame.Surface object that it draws to. This pygame.Surface object in turn may have additional Pygame drawing functions called on it before being drawn to the screen with pygame.display.update().
It should be noted that none of the code in the pygcurse module should at all be considered thread-safe.
"""
_pygcurseClass = 'PygcurseSurface'
def __init__(self, width=80, height=25, font=None, fgcolor=DEFAULTFGCOLOR, bgcolor=DEFAULTBGCOLOR, windowsurface=None):
"""
Creates a new PygcurseSurface object.
- width and height are the number of characters the the object can display.
- font is a pygame.Font object used to display the characters. PygcurseSurface can only display one font of one size at a time. The size of the underlying pygame.Surface object is calculated from the font size and width/height accordingly. If None, then a default generic font is used.
- fgcolor is the foreground color (ie the color of the text). It is set to either a pygame.Color object, an RGB tuple, an RGBA tuple, or a string that is a key in the colornames dict.
- bgco
|
passiweinberger/nupic
|
tests/swarming/nupic/swarming/swarming_test.py
|
Python
|
agpl-3.0
| 101,562
| 0.009561
|
#!/usr/bin/env python
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2013, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
import sys
import os
import imp
import subprocess
import re
import json
import pprint
import shutil
import copy
import StringIO
import logging
import itertools
import numpy
import time
import math
import uuid
import tempfile
from pkg_resources import resource_filename
from optparse import OptionParser
from nupic.database.ClientJobsDAO import ClientJobsDAO
from nupic.support import configuration, initLogging
from nupic.support.unittesthelpers.testcasebase import (unittest,
TestCaseBase as HelperTestCaseBase)
from nupic.swarming import HypersearchWorker
from nupic.swarming.api import getSwarmModelParams, createAndStartSwarm
from nupic.swarming.hypersearch.utils import generatePersistentJobGUID
from nupic.swarming.DummyModelRunner import OPFDummyModelRunner
DEFAULT_JOB_TIMEOUT_SEC = 60 * 2
# Filters _debugOut messages
g_debug = True
# Our setUpModule entry block sets this to an instance of MyTestEnvironment()
g_myEnv = None
# These are the args after using the optparse
# This value for the swarm maturity window gives more repeatable results for
# unit tests that use multiple workers
g_repeatableSwarmMaturityWindow = 5
class MyTestEnvironment(object):
  """Holds command-line options and filesystem paths shared by the tests."""

  # =======================================================================
  def __init__(self):
    # Save all command line options.
    # NOTE(review): _ArgParser is presumably defined elsewhere in this
    # module -- confirm before moving this class to another file.
    self.options = _ArgParser.parseArgs()

    # Create the path to our source experiments; __file__ anchors the paths
    # to the directory containing this test module.
    # (A stray line break had split this statement in the original source.)
    thisFile = __file__
    testDir = os.path.split(os.path.abspath(thisFile))[0]
    self.testSrcExpDir = os.path.join(testDir, 'experiments')
    self.testSrcDataDir = os.path.join(testDir, 'data')
    return
class ExperimentTestBaseClass(HelperTestCaseBase):
  def setUp(self):
    """ Method called to prepare the test fixture. This is called by the
    unittest framework immediately before calling the test method; any
    exception raised by this method will be considered an error rather than
    a test failure. The default implementation does nothing.
    """
    pass
def tearDown(self):
""" Method called immediately after the test method has been called and the
result recorded. This is called even if the test method raised an exception,
so the implementation in subclasses may need to be particularly careful
about checking internal state. Any exception raised by this method will be
considered an error rather than a test failure. This method will only be
called if the setUp() succeeds, regardless of the outcome of the test
method. The default implementation does nothing.
"""
# Reset our log items
self.resetExtraLogItems()
def shortDescription(self):
""" Override to force unittest framework to use test method names instead
of docstrings in the report.
"""
return None
def _printTestHeader(self):
""" Print out what test we are running
"""
print "###############################################################"
print "Running test: %s.%s..." % (self.__class__, self._testMethodName)
def _setDataPath(self, env):
""" Put the path to our datasets int the NTA_DATA_PATH variable which
will be used to set the environment for each of the workers
Parameters:
---------------------------------------------------------------------
env: The current environment dict
"""
assert env is not None
# If already have a path, concatenate to it
if "NTA_DATA_PATH" in env:
newPath = "%s:%s" % (env["NTA_DATA_PATH"], g_myEnv.testSrcDataDir)
else:
newPath = g_myEnv.testSrcDataDir
env["NTA_DATA_PATH"] = newPath
def _launchWorkers(self, cmdLine, numWorkers):
""" Launch worker processes to execute the given command line
Parameters:
-----------------------------------------------
cmdLine: The command line for each worker
numWorkers: number of workers to launch
retval: list of workers
"""
workers = []
for i in range(numWorkers):
stdout = tempfile.TemporaryFile()
stderr = tempfile.TemporaryFile()
p = subprocess.Popen(cmdLine, bufsize=1, env=os.environ, shell=True,
stdin=None, stdout=stdout, stderr=stderr)
workers.append(p)
return workers
def _getJobInfo(self, cjDAO, workers, jobID):
""" Return the job info for a job
Parameters:
-----------------------------------------------
cjDAO: client jobs database instance
workers: list of workers for this job
jobID: which job ID
retval: job info
"""
# Get the job info
jobInfo = cjDAO.jobInfo(jobID)
# Since we're running outside of the Nupic engine, we launched the workers
# ourself, so see how many are still running and jam the correct status
# into the job info. When using the Nupic engine, it would do this
# for us.
runningCount = 0
for worker in workers:
retCode = worker.poll()
if retCode is None:
runningCount += 1
if runningCount > 0:
status = ClientJobsDAO.STATUS_RUNNING
else:
status = ClientJobsDAO.STATUS_COMPLETED
jobInfo = jobInfo._replace(status=status)
if status == ClientJobsDAO.STATUS_COMPLETED:
jobInfo = jobInfo._replace(
completionReason=ClientJobsDAO.CMPL_REASON_SUCCESS)
return jobInfo
def _generateHSJobParams(self,
expDirectory=None,
hsImp='v2',
maxModels=2,
predictionCacheMaxRecords=None,
dataPath=None,
maxRecords=10):
"""
This method generates a canned Hypersearch Job Params structure based
on some high level options
Parameters:
---------------------------------------------------------------------
predictionCacheMaxRecords:
If specified, determine the maximum number of records in
the prediction cache.
dataPath: When expDirectory is not specified, this is the data file
to be used for the operation. If this value is not specified,
it will use the /extra/qa/hotgym/qa_hotgym.csv.
"""
if expDirectory is not None:
descriptionPyPath = os.path.join(expDirectory, "description.py")
permutationsPyPath = os.path.join(expDirectory, "permutations.py")
permutationsPyContents = open(permutationsPyPath, 'rb').read()
descriptionPyContents = open(descriptionPyPath, 'rb').read()
jobParams = {'persistentJobGUID' : generatePersistentJobGUID(),
'permutationsPyContents': permutationsPyContents,
'descriptionPyContents': descriptionPyContents,
'maxModels': maxModels,
'hsVersion': hsImp}
if predictionCacheMaxRecords is not None:
jobParams['predictionCacheMaxRecords'] = predictionCacheMaxRecords
else:
# Form the stream definition
if dataPath is None:
dataPath = resource_filename("nupic.data",
os.path.join("extra", "qa", "hotgym",
|
wevote/WebAppPublic
|
apis_v1/documentation_source/voter_star_on_save_doc.py
|
Python
|
bsd-3-clause
| 4,125
| 0.003394
|
# apis_v1/documentation_source/voter_star_on_save_doc.py
# Brought to you by We Vote. Be good.
# -*- coding: UTF-8 -*-
def voter_star_on_save_doc_template_values(url_root):
    """
    Show documentation about voterStarOnSave

    Returns a dict of template values used to render the documentation page
    for the voterStarOnSave API endpoint.
    """
    # Parameters the caller must always supply.
    required_query_parameter_list = [
        {
            'name': 'api_key',
            'value': 'string (from post, cookie, or get (in that order))',  # boolean, integer, long, string
            'description': 'The unique key provided to any organization using the WeVoteServer APIs',
        },
        {
            'name': 'voter_device_id',
            'value': 'string',  # boolean, integer, long, string
            'description': 'An 88 character unique identifier linked to a voter record on the server',
        },
        {
            'name': 'kind_of_ballot_item',
            'value': 'string',  # boolean, integer, long, string
            'description': 'What is the type of ballot item for which we are saving the \'on\' status? '
                           '(kind_of_ballot_item is either "OFFICE", "CANDIDATE", "POLITICIAN" or "MEASURE")',
        },
        {
            'name': 'ballot_item_id',
            'value': 'integer',  # boolean, integer, long, string
            'description': 'The unique internal identifier for this ballot_item '
                           '(either ballot_item_id OR ballot_item_we_vote_id required -- not both. '
                           'If it exists, ballot_item_id is used instead of ballot_item_we_vote_id)',
        },
        {
            'name': 'ballot_item_we_vote_id',
            'value': 'string',  # boolean, integer, long, string
            'description': 'The unique identifier for this ballot_item across all networks '
                           '(either ballot_item_id OR ballot_item_we_vote_id required -- not both. '
                           'NOTE: In the future we might support other identifiers used in the industry.',
        },
    ]
    optional_query_parameter_list = [
    ]

    # NOTE: this identifier was reconstructed -- a stray line break had split
    # 'potential_status_codes_list' in the original source.
    potential_status_codes_list = [
        {
            'code': 'VALID_VOTER_DEVICE_ID_MISSING',
            'description': 'Cannot proceed. A valid voter_device_id parameter was not included.',
        },
        {
            'code': 'VALID_VOTER_ID_MISSING',
            'description': 'Cannot proceed. Missing voter_id while trying to save.',
        },
        {
            'code': 'STAR_ON_OFFICE CREATE/UPDATE ITEM_STARRED',
            'description': '',
        },
        {
            'code': 'STAR_ON_CANDIDATE CREATE/UPDATE ITEM_STARRED',
            'description': '',
        },
        {
            'code': 'STAR_ON_MEASURE CREATE/UPDATE ITEM_STARRED',
            'description': '',
        },
    ]

    # Example values used by the "try now" link on the documentation page.
    try_now_link_variables_dict = {
        'kind_of_ballot_item': 'CANDIDATE',
        'ballot_item_id': '5655',
    }

    api_response = '{\n' \
                   '  "status": string (description of what happened),\n' \
                   '  "success": boolean (did the save happen?),\n' \
                   '  "ballot_item_id": integer,\n' \
                   '  "ballot_item_we_vote_id": string,\n' \
                   '  "kind_of_ballot_item": string (CANDIDATE, MEASURE),\n' \
                   '}'

    template_values = {
        'api_name': 'voterStarOnSave',
        'api_slug': 'voterStarOnSave',
        'api_introduction':
            "Save or create private 'star on' state for the current voter for a measure, an office or candidate.",
        'try_now_link': 'apis_v1:voterStarOnSaveView',
        'try_now_link_variables_dict': try_now_link_variables_dict,
        'url_root': url_root,
        'get_or_post': 'GET',
        'required_query_parameter_list': required_query_parameter_list,
        'optional_query_parameter_list': optional_query_parameter_list,
        'api_response': api_response,
        'api_response_notes':
            "",
        'potential_status_codes_list': potential_status_codes_list,
    }
    return template_values
|
hinrek/Suvepraktika
|
events/migrations/0006_auto_20170620_1225.py
|
Python
|
mit
| 1,100
| 0.001818
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.7 on 2017-06-20 09:25
from __future__ import unicode_literals
from django.db import migrations, models
import location_field.models.plain
class Migration(migrations.Migration):
    """Set Estonian verbose names / defaults on comment and event fields."""

    dependencies = [
        ('events', '0005_merge_20170619_1150'),
    ]

    operations = [
        migrations.AlterField(
            model_name='comment',
            name='text',
            field=models.TextField(verbose_name=''),
        ),
        # A stray line break had split this AlterField call in the source;
        # reconstructed from the surrounding operations.
        migrations.AlterField(
            model_name='event',
            name='city',
            field=models.CharField(default='Tallinn', max_length=255, verbose_name='Linn'),
        ),
        migrations.AlterField(
            # 'descripton' is misspelled, but it must match the model field
            # name -- do not "fix" it here.
            model_name='event',
            name='descripton',
            field=models.TextField(verbose_name='Kirjeldus'),
        ),
        migrations.AlterField(
            model_name='event',
            name='location',
            field=location_field.models.plain.PlainLocationField(default='59.43696079999999,24.75357459999998', max_length=63, verbose_name='Asukoht'),
        ),
    ]
|
SUSE/azure-sdk-for-python
|
azure-mgmt-resource/azure/mgmt/resource/subscriptions/v2016_06_01/models/location_paged.py
|
Python
|
mit
| 874
| 0
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.paging import Paged
class LocationPaged(Paged):
    """
    A paging container for iterating over a list of Location object
    """

    # Stray line breaks had split the 'next_link' key and '**kwargs' in the
    # original source; reconstructed per the AutoRest-generated pattern.
    _attribute_map = {
        'next_link': {'key': 'nextLink', 'type': 'str'},
        'current_page': {'key': 'value', 'type': '[Location]'}
    }

    def __init__(self, *args, **kwargs):
        super(LocationPaged, self).__init__(*args, **kwargs)
|
tsdmgz/ansible
|
lib/ansible/modules/network/aci/aci_bd.py
|
Python
|
gpl-3.0
| 12,055
| 0.002323
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: aci_bd
short_description: Manage Bridge Domains (BD) on Cisco ACI Fabrics (fv:BD)
description:
- Manages Bridge Domains (BD) on Cisco ACI Fabrics.
- More information from the internal APIC class
I(fv:BD) at U(https://developer.cisco.com/media/mim-ref/MO-fvBD.html).
author:
- Swetha Chunduri (@schunduri)
- Dag Wieers (@dagwieers)
- Jacob McGill (@jmcgill298)
requirements:
- ACI Fabric 1.0(3f)+
version_added: '2.4'
notes:
- The C(tenant) used must exist before using this module in your playbook.
The M(aci_tenant) module can be used for this.
options:
arp_flooding:
description:
- Determines if the Bridge Domain should flood ARP traffic.
- The APIC defaults new Bridge Domains to C(no).
choices: [ no, yes ]
default: no
bd:
description:
- The name of the Bridge Domain.
aliases: [ bd_name, name ]
bd_type:
description:
- The type of traffic on the Bridge Domain.
- The APIC defaults new Bridge Domains to C(ethernet).
choices: [ ethernet, fc ]
default: ethernet
description:
description:
- Description for the Bridge Domain.
enable_multicast:
description:
- Determines if PIM is enabled
- The APIC defaults new Bridge Domains to C(no).
choices: [ no, yes ]
default: no
enable_routing:
description:
    - Determines if IP forwarding should be allowed.
    - The APIC defaults new Bridge Domains to C(yes).
    choices: [ no, yes ]
default: yes
endpoint_clear:
description:
- Clears all End Points in all Leaves when C(yes).
- The APIC defaults new Bridge Domains to C(no).
- The value is not reset to disabled once End Points have been cleared; that requires a second task.
choices: [ no, yes ]
default: no
endpoint_move_detect:
description:
- Determines if GARP should be enabled to detect when End Points move.
- The APIC defaults new Bridge Domains to C(garp).
choices: [ default, garp ]
default: garp
endpoint_retention_action:
description:
- Determines if the Bridge Domain should inherit or resolve the End Point Retention Policy.
- The APIC defaults new Bridge Domain to End Point Retention Policies to C(resolve).
choices: [ inherit, resolve ]
default: resolve
endpoint_retention_policy:
description:
- The name of the End Point Retention Policy the Bridge Domain should use when
overriding the default End Point Retention Policy.
igmp_snoop_policy:
description:
- The name of the IGMP Snooping Policy the Bridge Domain should use when
overriding the default IGMP Snooping Policy.
ip_learning:
description:
- Determines if the Bridge Domain should learn End Point IPs.
- The APIC defaults new Bridge Domains to C(yes).
choices: [ no, yes ]
ipv6_nd_policy:
description:
- The name of the IPv6 Neighbor Discovery Policy the Bridge Domain should use when
overridding the default IPV6 ND Policy.
l2_unknown_unicast:
description:
- Determines what forwarding method to use for unknown l2 destinations.
- The APIC defaults new Bridge domains to C(proxy).
choices: [ proxy, flood ]
default: proxy
l3_unknown_multicast:
description:
- Determines the forwarding method to use for unknown multicast destinations.
- The APCI defaults new Bridge Domains to C(flood).
choices: [ flood, opt-flood ]
default: flood
limit_ip_learn:
description:
- Determines if the BD should limit IP learning to only subnets owned by the Bridge Domain.
- The APIC defaults new Bridge Domains to C(yes).
choices: [ no, yes ]
default: yes
multi_dest:
description:
- Determines the forwarding method for L2 multicast, broadcast, and link layer traffic.
- The APIC defaults new Bridge Domains to C(bd-flood).
choices: [ bd-flood, drop, encap-flood ]
default: bd-flood
state:
description:
- Use C(present) or C(absent) for adding or removing.
- Use C(query) for listing an object or multiple objects.
choices: [ absent, present, query ]
default: present
tenant:
description:
- The name of the Tenant.
aliases: [ tenant_name ]
vrf:
description:
- The name of the VRF.
aliases: [ vrf_name ]
'''
EXAMPLES = r'''
- name: Add Bridge Domain
aci_bd:
host: "{{ inventory_hostname }}"
username: "{{ username }}"
password: "{{ password }}"
validate_certs: false
state: present
tenant: prod
bd: web_servers
vrf: prod_vrf
- name: Add an FC Bridge Domain
aci_bd:
host: "{{ inventory_hostname }}"
username: "{{ username }}"
password: "{{ password }}"
validate_certs: false
state: present
tenant: prod
bd: storage
bd_type: fc
vrf: fc_vrf
enable_routing: no
- name: Modify a Bridge Domain
aci_bd:
host: "{{ inventory_hostname }}"
username: "{{ username }}"
password: "{{ password }}"
validate_certs: true
state: present
tenant: prod
bd: web_servers
arp_flooding: yes
l2_unknown_unicast: flood
- name: Query All Bridge Domains
aci_bd:
host: "{{ inventory_hostname }}"
username: "{{ username }}"
password: "{{ password }}"
validate_certs: true
state: query
- name: Query a Bridge Domain
aci_bd:
host: "{{ inventory_hostname }}"
username: "{{ username }}"
password: "{{ password }}"
validate_certs: true
state: query
tenant: prod
bd: web_servers
- name: Delete a Bridge Domain
aci_bd:
host: "{{ inventory_hostname }}"
username: "{{ username }}"
password: "{{ password }}"
validate_certs: true
state: absent
tenant: prod
bd: web_servers
'''
RETURN = r''' # '''
from ansible.module_utils.aci import ACIModule, aci_argument_spec
from ansible.module_utils.basic import AnsibleModule
def main():
argument_spec = aci_argument_spec
argument_spec.update(
arp_flooding=dict(choices=['no', 'yes']),
bd=dict(type='str', aliases=['bd_name', 'name']),
bd_type=dict(type='str', choices=['ethernet', 'fc']),
description=dict(type='str'),
enable_multicast=dict(type='str', choices=['no', 'yes']),
enable_routing=dict(type='str', choices=['no', 'yes']),
endpoint_clear=dict(type='str', choices=['no', 'yes']),
endpoint_move_detect=dict(type='str', choices=['default', 'garp']),
endpoint_retention_action=dict(type='str', choices=['inherit', 'resolve']),
endpoint_retention_policy=dict(type='str'),
igmp_snoop_policy=dict(type='str'),
ip_learning=dict(type='str', choices=['no', 'yes']),
ipv6_nd_policy=dict(type='str'),
l2_unknown_unicast=dict(choices=['proxy', 'flood']),
l3_unknown_multicast=dict(choices=['flood', 'opt-flood']),
limit_ip_learn=dict(type='str', choices=['no', 'yes']),
multi_dest=dict(choices=['bd-flood', 'drop', 'encap-flood']),
state=dict(choices=['absent', 'present', 'query'], type='str', default='present'),
tenant=dict(type='str', aliases=['tenant_name']),
vrf=dict(type='str', aliases=['vrf_name']),
gateway_ip=dict(type='str', removed_in_version='2.4'), # Deprecated starting from v2.4
method=dict(type='str', choices=['delete', 'get', 'post'], aliases=['action'], removed_in_version='2.6'), # Deprecated starting from v2.6
scope=dict(type='str', removed_in_version='2.4'), # Deprecated starting from v2.4
subnet_mask=dict(type='str', removed_in_version='2.4'), # Deprecated starting from v2.4
)
module = AnsibleModule(
argument_spec=argument_spec,
supports_check_mode=True,
required_if=[
['state', 'absent', ['bd', 'tenant']],
['state', 'present', ['bd', 'tenant']],
]
|
lesommer/oocgcm
|
oocgcm/oceanfuncs/eos/teos10.py
|
Python
|
apache-2.0
| 209
| 0.004785
|
#!/usr/bin/env python
#
"""oocgcm.oceanfuncs.eos.teos10
Equation of state of sea water and related quantities.
This module uses the formulas from the
Thermodynamic Equation Of Seawater - 2010 (TEOS-10)
"""
|
JulienMcJay/eclock
|
windows/Python27/Lib/site-packages/docutils/parsers/rst/languages/cs.py
|
Python
|
gpl-2.0
| 4,857
| 0.002059
|
# $Id: cs.py 7119 2011-09-02 13:00:23Z milde $
# Author: Marek Blaha <mb@dat.cz>
# Copyright: This module has been placed in the public domain.
# New language mappings are welcome. Before doing a new translation, please
# read <http://docutils.sf.net/docs/howto/i18n.html>. Two files must be
# translated for each language: one in docutils/languages, the other in
# docutils/parsers/rst/languages.
"""
Czech-language mappings for language-dependent features of
reStructuredText.
"""
__docformat__ = 'reStructuredText'
# Czech directive names -> canonical directive names. Two entries below
# ('sidebar' and 'parsed-literal') were reconstructed: stray line breaks had
# split them in the original source.
directives = {
    # language-dependent: fixed
    u'pozor': 'attention',
    u'caution (translation required)': 'caution',  # jak rozlisit caution a warning?
    u'code (translation required)': 'code',
    u'nebezpe\u010D\u00ED': 'danger',
    u'chyba': 'error',
    u'rada': 'hint',
    u'd\u016Fle\u017Eit\u00E9': 'important',
    u'pozn\u00E1mka': 'note',
    u'tip (translation required)': 'tip',
    u'varov\u00E1n\u00ED': 'warning',
    u'admonition (translation required)': 'admonition',
    u'sidebar (translation required)': 'sidebar',
    u't\u00E9ma': 'topic',
    u'line-block (translation required)': 'line-block',
    u'parsed-literal (translation required)': 'parsed-literal',
    u'odd\u00EDl': 'rubric',
    u'moto': 'epigraph',
    u'highlights (translation required)': 'highlights',
    u'pull-quote (translation required)': 'pull-quote',
    u'compound (translation required)': 'compound',
    u'container (translation required)': 'container',
    #'questions': 'questions',
    #'qa': 'questions',
    #'faq': 'questions',
    u'table (translation required)': 'table',
    u'csv-table (translation required)': 'csv-table',
    u'list-table (translation required)': 'list-table',
    u'math (translation required)': 'math',
    u'meta (translation required)': 'meta',
    #'imagemap': 'imagemap',
    u'image (translation required)': 'image',  # obrazek
    u'figure (translation required)': 'figure',  # a tady?
    u'include (translation required)': 'include',
    u'raw (translation required)': 'raw',
    u'replace (translation required)': 'replace',
    u'unicode (translation required)': 'unicode',
    u'datum': 'date',
    u't\u0159\u00EDda': 'class',
    u'role (translation required)': 'role',
    u'default-role (translation required)': 'default-role',
    u'title (translation required)': 'title',
    u'obsah': 'contents',
    u'sectnum (translation required)': 'sectnum',
    u'section-numbering (translation required)': 'sectnum',
    u'header (translation required)': 'header',
    u'footer (translation required)': 'footer',
    #'footnotes': 'footnotes',
    #'citations': 'citations',
    u'target-notes (translation required)': 'target-notes',
    u'restructuredtext-test-directive': 'restructuredtext-test-directive'}
"""Czech name to registered (in directives/__init__.py) directive name
mapping."""
roles = {
    # language-dependent: fixed
    # Keys still marked "(translation required)" have not been localized yet;
    # they map the (currently English) role name to its canonical form.
    u'abbreviation (translation required)': 'abbreviation',
    u'ab (translation required)': 'abbreviation',
    u'acronym (translation required)': 'acronym',
    u'ac (translation required)': 'acronym',
    u'code (translation required)': 'code',
    u'index (translation required)': 'index',
    u'i (translation required)': 'index',
    u'subscript (translation required)': 'subscript',
    u'sub (translation required)': 'subscript',
    u'superscript (translation required)': 'superscript',
    u'sup (translation required)': 'superscript',
    u'title-reference (translation required)': 'title-reference',
    u'title (translation required)': 'title-reference',
    u't (translation required)': 'title-reference',
    u'pep-reference (translation required)': 'pep-reference',
    u'pep (translation required)': 'pep-reference',
    u'rfc-reference (translation required)': 'rfc-reference',
    u'rfc (translation required)': 'rfc-reference',
    u'emphasis (translation required)': 'emphasis',
    u'strong (translation required)': 'strong',
    u'literal (translation required)': 'literal',
    u'math (translation required)': 'math',
    u'named-reference (translation required)': 'named-reference',
    u'anonymous-reference (translation required)': 'anonymous-reference',
    u'footnote-reference (translation required)': 'footnote-reference',
    u'citation-reference (translation required)': 'citation-reference',
    u'substitution-reference (translation required)': 'substitution-reference',
    u'target (translation required)': 'target',
    u'uri-reference (translation required)': 'uri-reference',
    u'uri (translation required)': 'uri-reference',
    u'url (translation required)': 'uri-reference',
    u'raw (translation required)': 'raw',}
"""Mapping of Czech role names to canonical role names for interpreted text.
"""
|
ctrlaltdel/neutrinator
|
vendor/stevedore/enabled.py
|
Python
|
gpl-3.0
| 3,569
| 0
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging

from .extension import ExtensionManager

# Module-level logger; a stray line break had split this statement
# ('logging.' / 'getLogger') in the original source.
LOG = logging.getLogger(__name__)
class EnabledExtensionManager(ExtensionManager):
    """Loads only plugins that pass a check function.

    The check_func argument should return a boolean, with ``True``
    indicating that the extension should be loaded and made available
    and ``False`` indicating that the extension should be ignored.

    :param namespace: The namespace for the entry points.
    :type namespace: str
    :param check_func: Function to determine which extensions to load.
    :type check_func: callable, taking an :class:`Extension`
        instance as argument
    :param invoke_on_load: Boolean controlling whether to invoke the
        object returned by the entry point after the driver is loaded.
    :type invoke_on_load: bool
    :param invoke_args: Positional arguments to pass when invoking
        the object returned by the entry point. Only used if invoke_on_load
        is True.
    :type invoke_args: tuple
    :param invoke_kwds: Named arguments to pass when invoking
        the object returned by the entry point. Only used if invoke_on_load
        is True.
    :type invoke_kwds: dict
    :param propagate_map_exceptions: Boolean controlling whether exceptions
        are propagated up through the map call or whether they are logged and
        then ignored
    :type propagate_map_exceptions: bool
    :param on_load_failure_callback: Callback function that will be called when
        a entrypoint can not be loaded. The arguments that will be provided
        when this is called (when an entrypoint fails to load) are
        (manager, entrypoint, exception)
    :type on_load_failure_callback: function
    :param verify_requirements: Use setuptools to enforce the
        dependencies of the plugin(s) being loaded. Defaults to False.
    :type verify_requirements: bool
    """

    def __init__(self, namespace, check_func, invoke_on_load=False,
                 invoke_args=(), invoke_kwds=None,
                 propagate_map_exceptions=False,
                 on_load_failure_callback=None,
                 verify_requirements=False,):
        self.check_func = check_func
        super(EnabledExtensionManager, self).__init__(
            namespace,
            invoke_on_load=invoke_on_load,
            invoke_args=invoke_args,
            # invoke_kwds defaulted to a mutable {} in the original, a shared
            # mutable-default pitfall; None is translated to a fresh dict so
            # callers see identical behavior.
            invoke_kwds={} if invoke_kwds is None else invoke_kwds,
            propagate_map_exceptions=propagate_map_exceptions,
            on_load_failure_callback=on_load_failure_callback,
            verify_requirements=verify_requirements,
        )

    def _load_one_plugin(self, ep, invoke_on_load, invoke_args, invoke_kwds,
                         verify_requirements):
        """Load one entry point, then discard it if check_func rejects it."""
        ext = super(EnabledExtensionManager, self)._load_one_plugin(
            ep, invoke_on_load, invoke_args, invoke_kwds,
            verify_requirements,
        )
        if ext and not self.check_func(ext):
            LOG.debug('ignoring extension %r', ep.name)
            return None
        return ext
|
desihub/desispec
|
doc/conf.py
|
Python
|
bsd-3-clause
| 10,086
| 0.005057
|
# -*- coding: utf-8 -*-
#
# desispec documentation build configuration file, created by
# sphinx-quickstart on Tue Dec 9 10:43:33 2014.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
from __future__ import absolute_import, division, print_function, unicode_literals
import sys
import os
import os.path
from importlib import import_module
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('../py'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
try:
import sphinx.ext.napoleon
napoleon_extension = 'sphinx.ext.napoleon'
except ImportError:
try:
import sphinxcontrib.napoleon
napoleon_extension = 'sphinxcontrib.napoleon'
needs_sphinx = '1.2'
except ImportError:
needs_sphinx = '1.3'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.intersphinx',
'sphinx.ext.todo',
'sphinx.ext.mathjax',
'sphinx.ext.viewcode',
'sphinx.ext.napoleon'
]
# Configuration for intersphinx, copied from astropy.
intersphinx_mapping = {
'python': ('https://docs.python.org/3/', None),
'numpy': ('https://numpy.org/doc/stable/', None),
'scipy': ('https://docs.scipy.org/doc/scipy/reference/', None),
'matplotlib': ('https://matplotlib.org/', None),
'astropy': ('https://docs.astropy.org/en/stable/', None),
'h5py': ('https://docs.h5py.org/en/latest/', None)
}
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'desispec'
copyright = '2014-2016, DESI Collaboration'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
__import__(project)
package = sys.modules[project]
# The short X.Y version.
version = package.__version__.split('-', 1)[0]
# The full version, including alpha/beta/rc tags.
release = package.__version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
keep_warnings = True
# Include functions that begin with an underscore, e.g. _private().
napoleon_include_private_with_doc = True
# This value contains a list of modules to be mocked up. This is useful when
# some external dependencies are not met at build time and break the
# building process.
autodoc_mock_imports = []
for missing in ('astropy', 'astropy.modeling', 'desimodel', 'desitarget',
'desiutil', 'fitsio', 'healpy',
'matplotlib', 'numba', 'numpy', 'redrock', 'requests',
'scipy', 'speclite', 'specter', 'sqlalchemy', 'yaml', 'specex'):
try:
foo = import_module(missing)
except ImportError:
autodoc_mock_imports.append(missing)
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#html_theme = 'default'
#html_theme = 'haiku'
try:
import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
except ImportError:
pass
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any pat
|
hs that contain custom static files (such as style sheets) here,
# relativ
|
e to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'desispecdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble
|
mikeakohn/naken_asm
|
tests/comparison/scripts/gen_riscv.py
|
Python
|
gpl-3.0
| 1,166
| 0.030875
|
#!/usr/bin/env python
import os
def create_asm(instruction):
  """Write a one-line assembly file ``temp.asm`` containing *instruction*.

  Instructions on the ``main:`` label containing "ja" (presumably
  jal/jalr jumps -- TODO confirm intent) get the label prefixed with a
  ``.`` so the assembler treats it as a local label.
  """
  if instruction.startswith("main:"):
    if "ja" in instruction:
      instruction = "." + instruction
  # Text mode + context manager: the original opened in "wb" and wrote a
  # str, which raises TypeError on Python 3; the context manager also
  # guarantees the file is flushed before the assembler reads it.
  with open("temp.asm", "w") as out:
    out.write(" " + instruction + "\n")
# --------------------------------- fold here -------------------------------
fp = open("riscv_template.txt", "rb")
out = open("riscv.txt", "wb")
for instruction in fp:
instruction = instruction.strip()
if instruction.startswith(";"): continue
print instruction
create_asm(instruction)
os.system("riscv64-unknown-elf-as temp.asm")
os.system("riscv64-unknown-elf-objcopy -F ihex a.out riscv_gnu.hex"
|
)
#os.system("as-new temp.asm")
#os.system("objcopy -F ihex a.out riscv_gnu.hex")
fp1 = open("riscv_gnu.hex", "rb")
hex = fp1.readline().strip()
#if instruction.startswith("b"):
#l = len(hex)
#old = hex + " " + hex[:l-10] + " " + hex[-2:]
#out.write(old + "\n")
#hex = hex[0:l-10] + hex[-2:]
out.write(instruction + "|" + hex + "\n")
fp1.close
os.remove("a.out")
os.remove("riscv_gnu.hex")
fp.close()
out.close()
os.remove("temp.asm
|
")
|
openstack/python-openstackclient
|
openstackclient/network/v2/l3_conntrack_helper.py
|
Python
|
apache-2.0
| 8,285
| 0
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""L3 Conntrack Helper action implementations"""
import logging
from osc_lib.command import command
from osc_lib import exceptions
from osc_lib import utils
from openstackclient.i18n import _
LOG = logging.getLogger(__name__)
def _get_columns(item):
    """Map SDK resource columns for display, hiding the 'location' field."""
    hidden = ['location']
    return utils.get_osc_show_columns_for_sdk_resource(item, {}, hidden)
def _get_attrs(client, parsed_args):
    """Build the conntrack-helper attribute dict from parsed CLI arguments.

    Resolves the router name/ID to its ID and copies any truthy
    helper/protocol/port options.
    """
    router = client.find_router(parsed_args.router, ignore_missing=False)
    attrs = {'router_id': router.id}
    for field in ('helper', 'protocol', 'port'):
        value = getattr(parsed_args, field)
        if value:
            attrs[field] = value
    return attrs
class CreateConntrackHelper(command.ShowOne):
    _description = _("Create a new L3 conntrack helper")

    def get_parser(self, prog_name):
        """Register the router positional and required helper options."""
        parser = super(CreateConntrackHelper, self).get_parser(prog_name)
        parser.add_argument(
            'router', metavar='<router>',
            help=_('Router for which conntrack helper will be created'))
        parser.add_argument(
            '--helper', required=True, metavar='<helper>',
            help=_('The netfilter conntrack helper module'))
        parser.add_argument(
            '--protocol', required=True, metavar='<protocol>',
            help=_('The network protocol for the netfilter conntrack target '
                   'rule'))
        parser.add_argument(
            '--port', required=True, metavar='<port>', type=int,
            help=_('The network port for the netfilter conntrack target rule'))
        return parser

    def take_action(self, parsed_args):
        """Create the helper under its router and format it for display."""
        network_client = self.app.client_manager.network
        attrs = _get_attrs(network_client, parsed_args)
        router_id = attrs.pop('router_id')
        helper = network_client.create_conntrack_helper(router_id, **attrs)
        display_columns, columns = _get_columns(helper)
        row = utils.get_item_properties(helper, columns, formatters={})
        return (display_columns, row)
class DeleteConntrackHelper(command.Command):
    _description = _("Delete L3 conntrack helper")

    def get_parser(self, prog_name):
        """Register the router positional and one-or-more helper IDs."""
        parser = super(DeleteConntrackHelper, self).get_parser(prog_name)
        parser.add_argument(
            'router', metavar='<router>',
            help=_('Router that the conntrack helper belong to'))
        parser.add_argument(
            'conntrack_helper_id', metavar='<conntrack-helper-id>', nargs='+',
            help=_('The ID of the conntrack helper(s) to delete'))
        return parser

    def take_action(self, parsed_args):
        """Delete each helper, collecting failures before raising."""
        network_client = self.app.client_manager.network
        router = network_client.find_router(parsed_args.router,
                                            ignore_missing=False)
        failures = 0
        for helper_id in parsed_args.conntrack_helper_id:
            try:
                network_client.delete_conntrack_helper(
                    helper_id, router.id, ignore_missing=False)
            except Exception as e:
                # Log and keep going so one bad ID does not abort the rest.
                failures += 1
                LOG.error(_("Failed to delete L3 conntrack helper with "
                            "ID '%(ct_helper)s': %(e)s"),
                          {'ct_helper': helper_id, 'e': e})
        if failures:
            total = len(parsed_args.conntrack_helper_id)
            msg = (_("%(result)s of %(total)s L3 conntrack helpers failed "
                     "to delete.") % {'result': failures, 'total': total})
            raise exceptions.CommandError(msg)
class ListConntrackHelper(command.Lister):
    _description = _("List L3 conntrack helpers")

    def get_parser(self, prog_name):
        """Register the router positional and the optional filters."""
        parser = super(ListConntrackHelper, self).get_parser(prog_name)
        parser.add_argument(
            'router', metavar='<router>',
            help=_('Router that the conntrack helper belong to'))
        parser.add_argument(
            '--helper', metavar='<helper>',
            help=_('The netfilter conntrack helper module'))
        parser.add_argument(
            '--protocol', metavar='<protocol>',
            help=_('The network protocol for the netfilter conntrack target '
                   'rule'))
        parser.add_argument(
            '--port', metavar='<port>',
            help=_('The network port for the netfilter conntrack target rule'))
        return parser

    def take_action(self, parsed_args):
        """List helpers for the router, applying any provided filters."""
        network_client = self.app.client_manager.network
        columns = ('id', 'router_id', 'helper', 'protocol', 'port')
        column_headers = ('ID', 'Router ID', 'Helper', 'Protocol', 'Port')
        attrs = _get_attrs(network_client, parsed_args)
        helpers = network_client.conntrack_helpers(
            attrs.pop('router_id'), **attrs)
        # Keep this lazy (generator) exactly as the original did.
        rows = (utils.get_item_properties(h, columns, formatters={})
                for h in helpers)
        return (column_headers, rows)
class SetConntrackHelper(command.Command):
_description = _("Set L3 conntrack helper properties")
def get_parser(self, prog_name):
parser = super(SetConntrackHelper, self).get_parser(prog_name)
parser.add_argument(
'router',
metavar='<router>',
help=_('Router that the conntrack helper belong to')
)
parser.add_argument(
'conntrack_helper_id',
metavar='<conntrack-helper-id>',
help=_('The ID of the conntrack helper(s)')
)
parser.add_argument(
'--helper',
metavar='<helper>',
help=_('The netfilter conntrack helper module')
)
parser.add_argument(
'--protocol',
metavar='<protocol>',
help=_('The network protocol for the netfilter conntrack target '
'rule')
)
parser.add_argument(
'--port',
metavar='<port>',
type=int,
help=_('The network port for the netfilter conntrack target rule')
)
return parser
def take_action(self, parsed_args):
client = self.app.client_manager.network
attrs = _get_attrs(client, parsed_args)
if attrs:
client.update_conntrack_helper(
parsed_args.conntrack_helper_id, attrs.pop('router_id'),
|
**attrs)
class ShowConntrackHelper(command.ShowOne):
_description = _("Display L3 conntrack helper details")
def get_parser(self, prog_name):
parser = super(ShowConntrackHelper, self).get_parser(prog_name)
parser.add_argument(
'router',
metavar='<router>',
help=_('Router that the conntrack helper belong to')
)
parser.add_argument(
'conntrack_helper_id',
metavar='<conntrack-helper-id>',
|
help=_('The ID of the conntrack helper')
)
return parser
def take_action(self, parsed_args):
client = self.app.client_manager.network
router = client.find_router(parsed_args.router, ignore_missing=False)
obj = client.get_conntrack_helper(
parsed_args.conntrack_helper_id, router.id)
display_columns, columns = _get_columns(obj)
|
eduble/panteda
|
sakura/common/stream.py
|
Python
|
gpl-3.0
| 4,447
| 0.003823
|
import numpy as np, gevent, traceback
from gevent.queue import Queue, Empty
from sakura.common.release import auto_release
from sakura.common.chunk import NumpyChunk
from sakura.common.exactness import EXACT, APPROXIMATE, UNDEFINED, Exactness
def reassemble_chunk_stream(it, dt, chunk_size):
    """Re-chunk a stream of NumpyChunk objects to a fixed exact-chunk size.

    it: iterator of NumpyChunk objects.
    dt: chunk dtype (passed to ``NumpyChunk.empty``).
    chunk_size: target row count for exact chunks, or None to disable.

    Exact chunks are buffered and re-emitted with exactly ``chunk_size``
    rows (except possibly the final one); approximate chunks are merged
    with the buffered exact rows and emitted immediately as APPROXIMATE.
    """
    if chunk_size is None:
        return it # nothing to do
    def reassembled(it):
        # buf_chunk accumulates exact rows; buf_level counts filled rows.
        buf_chunk = NumpyChunk.empty(chunk_size, dt, UNDEFINED)
        buf_level = 0
        for chunk in it:
            if chunk.exact():
                # depending on requested chunk_size, we may have to cut this
                # chunk into several parts.
                while chunk.size > 0:
                    chunk_part = chunk[:chunk_size-buf_level]
                    buf_chunk[buf_level:buf_level+chunk_part.size] = chunk_part
                    buf_level += chunk_part.size
                    if buf_level == chunk_size:
                        # buffer full: emit one exact chunk and restart it
                        buf_chunk.exactness = EXACT
                        yield buf_chunk
                        buf_level = 0
                    chunk = chunk[chunk_part.size:]
            else:
                # size of approximate chunks is lower or equal to chunk_size.
                # we concatenate current exact rows of buf_chunk with approximate
                # ones of this chunk.
                chunk_part = chunk[:chunk_size-buf_level]
                buf_chunk[buf_level:buf_level+chunk_part.size] = chunk_part
                inexact_chunk = buf_chunk[:buf_level+chunk_part.size]
                inexact_chunk.exactness = APPROXIMATE
                yield inexact_chunk
        if buf_level > 0:
            # last exact chunk is the only one which may have a size lower than
            # chunk_size
            buf_chunk = buf_chunk[:buf_level]
            buf_chunk.exactness = EXACT
            yield buf_chunk
    return reassembled(it)
def normalize_chunk_stream(it):
    """Yield each chunk as a NumpyChunk view, defaulting exactness to EXACT."""
    for raw_chunk in it:
        normalized = raw_chunk.view(NumpyChunk)
        if normalized.exactness == UNDEFINED:
            # an unspecified exactness means the producer guarantees exact data
            normalized.exactness = EXACT
        yield normalized
def normalize_value_stream(it):
    """Yield (value, exactness) pairs, tagging untagged values as EXACT."""
    for item in it:
        already_tagged = (isinstance(item, tuple) and len(item) == 2
                          and isinstance(item[1], Exactness))
        if already_tagged:
            # item is already a (<row>, <exactness>) pair
            yield item
        else:
            yield item, EXACT
@auto_release
class HardTimerIterator:
    def __init__(self, it, timeout):
        # it: wrapped iterator, consumed from a background greenlet.
        # timeout: max seconds to wait for the next item before yielding None.
        self._it = it
        self._timeout = timeout
        # Background greenlet; spawned lazily on the first __next__() call.
        self._glet = None
        # Request channel (caller -> greenlet) and reply channel back.
        self._in_queue = Queue()
        self._out_queue = Queue()
    def __iter__(self):
        # Iterator protocol: the object is its own iterator.
        return self
    def __next__(self):
        """Return the next item, or None if it takes longer than the timeout."""
        if self._glet is None:
            self._spawn()
        self._in_queue.put(1) # send chunk request
        try:
            res = self._out_queue.get(timeout = self._timeout)
        except Empty:
            # no chunk arrived within the timeout: report it with None
            return None
        if isinstance(res, Exception):
            # StopIteration and worker errors are re-raised in the caller
            raise res
        return res
def _spawn(self):
self._glet = gevent.spawn(
|
self._run)
self._out_queue.get() # wait for bg greenlet init
def release(self):
if self._glet is not None:
self._glet.kill() # kill
self._in_queue = None
self._out_queue = None
self._glet = None
|
self._it = None
    def _run(self):
        """Background greenlet: serve one item per request until exhausted.

        Exceptions (including StopIteration) are sent through the out
        queue so __next__() can re-raise them in the caller's context.
        """
        in_queue = self._in_queue
        out_queue = self._out_queue
        it = self._it
        try:
            # notify caller we are now running
            out_queue.put(1)
            # run main loop
            while True:
                # wait for next chunk request
                in_queue.get()
                # get next chunk and pass it to requester
                try:
                    chunk = next(it)
                    out_queue.put(chunk)
                except gevent.GreenletExit:
                    raise # end
                except StopIteration as e:
                    out_queue.put(e)
                    return # exit
                except Exception as e:
                    traceback.print_exc()
                    out_queue.put(e)
                    return # exit
        except gevent.GreenletExit:
            raise # end
# if delay between two chunks reaches timeout,
# yield a None value.
def apply_hard_timer_to_stream(it, timeout):
    """Wrap *it* so that reads slower than *timeout* seconds yield None."""
    return HardTimerIterator(it, timeout)
|
census-instrumentation/opencensus-python
|
tests/unit/trace/test_tracer.py
|
Python
|
apache-2.0
| 9,886
| 0
|
# Copyright 2017, OpenCensus Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import mock
from opencensus.trace import samplers, span_data
from opencensus.trace import tracer as tracer_module
class TestTracer(unittest.TestCase):
    def test_constructor_default(self):
        """Tracer() wires default sampler/exporter/propagator and NoopTracer."""
        from opencensus.trace import print_exporter
        from opencensus.trace.propagation \
            import trace_context_http_header_format
        from opencensus.trace.samplers import ProbabilitySampler
        from opencensus.trace.span_context import SpanContext
        from opencensus.trace.tracers import noop_tracer
        tracer = tracer_module.Tracer()
        assert isinstance(tracer.span_context, SpanContext)
        assert isinstance(tracer.sampler, ProbabilitySampler)
        assert isinstance(tracer.exporter, print_exporter.PrintExporter)
        assert isinstance(
            tracer.propagator,
            trace_context_http_header_format.TraceContextPropagator)
        assert isinstance(tracer.tracer, noop_tracer.NoopTracer)
def test_constructor_explicit(self):
from opencensus.trace.tracers import noop_tracer
sampler = mock.Mock()
sampler.should_sample.return_value = False
exporter = mock.Mock()
propagator = mock.Mock()
span_context = mock.Mock()
span_context.trace_options.enabled = False
tracer = tracer_module.Tracer(
span_context=span_context,
sampler=sampler,
exporter=exporter,
propagator=propagator)
self.assertIs(tracer.span_context, span_context)
self.assertIs(tracer.sampler, sampler)
self.assertIs(tracer.exporter, exporter)
self.assertIs(tracer.propagator, propagator)
assert isinstance(tracer.tracer, noop_tracer.NoopTracer)
def test_should_sample_force_not_trace(self):
span_context = mock.Mock()
span_context.trace_options.enabled = False
sampler = mock.Mock()
sampler.should_sample.return_value = False
tracer = tracer_module.Tracer(
span_context=span_context, sampler=sampler)
sampled = tracer.should_sample()
self.assertFalse(sampled)
def test_should_sample_sampled(self):
sampler = mock.Mock()
sampler.should_sample.return_value = True
tracer = tracer_module.Tracer(sampler=sampler)
sampled = tracer.should_sample()
self.assertTrue(sampled)
def test_should_sample_not_sampled(self):
sampler = mock.Mock()
sampler.should_sample.return_value = False
span_context = mock.Mock()
span_context.trace_options.enabled = False
tracer = tracer_module.Tracer(
span_context=span_context, sampler=sampler)
sampled = tracer.should_sample()
self.assertFalse(sampled)
def test_get_tracer_noop_tracer(self):
from opencensus.trace.tracers import noop_tracer
sampler = mock.Mock()
sampler.should_sample.return_value = False
tracer = tracer_module.Tracer(sampler=sampler)
result = tracer.get_tracer()
assert isinstance(result, noop_tracer.NoopTracer)
def test_get_tracer_context_tracer(self):
from opencensus.trace.tracers import context_tracer
sampler = mock.Mock()
sampler.should_sample.return_value = True
tracer = tracer_module.Tracer(sampler=sampler)
result = tracer.get_tracer()
assert isinstance(result, context_tracer.ContextTracer)
self.assertTrue(tracer.span_context.trace_options.enabled)
def test_finish_not_sampled(self):
from opencensus.trace.tracers import noop_tracer
sampler = mock.Mock()
sampler.should_sample.return_value = False
span_context = mock.Mock()
span_context.trace_options.enabled = False
tracer = tracer_module.Tracer(
span_context=span_context, sampler=sampler)
assert isinstance(tracer.tracer, noop_tracer.NoopTracer)
mock_tracer = mock.Mock()
tracer.tracer = mock_tracer
tracer.finish()
self.assertTrue(mock_tracer.finish.called)
def test_finish_sampled(self):
from opencensus.trace.tracers import context_tracer
sampler = mock.Mock()
sampler.should_sample.return_value = True
tracer = tracer_module.Tracer(sampler=sampler)
assert isinstance(tracer.tracer, context_tracer.ContextTracer)
mock_tracer = mock.Mock()
tracer.tracer = mock_tracer
tracer.finish()
self.assertTrue(mock_tracer.finish.called)
def test_span_not_sampled(self):
from opencensus.trace.blank_span import BlankSpan
sampler = mock.Mock()
sampler.should_sample.return_value = False
span_context = mock.Mock()
span_context.trace_options.enabled = False
tracer = tracer_module.Tracer(
span_context=span_context, sampler=sampler)
span = tracer.span()
# Test nested span not sampled
child_span = span.span()
tracer.finish()
assert isinstance(span, BlankSpan)
assert isinstance(child_span, BlankSpan)
def test
|
_span_sampled(self):
sampler = mock.Mock()
sampler.should_sample.return_value = True
tracer = tracer_module.Tracer(sampler=sampler)
tracer_mock = mock.Mock()
tracer.tracer = tracer_mock
tracer.span()
self.assertTrue(tracer_mock.span.called)
def test_start_span_not_sampled(self):
from opencensus.t
|
race.blank_span import BlankSpan
sampler = mock.Mock()
sampler.should_sample.return_value = False
span_context = mock.Mock()
span_context.trace_options.enabled = False
tracer = tracer_module.Tracer(
span_context=span_context, sampler=sampler)
span = tracer.start_span()
assert isinstance(span, BlankSpan)
def test_start_span_sampled(self):
from opencensus.trace import span as trace_span
sampler = mock.Mock()
sampler.should_sample.return_value = True
tracer = tracer_module.Tracer(sampler=sampler)
span = tracer.start_span()
assert isinstance(span, trace_span.Span)
def test_end_span_not_sampled(self):
sampler = mock.Mock()
sampler.should_sample.return_value = False
span_context = mock.Mock()
span_context.trace_options.enabled = False
tracer = tracer_module.Tracer(
sampler=sampler, span_context=span_context)
tracer.end_span()
self.assertFalse(span_context.span_id.called)
    def test_end_span_sampled(self):
        """end_span() finishes the current span when sampling is enabled."""
        from opencensus.trace import execution_context
        sampler = mock.Mock()
        sampler.should_sample.return_value = True
        tracer = tracer_module.Tracer(sampler=sampler)
        span = mock.Mock()
        span.attributes = {}
        span.annotations = []
        span.message_events = []
        span.links = []
        span.children = []
        # Make the mock iterable so span-data export can walk it.
        span.__iter__ = mock.Mock(return_value=iter([span]))
        execution_context.set_current_span(span)
        with mock.patch('opencensus.trace.span.utils.get_truncatable_str'):
            tracer.end_span()
        self.assertTrue(span.finish.called)
def test_current_span_not_sampled(self):
from opencensus.trace.blank_span import BlankSpan
sampler = mock.Mock()
sampler.should_sample.return_value = False
span_context = mock.Mock()
span_context.trace_options.enabled = False
tracer = tracer_module.Tracer(
sampler=sampler, span_context=
|
mikewrock/phd_backup_full
|
build/selected_points_publisher/catkin_generated/pkg.installspace.context.pc.py
|
Python
|
apache-2.0
| 387
| 0
|
# generated from catkin/cmake/template/pkg.context.pc.in
CATKIN_PACKAGE_PREFI
|
X = ""
# Values below are substituted by CMake at configure time; here the empty
# strings collapse to empty lists / no dependencies.
PROJECT_PKG_CONFIG_INCLUDE_DIRS = "".split(';') if "" != "" else []
PROJECT_CATKIN_DEPENDS = "".replace(';', ' ')
PKG_CONFIG_LIBRARIES_WITH_PREFIX = "".split(';') if "" != "" else []
PROJECT_NAME = "selected_points_publisher"
PROJECT_SPACE_DIR = "/home/m
|
ike/catkin_ws/install"
PROJECT_VERSION = "1.0.0"
|
benosteen/django-databank
|
src/frontend/utils/_old/ident_md.py
|
Python
|
mit
| 1,379
| 0.000725
|
# -*- coding: utf-8 -*-
"""
Copyright (c) 2012 University of Oxford
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER I
|
N AN ACTION OF CONTRACT,
TORT OR OTHERWISE, A
|
RISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
from rdfdatabank.config.users import _USERS as _DATA
class IdentMDProvider(object):
    """repoze.who metadata provider merging per-user info into the identity."""

    def add_metadata(self, environ, identity):
        # Look up extra metadata keyed by the authenticated user id and fold
        # it into the identity dict when present.
        user_info = _DATA.get(identity.get('repoze.who.userid'))
        if user_info is not None:
            identity.update(user_info)
|
wheeler-microfluidics/mr-box-peripheral-board.py
|
mr_box_peripheral_board/ui/gtk/measure_dialog.py
|
Python
|
mit
| 7,247
| 0.001932
|
import datetime as dt
import threading
from serial_device.or_event import OrEvent
import numpy as np
import pandas as pd
import gobject
import gtk
import matplotlib as mpl
from streaming_plot import StreamingPlot
from ...max11210_adc_ui import MAX11210_read
import logging
def _generate_data(stop_event, data_ready, data):
    '''
    Generate random data to emulate, e.g., reading data from ADC.
    The function is an example implementation of a ``f_data`` function
    suitable for use with the :func:`measure_dialog` function.
    Parameters
    ----------
    stop_event : threading.Event
        Function returns when :data:`stop_event` is set.
    data_ready : threading.Event
        Function sets :data:`data_ready` whenever new data is available.
    data : list
        Function appends new data to :data:`data` before setting
        :data:`data_ready`.
    '''
    delta_t = dt.timedelta(seconds=.1)
    samples_per_plot = 5
    while True:
        time_0 = dt.datetime.now()
        values_i = np.random.rand(samples_per_plot)
        # `range` (not the Python-2-only `xrange`, which is a NameError on
        # Python 3) -- the timestamp list is tiny either way.
        absolute_times_i = pd.Series([time_0 + i * delta_t
                                      for i in range(len(values_i))])
        data_i = pd.Series(values_i, index=absolute_times_i)
        data.append(data_i)
        data_ready.set()
        # Pace the batches; wait() returns True (so we exit) once stop_event
        # is set.
        if stop_event.wait(samples_per_plot *
                           delta_t.total_seconds()):
            break
def measure_dialog(f_data, duration_s=None, auto_start=True,
                   auto_close=True, **kwargs):
    '''
    Launch a GTK dialog and plot data
    Parameters
    ----------
    f_data : function(stop_event, data_ready, data)
        Function to run to generate data, e.g., read data from ADC.
        The function is run in its own thread and is provided the following
        parameters:
        - :data:`stop_event` : threading.Event
        - :data:`data_ready` : threading.Event
        - :data:`data` : list
        The function **MUST**:
        - Return when the :data:`stop_event` is set.
        - Set :data:`data_ready` event whenever new data is available.
    duration_s : float, optional
        Length of time to measure for (in seconds).
        If duration is not specified, measure until window is closed or
        ``Pause`` button is pressed.
    auto_start : bool, optional
        Automatically start measuring when the dialog is launched.
        Default is ``True``.
    auto_close : bool, optional
        If ``duration_s`` is specified, automatically close window once the
        measurement duration has passed (unless the ``Pause`` button has been
        pressed.
        Default is ``True``.
    **kwargs : dict
        Additional keyword arguments are passed to the construction of the
        :class:`streaming_plot.StreamingPlot` view.
    '''
    # `StreamingPlot` class uses threads. Need to initialize GTK to use
    # threads. See [here][1] for more information.
    #
    # [1]: http://faq.pygtk.org/index.py?req=show&file=faq20.001.htp
    gtk.gdk.threads_init()
    with mpl.style.context('seaborn',
                           {'image.cmap': 'gray',
                            'image.interpolation' : 'none'}):
        # Create dialog window to wrap PMT measurement view widget.
        dialog = gtk.Dialog()
        dialog.set_default_size(800, 600)
        view = StreamingPlot(data_func=f_data, **kwargs)
        dialog.get_content_area().pack_start(view.widget, True, True)
        dialog.connect('check-resize', lambda *args: view.on_resize())
        dialog.set_position(gtk.WIN_POS_MOUSE)
        dialog.show_all()
        view.fig.tight_layout()
        if auto_start:
            gobject.idle_add(view.start)
        def _auto_close(*args):
            if not view.stop_event.is_set():
                # User did not explicitly pause the measurement. Automatically
                # close the measurement and continue.
                dialog.destroy()
        # Set once the plot widget is destroyed, i.e., the dialog closed.
        measurement_complete = threading.Event()
        view.widget.connect('destroy', lambda *args: measurement_complete.set())
        if duration_s is not None:
            def _schedule_stop(*args):
                # Wait until the view stops, starts, or the dialog closes
                # before arming the timed stop.
                event = OrEvent(view.stop_event, view.started,
                                measurement_complete)
                event.wait()
                if view.started.is_set():
                    stop_func = _auto_close if auto_close else view.pause
                    gobject.timeout_add(duration_s * 1000, stop_func)
            stop_schedule_thread = threading.Thread(target=_schedule_stop)
            # Daemon thread: do not keep the process alive waiting on events.
            stop_schedule_thread.daemon = True
            stop_schedule_thread.start()
        dialog.run()
        dialog.destroy()
        measurement_complete.wait()
        if view.data:
            return pd.concat(view.data)
        else:
            return None
    # NOTE(review): unreachable -- both branches of the `if` above return
    # before control can fall out of the `with` block.
    return False
def adc_data_func_factory(proxy, delta_t=dt.timedelta(seconds=1), adc_rate=1,
resistor_val = False):
'''
Parameters
----------
proxy : mr_box_peripheral_board.SerialProxy
delta_t : datetime.timedelta
Time between ADC measurements.
Returns
-------
function
Function suitable for use with the :func:`measu
|
re_dialog` function.
'''
#set the adc digital gain
# proxy.MAX11210_setGain(adc_dgain)
|
#Set the pmt shutter pin to output
proxy.pin_mode(9, 1)
logger = logging.getLogger(__name__)
def _read_adc(stop_event, data_ready, data):
'''
Parameters
----------
stop_event : threading.Event
Function returns when :data:`stop_event` is set.
data_ready : threading.Event
Function sets :data:`data_ready` whenever new data is available.
data : list
Function appends new data to :data:`data` before setting
:data:`data_ready`.
delta_t = dt.timedelta(seconds=.1)
'''
#Start the ADC
try:
proxy.pmt_open_shutter()
logger.info('PMT Shutter Opened')
adc_dgain = 1
while True:
data_i = MAX11210_read(proxy, rate=adc_rate,
duration_s=delta_t.total_seconds())
#Convert data to Voltage, 24bit ADC with Vref = 3.0 V
data_i /= ((2 ** 24 - 1)/(3.0/adc_dgain))
if (resistor_val):
#Convert Voltage to Current, 30kOhm Resistor
data_i /= 30e3
else:
#Convert Voltage to Current, 300kOhm Resistor
data_i /= 300e3
# Set name to display units.
data_i.name = 'Current (A)'
data.append(data_i)
data_ready.set()
if stop_event.is_set():
break
finally:
proxy.pmt_close_shutter()
logger.info('PMT Shutter Closed')
return _read_adc
|
Comcast/rulio
|
examples/actionendpoint.py
|
Python
|
apache-2.0
| 2,070
| 0.004831
|
#!/usr/bin/python
# Copyright 2015 Comcast Cable Communications Management, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# End Copyright
# An example action endpoint for rules with language="POST". This
# example is NOT an action executor. Instead, it's just an endpoint
# in the role of any external system that deals directly with JSON
# bodies.
# curl -d '{"likes":"tacos"}' http://localhost:6667/
from BaseHTTPServer import BaseHTTPRequestHandler,HTTPServer
# import
|
json
PORT = 6667
def protest(response, message):
    """Reply on *response* with HTTP 200, a JSON content type, and *message*."""
    response.send_response(200)
    response.send_header('Content-type', 'application/json')
    response.end_headers()
    response.wfile.write(message)
class handler(BaseHTTPRequestHandler):
def do_GET(self):
protest(self, "You should POST with json.\n")
return
def do_POST(self):
try:
co
|
ntent_len = int(self.headers.getheader('content-length'))
body = self.rfile.read(content_len)
print 'body ', body
self.send_response(200)
self.send_header('Content-type','application/json')
self.end_headers()
response = '{"Got":%s}' % (body)
self.wfile.write(response)
except Exception as broke:
protest(self, str(broke))
# Run the endpoint until interrupted; Ctrl-C shuts the socket down cleanly.
# NOTE: Python 2 syntax (print statement, BaseHTTPServer import above).
try:
    server = HTTPServer(('', PORT), handler)
    print 'Started example action endpoint on port ' , PORT
    server.serve_forever()
except KeyboardInterrupt:
    print '^C received, shutting down example action endpoint on ', PORT
    server.socket.close()
|
pmisik/buildbot
|
master/buildbot/db/base.py
|
Python
|
gpl-2.0
| 5,687
| 0.000528
|
# This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
|
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTAB
|
ILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
import hashlib
import itertools
import sqlalchemy as sa
from buildbot.util import unicode2bytes
class DBConnectorComponent:
    # A fixed component of the DBConnector, handling one particular aspect of
    # the database. Instances of subclasses are assigned to attributes of the
    # DBConnector object, so that they are available at e.g.,
    # C{master.db.model} or C{master.db.changes}. This parent class takes care
    # of the necessary backlinks and other housekeeping.
    connector = None
    # Class-level mapping shared by all components; presumably used for
    # data-API <-> DB translations by subclasses -- TODO confirm.
    data2db = {}
    def __init__(self, connector):
        # Backlink to the owning DBConnector.
        self.db = connector
        # set up caches: replace every CachedMethod descriptor found on the
        # class with its per-instance cached wrapper.
        for method in dir(self.__class__):
            o = getattr(self, method)
            if isinstance(o, CachedMethod):
                setattr(self, method, o.get_cached_method(self))
    @property
    def master(self):
        # Convenience accessor for the master owning the connector.
        return self.db.master
    # Tri-state flag: None until first checkLength() call, True when the
    # dialect requires explicit length checks.
    _isCheckLengthNecessary = None
    def checkLength(self, col, value):
        # Raise if `value` would overflow column `col`. Only needed on MySQL,
        # which truncates silently; on other dialects this method replaces
        # itself with a no-op on first use.
        if not self._isCheckLengthNecessary:
            if self.db.pool.engine.dialect.name == 'mysql':
                self._isCheckLengthNecessary = True
            else:
                # not necessary, so just stub out the method
                self.checkLength = lambda col, value: None
                return
        assert col.type.length, f"column {col} does not have a length"
        if value and len(value) > col.type.length:
            raise RuntimeError(f"value for column {col} is greater than max of {col.type.length} "
                               f"characters: {value}")
    def ensureLength(self, col, value):
        # Truncate `value` to fit `col`, appending a hash of the full value so
        # that distinct long values remain distinct after truncation.
        assert col.type.length, f"column {col} does not have a length"
        if value and len(value) > col.type.length:
            value = value[:col.type.length // 2] + \
                hashlib.sha1(unicode2bytes(value)).hexdigest()[:col.type.length // 2]
        return value
    # returns a Deferred that returns a value
    def findSomethingId(self, tbl, whereclause, insert_values,
                        _race_hook=None, autoCreate=True):
        # Like findOrCreateSomethingId, but resolve to the row id only.
        d = self.findOrCreateSomethingId(tbl, whereclause, insert_values,
                                         _race_hook, autoCreate)
        d.addCallback(lambda pair: pair[0])
        return d
    def findOrCreateSomethingId(self, tbl, whereclause, insert_values,
                                _race_hook=None, autoCreate=True):
        """
        Find a matching row and if one cannot be found optionally create it.
        Returns a deferred which resolves to the pair (id, found) where
        id is the primary key of the matching row and `found` is True if
        a match was found. `found` will be false if a new row was created.
        """
        def thd(conn, no_recurse=False):
            # try to find the master
            q = sa.select([tbl.c.id],
                          whereclause=whereclause)
            r = conn.execute(q)
            row = r.fetchone()
            r.close()
            # found it!
            if row:
                return row.id, True
            if not autoCreate:
                return None, False
            # test hook: lets tests inject a concurrent insert here
            if _race_hook is not None:
                _race_hook(conn)
            try:
                r = conn.execute(tbl.insert(), [insert_values])
                return r.inserted_primary_key[0], False
            except (sa.exc.IntegrityError, sa.exc.ProgrammingError):
                # try it all over again, in case there was an overlapping,
                # identical call, but only retry once.
                if no_recurse:
                    raise
                return thd(conn, no_recurse=True)
        return self.db.pool.do(thd)
    def hashColumns(self, *args):
        """Return a stable sha1 hex digest over *args*; None is encoded as a
        sentinel byte so it cannot collide with real string values."""
        def encode(x):
            if x is None:
                return b'\xf5'
            elif isinstance(x, str):
                return x.encode('utf-8')
            return str(x).encode('utf-8')
        return hashlib.sha1(b'\0'.join(map(encode, args))).hexdigest()
    def doBatch(self, batch, batch_n=500):
        """Yield successive lists of at most *batch_n* items from *batch*."""
        iterator = iter(batch)
        while True:
            batch = list(itertools.islice(iterator, batch_n))
            if not batch:
                break
            yield batch
class CachedMethod:
    """Marker object wrapping a component method whose results are cached.

    Instances are produced by the ``cached`` decorator; the component's
    constructor replaces them with the callable from get_cached_method().
    """
    def __init__(self, cache_name, method):
        self.cache_name = cache_name
        self.method = method
    def get_cached_method(self, component):
        """Return a wrapper around the method that consults *component*'s cache."""
        original = self.method
        def fetch(key):
            # cache-miss handler: compute a fresh value
            return original(component, key)
        cache = component.db.master.caches.get_cache(self.cache_name, fetch)
        def wrap(key, no_cache=0):
            # no_cache bypasses the cache entirely
            return original(component, key) if no_cache else cache.get(key)
        # make the wrapper introspectable like the wrapped method
        wrap.__name__ = original.__name__ + " (wrapped)"
        wrap.__module__ = original.__module__
        wrap.__doc__ = original.__doc__
        wrap.cache = cache
        return wrap
def cached(cache_name):
    """Decorator factory: mark a component method as cached under *cache_name*."""
    def decorate(method):
        return CachedMethod(cache_name, method)
    return decorate
|
singleswitch/ticker
|
settings_editor.py
|
Python
|
mit
| 9,463
| 0.012575
|
#FIXME: UNDO, click time at end to undo
from PyQt4 import QtCore, QtGui
import sys, os
import volume_editor_layout, settings_layout, cPickle
import numpy as np
from utils import Utils
class SettingsEditWidget(QtGui.QDialog, settings_layout.Ui_Dialog):
    """Dialog that edits click-timing, switch-noise, error-correction and
    channel settings, re-emitting widget changes as higher-level signals."""

    #################################################### Init
    def __init__(self, i_parent=None):
        QtGui.QDialog.__init__(self, i_parent)
        self.setupUi(self)
        QtCore.QObject.connect(self.box_enable_learning,
                               QtCore.SIGNAL("toggled(bool)"),
                               self.setEnableLearning)
        QtCore.QObject.connect(self.box_seconds_delay,
                               QtCore.SIGNAL("valueChanged(double)"),
                               self.editClickParamsEvent)
        QtCore.QObject.connect(self.box_click_dev,
                               QtCore.SIGNAL("valueChanged(double)"),
                               self.editClickParamsEvent)

    ############################################### Main
    def editClickParamsEvent(self, i_value):
        """Forward any click-parameter spinbox change as one signal."""
        self.emit(QtCore.SIGNAL("edit_click_params"))

    def closeEvent(self, event):
        """Close the dialog and notify listeners that settings were closed."""
        QtGui.QDialog.close(self)
        self.emit(QtCore.SIGNAL("close_settings"))

    def clickPdfToSettingsParams(self, i_params):
        """Convert click pdf parameters to the ones stored in settings editor."""
        (delay, std, fr, fp_rate) = i_params
        # fr is displayed as a percentage, fp_rate as events per minute
        return (delay, std, fr * 100.0, fp_rate * 60.0)

    def settingsToClickPdfParams(self, i_params):
        """Convert settings editor parameters to the ones stored by click pdf."""
        (delay, std, fr, fp_rate) = i_params
        # inverse of clickPdfToSettingsParams
        return (delay, std, fr / 100.0, fp_rate / 60.0)

    ################################################ Get
    def getSettings(self):
        """Collect every widget value into a settings dictionary."""
        settings = {}
        # Click-time delay
        delay = self.box_seconds_delay.value()
        std = self.box_click_dev.value()
        settings['is_train'] = self.box_enable_learning.isChecked()
        settings['learning_rate'] = self.box_learning_rate.value()
        settings['learn_delay'] = self.box_learn_delay.isChecked()
        settings['learn_std'] = self.box_learn_std.isChecked()
        # Switch noise
        fp_rate = self.box_fp_rate.value()
        fr = self.box_fr.value()
        settings['learn_fp'] = self.box_learn_fp.isChecked()
        settings['learn_fr'] = self.box_learn_fr.isChecked()
        # Convert the displayed units to click-pdf units
        (settings['delay'], settings['std'],
         settings['fr'], settings['fp_rate']) = self.settingsToClickPdfParams(
            (delay, std, fr, fp_rate))
        # Error correction
        settings['undo'] = self.box_undo.value()
        settings['prog_status'] = self.box_prog_status.value()
        settings['restart_word'] = self.box_restart_word.value()
        settings['shut_down'] = self.box_shut_down.value()
        settings['word_select_thresh'] = self.box_word_select.value()
        # Speed & channels
        settings['file_length'] = self.box_file_length.value()
        settings['channel_index'] = int(self.box_channels.currentIndex())
        settings['end_delay'] = self.box_end_delay.value()
        return settings

    def getCurrentChannel(self):
        """Return the channel number currently selected in the combo box."""
        return self.getChannel(self.box_channels.currentIndex())

    def getChannel(self, i_index):
        """Return the integer channel stored at *i_index* in the combo box."""
        return int(self.box_channels.itemText(i_index))

    #################################################### Set
    def setSettings(self, i_settings):
        """Push the values from *i_settings* into the dialog widgets."""
        # Convert click-pdf units into the units shown in the dialog
        display_params = self.clickPdfToSettingsParams(
            (i_settings['delay'], i_settings['std'],
             i_settings['fr'], i_settings['fp_rate']))
        self.setClickParams(display_params)
        # More click-time params
        self.box_enable_learning.setChecked(i_settings['is_train'])
        self.box_learning_rate.setValue(i_settings['learning_rate'])
        self.box_learn_delay.setChecked(i_settings['learn_delay'])
        self.box_learn_std.setChecked(i_settings['learn_std'])
        # More switch noise params
        self.box_learn_fp.setChecked(i_settings['learn_fp'])
        self.box_learn_fr.setChecked(i_settings['learn_fr'])
        # Error correction
        self.box_undo.setValue(i_settings['undo'])
        self.box_prog_status.setValue(i_settings['prog_status'])
        self.box_restart_word.setValue(i_settings['restart_word'])
        self.box_shut_down.setValue(i_settings['shut_down'])
        self.box_word_select.setValue(i_settings['word_select_thresh'])
        # Speed & channels
        self.box_file_length.setValue(i_settings['file_length'])
        self.box_channels.setCurrentIndex(i_settings['channel_index'])
        self.box_end_delay.setValue(i_settings['end_delay'])

    def setClickParams(self, i_params):
        """Set the four click-distribution spin boxes at once."""
        (delay, std, fr, fp_rate) = i_params
        self.box_seconds_delay.setValue(delay)
        self.box_click_dev.setValue(std)
        self.box_fp_rate.setValue(fp_rate)
        self.box_fr.setValue(fr)

    def setEnableLearning(self, i_checked):
        """Toggle all four per-parameter learning boxes together."""
        for box in (self.box_learn_delay, self.box_learn_std,
                    self.box_learn_fp, self.box_learn_fr):
            box.setChecked(i_checked)
class VolumeEditWidget(QtGui.QDialog, volume_editor_layout.Ui_Dialog):
##################################### Init
def __init__(self, i_parent=None):
QtGui.QDialog.__init__(self, i_parent)
self.setupUi(self)
self.volumes = []
for n in range(0, 5):
slider = getattr(self, "volume_settings_" + str(n))
self.volumes.append(slider.value())
func_vol = getattr(self, "setVolume" + str(n))
func_mute = getattr(self, "mute" + str(n))
box = getattr(self, "box_mute_" + str(n))
QtCore.QObject.connect( slider, QtCore.SIGNAL("sliderReleased()"), func_vol)
QtCore.QObject.connect( box, QtCore.SIGNAL("toggled(bool)"), func_mute)
QtCore.QObject.connect( self.box_mute_all, QtCore.SIGNAL("toggled(bool)"), self.muteAll)
########################################### Signal/slots
    # Per-channel checkbox slots: Qt signal connections cannot carry the
    # channel number, so each box gets its own thin forwarder to mute().
    def mute0(self, i_checked):
        self.mute(0, i_checked)
    def mute1(self, i_checked):
        self.mute(1, i_checked)
    def mute2(self, i_checked):
        self.mute(2, i_checked)
    def mute3(self, i_checked):
        self.mute(3, i_checked)
    def mute4(self, i_checked):
        self.mute(4, i_checked)
    # Per-channel slider slots: forward to setVolume(channel).
    def setVolume0(self):
        self.setVolume(0)
    def setVolume1(self):
        self.setVolume(1)
    def setVolume2(self):
        self.setVolume(2)
    def setVolume3(self):
        self.setVolume(3)
    def setVolume4(self):
        self.setVolume(4)
########################################## Get
def getVolume(self, i_channel):
slider_object = getattr(self, "volume_settings_" + str(i_channel))
val = float(slider_object.value()) / 1000.0
return val
########################################## Set
def setVolume(self, i_channel, i_save_volume=True):
slider_object = getattr(self, "volume_settings_" + str(i_channel))
slider_val = slider_object.value()
val = float(slider_val) / 1000.0
if i_save_volume:
self.volumes[i_channel] = slider_val
self.emit(QtCore.SIGNAL("volume(float,int)"), float(val), int(i_channel))
def setChannelConfig(self, i_channel_config):
nchannels = i_channel_config.getChannels()
channel_names = i_channel_config.getChannelNames()
for n in range(0, nchannels):
label_object = getattr(self, "volume_label_" + str(n))
label_object.setText(QtCore.QString(channel_names[n][0]))
label_object.show()
slider_object = getattr(self, "volume_settings_" + str(n))
slider_object.show()
for n in range(nchannels, 5):
object_name = "volume_label_" + str(n)
label_object = getattr(self, object_name)
label_object.hide()
slider_object = getattr(s
|
111pontes/ydk-py
|
cisco-ios-xr/ydk/models/cisco_ios_xr/_meta/_Cisco_IOS_XR_infra_objmgr_oper.py
|
Python
|
apache-2.0
| 57,513
| 0.016987
|
import re
import collections
from enum import Enum
from ydk._core._dm_meta_info import _MetaInfoClassMember, _MetaInfoClass, _MetaInfoEnum
from ydk.types import Empty, YList, YLeafList, DELETE, Decimal64, FixedBitsDict
from ydk._core._dm_meta_info import ATTRIBUTE, REFERENCE_CLASS, REFERENCE_LIST, REFERENCE_LEAFLIST, REFEREN
|
CE_IDENTITY_CLASS, REFERENCE_ENUM_CLASS, REFERENCE_BITS, REFERENCE_UNION, ANYXML_CLASS
from ydk.errors import YPYError, YPYModelError
from ydk.providers._importer import _yang_ns
_meta_table = {
'EndPortEnum' : _MetaInfoEnum('EndPortE
|
num', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper',
{
'echo':'echo',
'discard':'discard',
'daytime':'daytime',
'chargen':'chargen',
'ftp-data':'ftp_data',
'ftp':'ftp',
'ssh':'ssh',
'telnet':'telnet',
'smtp':'smtp',
'time':'time',
'nicname':'nicname',
'tacacs':'tacacs',
'domain':'domain',
'gopher':'gopher',
'finger':'finger',
'www':'www',
'host-name':'host_name',
'pop2':'pop2',
'pop3':'pop3',
'sun-rpc':'sun_rpc',
'ident':'ident',
'nntp':'nntp',
'bgp':'bgp',
'irc':'irc',
'pim-auto-rp':'pim_auto_rp',
'exec':'exec_',
'login':'login',
'cmd':'cmd',
'lpd':'lpd',
'uucp':'uucp',
'klogin':'klogin',
'kshell':'kshell',
'talk':'talk',
'ldp':'ldp',
}, 'Cisco-IOS-XR-infra-objmgr-oper', _yang_ns._namespaces['Cisco-IOS-XR-infra-objmgr-oper']),
'PortEnum' : _MetaInfoEnum('PortEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper',
{
'echo':'echo',
'discard':'discard',
'daytime':'daytime',
'chargen':'chargen',
'ftp-data':'ftp_data',
'ftp':'ftp',
'ssh':'ssh',
'telnet':'telnet',
'smtp':'smtp',
'time':'time',
'nicname':'nicname',
'tacacs':'tacacs',
'domain':'domain',
'gopher':'gopher',
'finger':'finger',
'www':'www',
'host-name':'host_name',
'pop2':'pop2',
'pop3':'pop3',
'sun-rpc':'sun_rpc',
'ident':'ident',
'nntp':'nntp',
'bgp':'bgp',
'irc':'irc',
'pim-auto-rp':'pim_auto_rp',
'exec':'exec_',
'login':'login',
'cmd':'cmd',
'lpd':'lpd',
'uucp':'uucp',
'klogin':'klogin',
'kshell':'kshell',
'talk':'talk',
'ldp':'ldp',
}, 'Cisco-IOS-XR-infra-objmgr-oper', _yang_ns._namespaces['Cisco-IOS-XR-infra-objmgr-oper']),
'PortOperatorEnum' : _MetaInfoEnum('PortOperatorEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper',
{
'equal':'equal',
'not-equal':'not_equal',
'greater-than':'greater_than',
'less-than':'less_than',
}, 'Cisco-IOS-XR-infra-objmgr-oper', _yang_ns._namespaces['Cisco-IOS-XR-infra-objmgr-oper']),
'StartPortEnum' : _MetaInfoEnum('StartPortEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper',
{
'echo':'echo',
'discard':'discard',
'daytime':'daytime',
'chargen':'chargen',
'ftp-data':'ftp_data',
'ftp':'ftp',
'ssh':'ssh',
'telnet':'telnet',
'smtp':'smtp',
'time':'time',
'nicname':'nicname',
'tacacs':'tacacs',
'domain':'domain',
'gopher':'gopher',
'finger':'finger',
'www':'www',
'host-name':'host_name',
'pop2':'pop2',
'pop3':'pop3',
'sun-rpc':'sun_rpc',
'ident':'ident',
'nntp':'nntp',
'bgp':'bgp',
'irc':'irc',
'pim-auto-rp':'pim_auto_rp',
'exec':'exec_',
'login':'login',
'cmd':'cmd',
'lpd':'lpd',
'uucp':'uucp',
'klogin':'klogin',
'kshell':'kshell',
'talk':'talk',
'ldp':'ldp',
}, 'Cisco-IOS-XR-infra-objmgr-oper', _yang_ns._namespaces['Cisco-IOS-XR-infra-objmgr-oper']),
'ObjectGroup.Port.Objects.Object.NestedGroups.NestedGroup' : {
'meta_info' : _MetaInfoClass('ObjectGroup.Port.Objects.Object.NestedGroups.NestedGroup',
False,
[
_MetaInfoClassMember('nested-group-name', ATTRIBUTE, 'str' , None, None,
[(1, 64)], [],
''' Nested object group
''',
'nested_group_name',
'Cisco-IOS-XR-infra-objmgr-oper', True),
_MetaInfoClassMember('nested-group-name-xr', ATTRIBUTE, 'str' , None, None,
[], [],
''' Nested group
''',
'nested_group_name_xr',
'Cisco-IOS-XR-infra-objmgr-oper', False),
],
'Cisco-IOS-XR-infra-objmgr-oper',
'nested-group',
_yang_ns._namespaces['Cisco-IOS-XR-infra-objmgr-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper'
),
},
'ObjectGroup.Port.Objects.Object.NestedGroups' : {
'meta_info' : _MetaInfoClass('ObjectGroup.Port.Objects.Object.NestedGroups',
False,
[
_MetaInfoClassMember('nested-group', REFERENCE_LIST, 'NestedGroup' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper', 'ObjectGroup.Port.Objects.Object.NestedGroups.NestedGroup',
[], [],
''' nested object group
''',
'nested_group',
'Cisco-IOS-XR-infra-objmgr-oper', False),
],
'Cisco-IOS-XR-infra-objmgr-oper',
'nested-groups',
_yang_ns._namespaces['Cisco-IOS-XR-infra-objmgr-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper'
),
},
'ObjectGroup.Port.Objects.Object.Operators.Operator' : {
'meta_info' : _MetaInfoClass('ObjectGroup.Port.Objects.Object.Operators.Operator',
False,
[
_MetaInfoClassMember('operator-type', REFERENCE_ENUM_CLASS, 'PortOperatorEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper', 'PortOperatorEnum',
[], [],
''' operation for ports
''',
'operator_type',
'Cisco-IOS-XR-infra-objmgr-oper', False),
_MetaInfoClassMember('operator-type-xr', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Operator
''',
'operator_type_xr',
'Cisco-IOS-XR-infra-objmgr-oper', False),
_MetaInfoClassMember('port', REFERENCE_UNION, 'str' , None, None,
[], [],
''' Port number
''',
'port',
'Cisco-IOS-XR-infra-objmgr-oper', False, [
_MetaInfoClassMember('port', REFERENCE_ENUM_CLASS, 'PortEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper', 'PortEnum',
[], [],
''' Port number
''',
'port',
'Cisco-IOS-XR-infra-objmgr-oper', False),
_MetaInfoClassMember('port', ATTRIBUTE, 'int' , None, None,
[('0', '65535')], [],
''' Port number
''',
'port',
'Cisco-IOS-XR-infra-objmgr-oper', False),
]),
|
Mimino666/tc-marathoner
|
marathoner/utils/ossignal.py
|
Python
|
mit
| 765
| 0
|
import signal
# Map signal numbers to their symbolic names. dir() also yields SIG_DFL /
# SIG_IGN, which start with 'SIG' and are integer-valued, so they are kept
# exactly as the original scan did.
signal_names = {}
for _signame in dir(signal):
    if not _signame.startswith('SIG'):
        continue
    _signum = getattr(signal, _signame)
    if isinstance(_signum, int):
        signal_names[_signum] = _signame
def get_signal_name(signal_code):
    """Return '<code> (<NAME>)' for known signals, else just '<code>'."""
    name = signal_names.get(signal_code, '')
    if name:
        return '%s (%s)' % (signal_code, name)
    return '%s' % signal_code
def install_shutdown_handlers(func):
    '''Install the given function as a signal handler for all common shutdown
    signals (such as SIGINT, SIGTERM, etc).
    '''
    for signum in (signal.SIGTERM, signal.SIGINT):
        signal.signal(signum, func)
    # Catch Ctrl-Break in windows
    if hasattr(signal, 'SIGBREAK'):
        signal.signal(signal.SIGBREAK, func)
|
micronicstraining/python
|
module_3/lesson_3/practice.py
|
Python
|
agpl-3.0
| 3,261
| 0.003068
|
#! /usr/bin/env python3
# Create a rotate by 13 encoder - http://www.rot13.com/
# Use:
# hint use codecs.encode. look it up in the documentation
import codecs
def rot13_encode(data):
    """Return *data* with every letter rotated 13 places (ROT13)."""
    encoded = codecs.encode(data, 'rot13')
    return encoded
# What will be the output?
hello_world_rotated = rot13_encode("Hello World")
print(hello_world_rotated)  # -> Uryyb Jbeyq
# Reverse a string using a slice
# use [::-1]
some_str = 'Hello World'
print(some_str[::-1])  # -> dlroW olleH
# Reverse a string using reversed keyword
#>>> reversed('Hello World')
#<reversed object at 0x7f225393a630>
#>>> print(reversed('Hello World'))
#<reversed object at 0x7f225393a6d8>
#>>> '.'.join(reversed('Hello World'))
#'d.l.r.o.W. .o.l.l.e.H'
#>>> ''.join(reversed('Hello World'))
#'dlroW olleH'
#>>>
# Between last two which is more 'pythonic'? Offset
# What is wrong with the following code:
# >>> (1,2) + (1)
# Traceback (most recent call last):
# File "<stdin>", line 1, in <module>
# TypeError: can only concatenate tuple (not "int") to tuple
# Note: the intent is to concat to the first tuple
# How can you fix it?
# >>> (1,2) + (1,)
# (1, 2, 1)
# Create a dictionary with the country followed by it's capital city
# for the following countries:
# China, Germany, Greece, Russia, United Kingdom
# Print out only the values
# >>> ord('A')
# 65
# ord('A') - ord('\0')
# 65
# What do you think ord does? Can you apply it to a list in a loop?
# Can you apply it using a map?
# >>> list(map(ord, 'Hello'))
# [72, 101, 108, 108, 111]
# >>>
# What if you had a word and you wanted to convert it to a stream of bits
# How can you do this using ordinal and map?
# >>> list(map(bin, map(ord, 'Hello')))
# ['0b1001000', '0b1100101', '0b1101100', '0b1101100', '0b1101111']
# datetime practice
# from datetime import datetime
# how can you get current time?
# how can you calculate number of days since your birthday?
# hackerrank datetime problems
# In order to send email you can use the send-mail transfer protocol (SMTP)
# there is a built-in library for this
# What do you think the following code does:
# >>> import smtplib
# >>> s = smtplib.SMTP('localhost', 1025)
# >>> s.sendmail('me@example.com', 'you@example.com', "Python is awesome!")
# Open wireshark and start capturing
# Create a script with above and launch it with:
# python -m smtpd -n -c DebuggingServer localhost:1025
# See if you can find SMTP packet sent
# syslogd is
# a socket is a TCP or UDP connection to an ip address/port which lets you
# send and recieve data
# what do you think the following code does?
# >>> from socket import *
# >>> syslogd = socket(AF_INET, SOCK_DGRAM)
# >>> syslogd.bind(('localhost', 514))
# >>> while True:
# ... message, source = syslogd.recvfrom(2048)
# ... print "[%s] %s" % (source[0], message)
#
# Create a class called Address
# It should have a line 1, line 2, city, state, zip, country
# over ride the __repr__ to print out a well formatted address.
#
# Create a class call Customer
# It should have a first name, last name, email and address object
#
# SKIP NEXT ONE:
# Create a product class. It should have an item name and cost.
# Override the __add__ method so products can be added to each other
#
# - closure, lambda review
#
|
jarcodallo/custom_modules
|
legacy_clients/__openerp__.py
|
Python
|
gpl-2.0
| 229
| 0.004405
|
# OpenERP/Odoo module manifest for the legacy partner integration.
{
    'name': "Legacy Partner integration",
    'version': "1.1",
    'author': "José A. Ramírez",
    'category': "Tools",
    # Only the core 'base' module is required.
    'depends': ['base'],
    'data': ['legacy_partner.xml'],
    'demo': [],
    'installable': True,
}
|
iw3hxn/LibrERP
|
stock_picking_extended/models/inherit_stock_location.py
|
Python
|
agpl-3.0
| 5,169
| 0.002902
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2010-2012 Associazione OpenERP Italia
# (<http://www.openerp-italia.org>).
# Copyright (C) 2014 Didotech srl
# (<http://www.didotech.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import logging
from datetime import datetime
from openerp import SUPERUSER_ID
from openerp.osv import orm, fields
from openerp.tools import DEFAULT_SERVER_DATETIME_FORMAT
_logger = logging.getLogger(__name__)
_logger.setLevel(logging.DEBUG)
class stock_location(orm.Model):
    """Extend stock.location with optional per-location quantity columns
    materialized on product_product."""
    _inherit = "stock.location"
    _columns = {
        'update_product_bylocation': fields.boolean('Show Product location quantity on db', help='If check create a columns on product_product table for get product for this location'),
        'product_related_columns': fields.char('Columns Name on product_product')
    }

    def update_product_by_location(self, cr, uid, context=None):
        """Refresh the cached per-location quantity columns on products.

        For every location flagged with ``update_product_bylocation``,
        recompute each product's available quantity in that location and
        write it into the product's dedicated column when it changed.
        """
        context = context or self.pool['res.users'].context_get(cr, uid)
        location_ids = self.search(cr, uid, [('update_product_bylocation', '=', True)], context=context)
        column_by_location = {}
        start_time = datetime.now()
        date_product_by_location_update = start_time.strftime(DEFAULT_SERVER_DATETIME_FORMAT)
        if location_ids:
            product_obj = self.pool['product.product']
            for location in self.browse(cr, uid, location_ids, context):
                column_by_location[location.id] = location.product_related_columns
            product_ids = product_obj.search(cr, uid, [('type', '!=', 'service')], context=context)
            product_context = context.copy()
            pending_writes = {product_id: {} for product_id in product_ids}
            for location_id, column in column_by_location.items():
                # qty_available is location-dependent via the context key
                product_context['location'] = location_id
                for product in product_obj.browse(cr, uid, product_ids, product_context):
                    if column and (product[column] != product.qty_available):
                        pending_writes[product.id][column] = product.qty_available
            if pending_writes:
                for product_id, values in pending_writes.items():
                    if values:
                        values['date_product_by_location_update'] = date_product_by_location_update
                        product_obj.write(cr, uid, product_id, values, context)
        duration = '{sec}'.format(sec=datetime.now() - start_time)
        _logger.info(u'update_product_by_location get in {duration}'.format(duration=duration))
        return True

    def create_product_by_location(self, cr, location_name, context):
        """Create a manual float column x_<location_name> on product_product.

        Returns (field_id, field_name) for the new ir.model.fields record.
        """
        model_id = self.pool['ir.model.data'].get_object_reference(cr, SUPERUSER_ID, 'product', 'model_product_product')[1]
        field_name = 'x_{location_name}'.format(location_name=location_name).lower().replace(' ', '_')
        fields_value = {
            'field_description': location_name,
            'groups': [[6, False, []]],
            'model_id': model_id,
            'name': field_name,
            'readonly': False,
            'required': False,
            'select_level': '0',
            'serialization_field_id': False,
            'translate': False,
            'ttype': 'float',
        }
        field_context = context.copy()
        field_context.update(
            {
                'department_id': False,
                'lang': 'it_IT',
                'manual': True,  # required for create columns on table
                'uid': 1
            }
        )
        fields_id = self.pool['ir.model.fields'].create(cr, SUPERUSER_ID, fields_value, field_context)
        return fields_id, field_name

    def write(self, cr, uid, ids, vals, context=None):
        """On enabling update_product_bylocation, create the backing column."""
        context = context or self.pool['res.users'].context_get(cr, uid)
        if vals.get('update_product_bylocation', False):
            for location in self.browse(cr, uid, ids, context):
                field_id, field_name = self.create_product_by_location(cr, location.name, context)
                vals['product_related_columns'] = field_name
        return super(stock_location, self).write(cr, uid, ids, vals, context)
|
alephu5/Soundbyte
|
environment/lib/python3.3/site-packages/matplotlib/backends/backend_qt4.py
|
Python
|
gpl-3.0
| 31,660
| 0.008654
|
import math
import os
import re
import signal
import sys
import matplotlib
from matplotlib import verbose
from matplotlib.cbook import is_string_like, onetrue
from matplotlib.backend_bases import RendererBase, GraphicsContextBase, \
FigureManagerBase, FigureCanvasBase, NavigationToolbar2, IdleEvent, \
cursors, TimerBase
from matplotlib.backend_bases import ShowBase
from matplotlib._pylab_helpers import Gcf
from matplotlib.figure import Figure
from matplotlib.mathtext import MathTextParser
from matplotlib.widgets import SubplotTool
try:
import matplotlib.backends.qt4_editor.figureoptions as figureoptions
except ImportError:
figureoptions = None
from .qt4_compat import QtCore, QtGui, _getSaveFileName, __version__
backend_version = __version__
def fn_name(): return sys._getframe(1).f_code.co_name
DEBUG = False
# Map matplotlib cursor constants onto the equivalent Qt cursor shapes.
cursord = {
    cursors.MOVE : QtCore.Qt.SizeAllCursor,
    cursors.HAND : QtCore.Qt.PointingHandCursor,
    cursors.POINTER : QtCore.Qt.ArrowCursor,
    cursors.SELECT_REGION : QtCore.Qt.CrossCursor,
    }
def draw_if_interactive():
    """
    Is called after every pylab drawing command.

    In interactive mode, schedule an idle redraw of the active figure
    (a no-op when no figure manager is active).
    """
    if matplotlib.is_interactive():
        figManager = Gcf.get_active()
        # identity check instead of `!= None` (PEP 8 E711); behavior is the
        # same for figure managers, which do not override __ne__
        if figManager is not None:
            figManager.canvas.draw_idle()
def _create_qApp():
    """
    Only one qApp can exist at a time, so check before creating one.
    """
    if QtGui.QApplication.startingUp():
        if DEBUG: print("Starting up QApplication")
        # module-level reference keeps the application object alive
        global qApp
        app = QtGui.QApplication.instance()
        if app is None:
            # check for DISPLAY env variable on X11 build of Qt
            if hasattr(QtGui, "QX11Info"):
                display = os.environ.get('DISPLAY')
                # a usable X display looks like host:N[.M]
                if display is None or not re.search(':\d', display):
                    raise RuntimeError('Invalid DISPLAY variable')
            qApp = QtGui.QApplication( [" "] )
            # quit the event loop once the last window closes
            QtCore.QObject.connect( qApp, QtCore.SIGNAL( "lastWindowClosed()" ),
                                    qApp, QtCore.SLOT( "quit()" ) )
        else:
            qApp = app
class Show(ShowBase):
    """Qt4 implementation of the pylab ``show`` entry point."""
    def mainloop(self):
        # allow KeyboardInterrupt exceptions to close the plot window.
        signal.signal(signal.SIGINT, signal.SIG_DFL)
        QtGui.qApp.exec_()
show = Show()
def new_figure_manager(num, *args, **kwargs):
    """
    Create a new figure manager instance
    """
    figure = Figure(*args, **kwargs)
    return new_figure_manager_given_figure(num, figure)
def new_figure_manager_given_figure(num, figure):
    """
    Create a new figure manager instance for the given figure.
    """
    return FigureManagerQT(FigureCanvasQT(figure), num)
class TimerQT(TimerBase):
    '''
    Subclass of :class:`backend_bases.TimerBase` that uses Qt4 timer events.
    Attributes:
    * interval: The time between timer events in milliseconds. Default
        is 1000 ms.
    * single_shot: Boolean flag indicating whether this timer should
        operate as single shot (run once and then stop). Defaults to False.
    * callbacks: Stores list of (func, args) tuples that will be called
        upon timer events. This list can be manipulated directly, or the
        functions add_callback and remove_callback can be used.
    '''
    def __init__(self, *args, **kwargs):
        TimerBase.__init__(self, *args, **kwargs)
        # Create a new timer and connect the timeout() signal to the
        # _on_timer method.
        self._timer = QtCore.QTimer()
        QtCore.QObject.connect(self._timer, QtCore.SIGNAL('timeout()'),
                               self._on_timer)
        self._timer_set_interval()
    def __del__(self):
        # Probably not necessary in practice, but is good behavior to disconnect
        try:
            TimerBase.__del__(self)
            QtCore.QObject.disconnect(self._timer,
                                      QtCore.SIGNAL('timeout()'), self._on_timer)
        except RuntimeError:
            # Timer C++ object already deleted
            pass
    # The four methods below implement the TimerBase hooks by delegating
    # to the underlying QTimer.
    def _timer_set_single_shot(self):
        self._timer.setSingleShot(self._single)
    def _timer_set_interval(self):
        self._timer.setInterval(self._interval)
    def _timer_start(self):
        self._timer.start()
    def _timer_stop(self):
        self._timer.stop()
class FigureCanvasQT( QtGui.QWidget, FigureCanvasBase ):
keyvald = { QtCore.Qt.Key_Control : 'control',
QtCore.Qt.Key_Shift : 'shift',
QtCore.Qt.Key_Alt : 'alt',
QtCore.Qt.Key_Meta : 'super',
QtCore.Qt.Key_Return : 'enter',
QtCore.Qt.Key_Left : 'left',
QtCore.Qt.Key_Up : 'up',
QtCore.Qt.Key_Right : 'right',
QtCore.Qt.Key_Down : 'down',
QtCore.Qt.Key_Escape : 'escape',
QtCore.Qt.Key_F1 : 'f1',
QtCore.Qt.Key_F2 : 'f2',
QtCore.Qt.Key_F3 : 'f3',
QtCore.Qt.Key_F4 : 'f4',
QtCore.Qt.Key_F5 : 'f5',
QtCore.Qt.Key_F6 : 'f6',
QtCore.Qt.Key_F7 : 'f7',
QtCore.Qt.Key_F8 : 'f8',
QtCore.Qt.Key_F9 : 'f9',
QtCore.Qt.Key_F10 : 'f10',
QtCore.Qt.Key_F11 : 'f11',
QtCore.Qt.Key_F12 : 'f12',
QtCore.Qt.Key_Home : 'home',
QtCore.Qt.Key_End : 'end',
QtCore.Qt.Key_PageUp : 'pageup',
QtCore.Qt.Key_PageDown : 'pagedown',
}
# define the modifier keys which are to be collected on keyboard events.
# format is: [(modifier_flag, modifier_name, equivalent_key)
_modifier_keys = [
(QtCore.Qt.MetaModifier, 'super', QtCore.Qt.Key_Meta),
(QtCore.Qt.AltModifier, 'alt', QtCore.Qt.Key_Alt),
(QtCore.Qt.ControlModifier, 'ctrl', QtCore.Qt.Key_Control)
]
_ctrl_modifier = QtCore.Qt.ControlModifier
if sys.platform == 'darwin':
# in OSX, the control and super (aka cmd/apple) keys are switched, so
# switch them back.
keyvald.update({
QtCore.Qt.Key_Control : 'super', # cmd/apple key
QtCore.Qt.Key_Meta : 'control',
})
_modifier_keys = [
(QtCore.Qt.ControlModifier, 'super', QtCore.Qt.Key_Control),
(QtCore.Qt.AltModifier, 'alt', QtCore.Qt.Key_Alt),
(QtCore.Qt.MetaModifier, 'ctrl', QtCore.Qt.Key_Meta),
]
_ctrl_modifier = QtCore.Qt.MetaModifier
# map Qt button codes to MouseEvent's ones:
buttond = {QtCore.Qt.LeftButton : 1,
QtCore.Qt.MidButton : 2,
QtCore.Qt.RightButton : 3,
# QtCore.Qt.XButton1 : None,
# QtCore.Qt.XButton2 : None,
}
def __init__( self, figure ):
if DEBUG: print('FigureCanvasQt: ', figure)
_create_qApp()
QtGui.QWidget.__init__( self )
FigureCanvasBase.__init__( self, figure )
self.figure = figure
self.setMouseTracking( True )
self._idle = True
# hide until we can test and fix
#self.startTimer(backend_IdleEvent.milliseconds)
w,h = self.get_width_height()
self.resize( w, h )
    def __timerEvent(self, event):
        # hide until we can test and fix
        # NOTE(review): name-mangled and apparently never started here —
        # forwards Qt timer events to the mpl idle machinery; confirm intent.
        self.mpl_idle_event(event)
    def enterEvent(self, event):
        """Qt hook: forward pointer-enter to the mpl event system."""
        FigureCanvasBase.enter_notify_event(self, event)
    def leaveEvent(self, event):
        """Qt hook: restore the cursor and forward pointer-leave."""
        QtGui.QApplication.restoreOverrideCursor()
        FigureCanvasBase.leave_notify_event(self, event)
def mousePressEvent( self, event ):
x = event.pos().x()
# flipy so y=0 is bottom of canvas
y = self.figure.bbox.height - event.pos().y()
button = self.buttond.get(event.button())
if button is not None:
FigureCanvasBase.button_press_event( self, x, y, button )
if DEBUG: print('button presse
|
GoogleCloudPlatform/magic-modules
|
mmv1/provider/ansible/test_gcp_session.py
|
Python
|
apache-2.0
| 6,426
| 0.002023
|
# -*- coding: utf-8 -*-
# (c) 2019, Google Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from pytest import importorskip
from units.compat import unittest
from units.compat.mock import patch
from contextlib import contextmanager
from ansible.module_utils.gcp_utils import GcpSession
import responses
import tempfile
importorskip("requests")
importorskip("google.auth")
importorskip("responses")
from google.auth.credentials import AnonymousCredentials
class FakeModule(object):
    """Minimal stand-in for an Ansible module: holds params and raises
    the supplied msg instead of exiting on failure."""
    def __init__(self, params):
        self.params = params
    def fail_json(self, **kwargs):
        # surface the failure to the test instead of sys.exit()-ing
        raise kwargs["msg"]
class GcpSessionTestCase(unittest.TestCase):
success_json = {"status": "SUCCESS"}
user_agent = "Google-Ansible-MM-mock"
url = "http://www.googleapis.com/compute/test_instance"
    @contextmanager
    def setup_auth(self):
        """
        This is a context manager that mocks out
        the google-auth library and uses the built-in
        AnonymousCredentials for sending requests.
        """
        with patch(
            "google.oauth2.service_account.Credentials.from_service_account_file"
        ) as mock:
            with patch.object(
                AnonymousCredentials, "with_scopes", create=True
            ) as mock2:
                creds = AnonymousCredentials()
                # Both the file loader and the scoping call return the same
                # anonymous credentials, so no real key file is needed.
                mock2.return_value = creds
                mock.return_value = creds
                yield
@responses.activate
def test_get(self):
responses.add(responses.GET, self.url, status=200, json=self.success_json)
with self.setup_auth():
module = FakeModule(
{
"scopes": "foo",
"service_account_file": "file_name",
"project": "test_project",
"auth_kind": "serviceaccount",
}
)
session = GcpSession(module, "mock")
resp = session.get(self.url)
assert responses.calls[0].request.headers["User-Agent"] == self.user_agent
assert resp.json() == self.success_json
assert resp.status_code == 200
@responses.activate
def test_post(self):
responses.add(responses.POST, self.url, status=200, json=self.success_json)
with self.setup_auth():
body = {"content": "some_content"}
module = FakeModule(
{
"scopes": "foo",
"service_account_file": "file_name",
"project": "test_project",
"auth_kind": "serviceaccount",
}
)
session = GcpSession(module, "mock")
resp = session.post(
self.url, body=body, headers={"x-added-header": "my-header"}
)
# Ensure Google header added.
assert responses.calls[0].request.headers["User-Agent"] == self.user_agent
# Ensure all content was passed along.
assert responses.calls[0].request.headers["x-added-header"] == "my-header"
# Ensure proper request was made.
assert resp.json() == self.success_json
assert resp.status_code == 200
@responses.activate
def test_delete(self):
responses.add(responses.DELETE, self.url, status=200, json=self.success_json)
with self.setup_auth():
body = {"content": "some_content"}
module = FakeModule(
{
"scopes": "foo",
"service_account_file": "file_name",
"project": "test_project",
"auth_kind": "serviceaccount",
}
)
session = GcpSession(module, "mock")
resp = session.delete(self.url)
# Ensure Google header added.
assert responses.calls[0].request.headers["User-Agent"] == self.user_agent
# Ensure proper request was made.
assert resp.json() == self.success_json
assert resp.status_code == 200
@responses.activate
def test_put(self):
responses.add(responses.PUT, self.url, status=200, json=self.success_json)
with self.setup_auth():
body = {"content": "some_content"}
module = FakeModule(
{
"scopes": "foo",
"service_account_file": "file_name",
"project": "test_project",
"auth_kind": "serviceaccount",
}
)
session = GcpSession(module, "mock")
resp = session.put(self.url, body={"foo": "bar"})
# Ensure Google header added.
assert responses.calls[0].request.headers["User-Agent"] == self.user_agent
# Ensure proper request was made.
assert resp.json() == self.success_json
assert resp.status_code == 200
@responses.activate
def test_patch(self):
responses.add(responses.PATCH, self.url, status=200, json=self.success_json)
with self.setup_auth():
body = {"content": "some_content"}
module = FakeModule(
{
"scopes": "foo",
"service_account_file": "file_name",
"project": "test_project",
"auth_kind": "serviceaccount",
}
)
session = GcpSession(module, "mock")
resp = session.patch(self.url, body={"foo": "bar"})
# Ensure Google header added.
assert responses.calls[0].request.headers["User-Agent"] == self.user_agent
# Ensure proper request was made.
assert resp.json() == self.success_json
assert resp.status_code == 200
|
ndncomm/mini-ndn
|
ndn/experiments/integration_tests.py
|
Python
|
gpl-3.0
| 3,981
| 0.004019
|
# -*- Mode:python; c-file-style:"gnu"; indent-tabs-mode:nil -*- */
#
# Copyright (C) 2015 The University of Memphis,
# Arizona Board of Regents,
# Regents of the University of California.
#
# This file is part of Mini-NDN.
# See AUTHORS.md for a complete list of Mini-NDN authors and contributors.
#
# Mini-NDN is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Mini-NDN is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Mini-NDN, e.g., in COPYING.md file.
# If not, see <http://www.gnu.org/licenses/>.
from ndn.experiments.experiment import Experiment
from mininet.clean import sh
import os
class IntegrationTests(Experiment):
    """Run the NFD integration test suite inside a Mini-NDN topology.

    ``setup()`` provisions passwordless SSH between hosts, installs the
    generated NFD configuration, and wires static routes; ``run()``
    executes the selected tests from host 'a'.
    """

    def __init__(self, args):
        Experiment.__init__(self, args)

    def setup(self):
        # NOTE: print() with parentheses and the 0o755 literal keep this
        # file importable under both Python 2 and Python 3 (the previous
        # `print "..."` / `0755` forms are Python-2-only syntax).
        print("Creating SSH keys")
        sh("mkdir -p /tmp/minindn")
        # Regenerate host and client keys from scratch on every run so a
        # stale key from a previous topology cannot interfere.
        sh("rm -f /tmp/minindn/ssh_host_rsa_key")
        sh("ssh-keygen -q -t rsa -N '' -f /tmp/minindn/ssh_host_rsa_key")
        sh("rm -f /tmp/minindn/id_rsa")
        sh("ssh-keygen -q -t rsa -N '' -f /tmp/minindn/id_rsa")
        sh("cat /tmp/minindn/id_rsa.pub > /tmp/minindn/authorized_keys")

        sshd_cmd = ['/usr/sbin/sshd',
                    '-q',
                    '-o AuthorizedKeysFile=/tmp/minindn/authorized_keys',
                    '-o HostKey=/tmp/minindn/ssh_host_rsa_key',
                    '-o StrictModes=no']

        for host in self.net.hosts:
            # Run SSH daemon
            host.cmd(sshd_cmd)

            # Create a wrapper script for ssh that uses the generated key as default SSH identity
            host.cmd("mkdir -p ~/bin")
            homedir = host.cmd("echo -n ${HOME}")
            ssh_wrapper = homedir + '/bin/ssh'
            with open(ssh_wrapper, 'w') as f:
                f.writelines([
                    '#!/bin/sh\n',
                    'exec /usr/bin/ssh -f -i /tmp/minindn/id_rsa -o StrictHostKeyChecking=no "$@"\n'
                ])
            os.chmod(ssh_wrapper, 0o755)
            host.cmd("export PATH=\"${HOME}/bin${PATH:+:}${PATH}\"")

            # Copy nfd configuration into default configuration location
            host.cmd("cp %s %s" % (host.nfd.confFile, "/usr/local/etc/ndn/nfd.conf"))

            if host.name == 'a':
                sh("cp -r %s %s" % ("integration-tests", homedir))

        # Setup static routes between a and d
        self.net['a'].cmd("ip route add 192.168.3.0/24 via 192.168.2.3")
        self.net['d'].cmd("ip route add 192.168.2.0/24 via 192.168.3.2")
        # Enable IP forwarding on r
        self.net['r'].cmd("sysctl net.ipv4.ip_forward=1")

    def run(self):
        # Tests are supposed to be run from host a
        a = self.net['a']
        a.cmd("cd ~/integration-tests")
        tests = [
            #"test_linkfail",
            #"test_hub_discovery",
            #"test_interest_loop",
            #"test_interest_aggregation",
            #"test_localhost_scope",
            #"test_multicast_strategy",
            #"test_multicast",
            #"test_tcp_udp_tunnel",
            #"test_localhop",
            "test_unixface",
            "test_ndnpeekpoke",
            "test_route_expiration",
            #"test_nfdc",
            "test_ndnping",
            "test_cs_freshness",
            "test_nrd",
            "test_fib_matching",
            #"test_remote_register",
            "test_ndntraffic"
        ]
        for test in tests:
            a.cmd("./run_tests.py", test, verbose=True)

Experiment.register("integration-tests", IntegrationTests)
|
plotly/python-api
|
packages/python/plotly/plotly/validators/funnelarea/title/font/_family.py
|
Python
|
mit
| 616
| 0.001623
|
import _plotly_utils.basevalidators
class FamilyValidator(_plotly_utils.basevalidators.StringValidator):
    """Validator for the ``family`` property of ``funnelarea.title.font``."""

    def __init__(
        self, plotly_name="family", parent_name="funnelarea.title.font", **kwargs
    ):
        # Pull the overridable defaults out of kwargs first so values an
        # explicit caller passed always win over the hard-coded ones.
        defaults = {
            "array_ok": kwargs.pop("array_ok", True),
            "edit_type": kwargs.pop("edit_type", "plot"),
            "no_blank": kwargs.pop("no_blank", True),
            "role": kwargs.pop("role", "style"),
            "strict": kwargs.pop("strict", True),
        }
        defaults.update(kwargs)
        super(FamilyValidator, self).__init__(
            plotly_name=plotly_name, parent_name=parent_name, **defaults
        )
|
mcvmcv/wurstboard
|
tests.py
|
Python
|
gpl-2.0
| 1,285
| 0.004669
|
#!flask/bin/python
import os
import unittest
from config import basedir
from app import app, db
from app.models import User
class TestCase(unittest.TestCase):
    """Model-level tests backed by a throwaway SQLite database."""

    def setUp(self):
        # Point the app at an isolated on-disk test database and disable
        # CSRF so form submissions in tests need no token.
        app.config['TESTING'] = True
        app.config['WTF_CSRF_ENABLED'] = False
        app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///' + os.path.join(basedir, 'test.db')
        self.app = app.test_client()
        db.create_all()

    def tearDown(self):
        db.session.remove()
        db.drop_all()

    def test_avatar(self):
        user = User(nickname='john', email='john@example.com')
        avatar_url = user.avatar(128)
        expected = 'http://www.gravatar.com/avatar/d4c74594d841139328695756648b6bd6'
        # Only the prefix is checked; size/query parameters may follow.
        assert avatar_url.startswith(expected)

    def test_make_unique_nickname(self):
        first = User(nickname='john', email='john@example.com')
        db.session.add(first)
        db.session.commit()
        alias = User.make_unique_nickname('john')
        assert alias != 'john'
        second = User(nickname=alias, email='susan@example.com')
        db.session.add(second)
        db.session.commit()
        alias2 = User.make_unique_nickname('john')
        # A second collision must yield yet another distinct nickname.
        assert alias2 != 'john'
        assert alias2 != alias
if __name__ == '__main__':
    # Run the suite when this file is executed directly.
    unittest.main()
|
treasure-data/luigi-td
|
setup.py
|
Python
|
apache-2.0
| 778
| 0
|
#!/usr/bin/env python
from setuptools import setup, find_packages

# Read runtime dependencies from requirements.txt; the context manager
# guarantees the file handle is closed (the original inline open() leaked it).
with open("requirements.txt") as requirements_file:
    requirements = requirements_file.read().splitlines()

setup(
    name="luigi-td",
    version='0.6.10.dev0',
    description="Luigi integration for Treasure Data",
    author="Treasure Data, Inc.",
    author_email="support@treasure-data.com",
    url="https://github.com/treasure-data/luigi-td",
    install_requires=requirements,
    packages=find_packages(),
    license="Apache License 2.0",
    platforms="Posix; MacOS X; Windows",
    classifiers=[
        "Development Status :: 4 - Beta",
        "Environment :: Console",
        "Intended Audience :: Developers",
        "License :: OSI Approved :: Apache Software License",
        "Operating System :: OS Independent",
        "Topic :: Software Development",
    ],
)
|
Azure/azure-sdk-for-python
|
sdk/sql/azure-mgmt-sql/azure/mgmt/sql/operations/_database_automatic_tuning_operations.py
|
Python
|
mit
| 10,488
| 0.003719
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import functools
from typing import Any, Callable, Dict, Generic, Optional, TypeVar
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpResponse
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.mgmt.core.exceptions import ARMErrorFormat
from msrest import Serializer
from .. import models as _models
from .._vendor import _convert_request, _format_url_section
T = TypeVar('T')
JSONType = Any
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
_SERIALIZER = Serializer()
_SERIALIZER.client_side_validation = False
def build_get_request(
    resource_group_name: str,
    server_name: str,
    database_name: str,
    subscription_id: str,
    **kwargs: Any
) -> HttpRequest:
    """Assemble the GET request for a database's current automatic tuning."""
    api_version = "2020-11-01-preview"
    accept = "application/json"

    # URL template with each path parameter serialized and substituted.
    url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/databases/{databaseName}/automaticTuning/current')
    path_args = {
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
        "serverName": _SERIALIZER.url("server_name", server_name, 'str'),
        "databaseName": _SERIALIZER.url("database_name", database_name, 'str'),
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
    }
    url = _format_url_section(url, **path_args)

    # Query string and headers.
    query_params = kwargs.pop("params", {})  # type: Dict[str, Any]
    query_params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
    headers = kwargs.pop("headers", {})  # type: Dict[str, Any]
    headers['Accept'] = _SERIALIZER.header("accept", accept, 'str')

    return HttpRequest(
        method="GET",
        url=url,
        params=query_params,
        headers=headers,
        **kwargs
    )
def build_update_request(
    resource_group_name: str,
    server_name: str,
    database_name: str,
    subscription_id: str,
    *,
    json: JSONType = None,
    content: Any = None,
    **kwargs: Any
) -> HttpRequest:
    """Assemble the PATCH request that updates a database's automatic tuning.

    The body may be supplied either as ``json`` (serialized by the pipeline)
    or as raw ``content``; a Content-Type header is only emitted when the
    caller provided one via ``content_type``.
    """
    content_type = kwargs.pop('content_type', None)  # type: Optional[str]

    api_version = "2020-11-01-preview"
    accept = "application/json"
    # Construct URL
    url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/databases/{databaseName}/automaticTuning/current')
    path_format_arguments = {
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
        "serverName": _SERIALIZER.url("server_name", server_name, 'str'),
        "databaseName": _SERIALIZER.url("database_name", database_name, 'str'),
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
    }

    url = _format_url_section(url, **path_format_arguments)

    # Construct parameters
    query_parameters = kwargs.pop("params", {})  # type: Dict[str, Any]
    query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')

    # Construct headers
    header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
    if content_type is not None:
        header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
    header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')

    return HttpRequest(
        method="PATCH",
        url=url,
        params=query_parameters,
        headers=header_parameters,
        json=json,
        content=content,
        **kwargs
    )
class DatabaseAutomaticTuningOperations(object):
"""DatabaseAutomaticTuningOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.sql.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
@distributed_trace
def get(
self,
resource_group_name: str,
server_name: str,
database_name: str,
**kwargs: Any
) -> "_models.DatabaseAutomaticTuning":
"""Gets a database's automatic tuning.
:param resource_group_name: The name of the resource group that contains the resource. You can
obtain this value from the Azure Resource Manager API or the portal.
:type resource_group_name: str
:param server_name: The name of the server.
:type server_name: str
:param database_name: The name of the database.
:type database_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: DatabaseAutomaticTuning, or the result of cls(response)
:rtype: ~azure.mgmt.sql.models.DatabaseAutomaticTuning
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.DatabaseAutomaticTuning"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_get_request(
resource_group_name=resource_group_name,
server_name=server_name,
database_name=database_name,
subscription_id=self._config.subscription_id,
template_url=self.get.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('DatabaseAutomaticTuning', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/databases/{databaseName}/automaticTuning/current'} # type: ignore
@distributed_trace
def update(
self,
resource_group_name: str,
server_name: str,
database_name: str,
parameters: "_models.DatabaseAutomaticTuning",
**kwargs: Any
) -> "_models.DatabaseAutomaticTuning":
"""Update automatic tuning properties for target database.
:param resource_group_name: The name of the resource group that contains the resource. You can
obtain this value from the Azure Resource Manager API or the portal.
:type resource_group_n
|
ProfessorX/Config
|
.PyCharm30/system/python_stubs/-1247972723/PyKDE4/kdeui/KTimeComboBox.py
|
Python
|
gpl-2.0
| 3,363
| 0.011299
|
# encoding: utf-8
# module PyKDE4.kdeui
# from /usr/lib/python2.7/dist-packages/PyKDE4/kdeui.so
# by generator 1.135
# no doc
# imports
import PyKDE4.kdecore as __PyKDE4_kdecore
impo
|
rt PyQt4.QtCore as __PyQt4_QtCore
import PyQt4.QtGui as __PyQt4_QtGui
import PyQt4.QtSvg as __PyQt4_QtSvg
from KComboBox import KComboBox
class KTimeComboBox(KComboBox):
    """Auto-generated stub for PyKDE4's KTimeComboBox widget.

    Produced by the stub generator from the compiled ``kdeui`` module, so
    every method signature is unknown and every body is a placeholder.
    """
    def assignTime(self, *args, **kwargs): # real signature unknown
        pass

    def displayFormat(self, *args, **kwargs): # real signature unknown
        pass

    def eventFilter(self, *args, **kwargs): # real signature unknown
        pass

    def focusInEvent(self, *args, **kwargs): # real signature unknown
        pass

    def focusOutEvent(self, *args, **kwargs): # real signature unknown
        pass

    def hidePopup(self, *args, **kwargs): # real signature unknown
        pass

    def isNull(self, *args, **kwargs): # real signature unknown
        pass

    def isValid(self, *args, **kwargs): # real signature unknown
        pass

    def keyPressEvent(self, *args, **kwargs): # real signature unknown
        pass

    def maximumTime(self, *args, **kwargs): # real signature unknown
        pass

    def minimumTime(self, *args, **kwargs): # real signature unknown
        pass

    def mousePressEvent(self, *args, **kwargs): # real signature unknown
        pass

    def options(self, *args, **kwargs): # real signature unknown
        pass

    def resetMaximumTime(self, *args, **kwargs): # real signature unknown
        pass

    def resetMinimumTime(self, *args, **kwargs): # real signature unknown
        pass

    def resetTimeRange(self, *args, **kwargs): # real signature unknown
        pass

    def resizeEvent(self, *args, **kwargs): # real signature unknown
        pass

    def setDisplayFormat(self, *args, **kwargs): # real signature unknown
        pass

    def setMaximumTime(self, *args, **kwargs): # real signature unknown
        pass

    def setMinimumTime(self, *args, **kwargs): # real signature unknown
        pass

    def setOptions(self, *args, **kwargs): # real signature unknown
        pass

    def setTime(self, *args, **kwargs): # real signature unknown
        pass

    def setTimeList(self, *args, **kwargs): # real signature unknown
        pass

    def setTimeListInterval(self, *args, **kwargs): # real signature unknown
        pass

    def setTimeRange(self, *args, **kwargs): # real signature unknown
        pass

    def showPopup(self, *args, **kwargs): # real signature unknown
        pass

    def time(self, *args, **kwargs): # real signature unknown
        pass

    def timeChanged(self, *args, **kwargs): # real signature unknown
        pass

    def timeEdited(self, *args, **kwargs): # real signature unknown
        pass

    def timeEntered(self, *args, **kwargs): # real signature unknown
        pass

    def timeList(self, *args, **kwargs): # real signature unknown
        pass

    def timeListInterval(self, *args, **kwargs): # real signature unknown
        pass

    def wheelEvent(self, *args, **kwargs): # real signature unknown
        pass

    def __init__(self, *args, **kwargs): # real signature unknown
        pass

    # Class-level option flags and placeholders captured by the generator.
    EditTime = 1
    ForceTime = 4
    Option = None # (!) real value is ''
    Options = None # (!) real value is ''
    SelectTime = 2
    WarnOnInvalid = 8
gkotton/vmware-nsx
|
vmware-nsx/neutron/tests/unit/vmware/extensions/test_portsecurity.py
|
Python
|
apache-2.0
| 1,831
| 0.000546
|
# Copyright (c) 2014 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from neutron.common import test_lib
from neutron.plugins.vmware.common import sync
from neutron.tests.unit import test_extension_portsecurity as psec
from neutron.tests.unit import vmware
from neutron.tests.unit.vmware.apiclient import fake
class PortSecurityTestCase(psec.PortSecurityDBTestCase):
    """Port-security DB tests wired to a fully mocked NSX API client."""

    def setUp(self):
        test_lib.test_config['config_files'] = [
            vmware.get_fake_conf('nsx.ini.test')]
        # Stand in for the real NSX API client so no backend is contacted.
        self.fc = fake.FakeClient(vmware.STUBS_PATH)
        self.mock_nsx = mock.patch(vmware.NSXAPI_NAME, autospec=True)
        nsx_instance = self.mock_nsx.start()
        nsx_instance.return_value.login.return_value = "the_cookie"
        # Keep the synchronizer's looping call from running during tests.
        sync_patch = mock.patch.object(sync, '_start_loopingcall')
        sync_patch.start()
        nsx_instance.return_value.request.side_effect = self.fc.fake_request
        super(PortSecurityTestCase, self).setUp(vmware.PLUGIN_NAME)
        self.addCleanup(self.fc.reset_all)
        self.addCleanup(self.mock_nsx.stop)
        self.addCleanup(sync_patch.stop)
class TestPortSecurity(PortSecurityTestCase, psec.TestPortSecurity):
    # Inherits every test from psec.TestPortSecurity and runs it against
    # the mocked NSX setup provided by PortSecurityTestCase.
    pass
|
tuos/FlowAndCorrelations
|
flowCorr/cmssw5320/FlowCorr/test/ConfFile_cfg.py
|
Python
|
mit
| 2,206
| 0.031732
|
import FWCore.ParameterSet.Config as cms

process = cms.Process("Demo")

process.load("FWCore.MessageService.MessageLogger_cfi")

# Process every event in the input file (-1 = no limit).
process.maxEvents = cms.untracked.PSet( input = cms.untracked.int32(-1) )
#process.MessageLogger.cerr.FwkReport.reportEvery = 100

process.source = cms.Source("PoolSource",
    # replace 'myfile.root' with the source file you want to use
    fileNames = cms.untracked.vstring(
        'file:/afs/cern.ch/user/t/tuos/work/private/hin1600x/flowPbPb/CMSSW_5_3_20/src/hiMB_1001_1_irY.root'
    )
)

process.load("Configuration.StandardSequences.FrontierConditions_GlobalTag_cff")
process.GlobalTag.globaltag = 'GR_R_53_LV6::All'

from HeavyIonsAnalysis.Configuration.CommonFunctions_cff import *
overrideCentrality(process)

process.HeavyIonGlobalParameters = cms.PSet(
    centralityVariable = cms.string("HFtowers"),
    # nonDefaultGlauberModel = cms.string("Hydjet_2760GeV"),
    centralitySrc = cms.InputTag("hiCentrality")
)

import HLTrigger.HLTfilters.hltHighLevel_cfi
process.hltHIMB = HLTrigger.HLTfilters.hltHighLevel_cfi.hltHighLevel.clone()
process.hltHIMB.HLTPaths = ['HLT_HIMinBiasHfOrBSC_*'] # for allphysics
# Accept the event if any listed path fired; do not raise on missing paths.
process.hltHIMB.andOr = cms.bool(True)
process.hltHIMB.throw = cms.bool(False)

process.TFileService = cms.Service("TFileService",
    fileName = cms.string("flowCorr2760_data.root")
)

process.flowCorr = cms.EDAnalyzer('FlowCorr',
    EvtPlane = cms.InputTag("hiEvtPlane"),
    EvtPlaneFlat = cms.InputTag("hiEvtPlaneFlat",""),
    HiMC = cms.InputTag("heavyIon"),
    Vertex = cms.InputTag("hiSelectedVertex"),
    #Track = cms.InputTag("hiGeneralTracks"),
    Track = cms.InputTag("hiGeneralAndPixelTracks"),
    TrackQuality = cms.string('highPurity'),
    Tower = cms.InputTag("towerMaker"),
    trackPtMinCut = cms.double(0.3),
    trackPtMaxCut = cms.double(12.0),
    trackEtaCut = cms.double(2.4),
    trackEtaMinCut = cms.double(0.0),
    ptErrCut = cms.double(0.1),
    dzRelCut = cms.double(3.0),
    dxyRelCut = cms.double(3.0)
)

process.p = cms.Path(process.flowCorr)
|
hanula/pypkg_template
|
pypkg_template/tests/test_foo.py
|
Python
|
bsd-2-clause
| 449
| 0
|
import unittest
from nose.tools import assert
|
_equal
class TestBar(unittest.TestCase):
    """Behavioural tests for ``pypkg_template.foo.bar``."""

    def call_FUT(self, count):
        # "FUT" = function under test; imported lazily inside the helper so
        # the module under test is resolved at call time.
        from pypkg_template.foo import bar
        return bar(count)

    def test_friday_sunday(self):
        for day in (5, 6):
            assert_equal(self.call_FUT(day), "I'm in a bar")

    def test_workday(self):
        # Same set as the original list(range(1, 5)) + [7].
        for day in [1, 2, 3, 4, 7]:
            assert_equal(self.call_FUT(day), "No bar tonight")
|
konstruktoid/ansible-upstream
|
lib/ansible/modules/network/eos/eos_config.py
|
Python
|
gpl-3.0
| 19,251
| 0.001454
|
#!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'network'}
DOCUMENTATION = """
---
module: eos_config
version_added: "2.1"
author: "Peter Sprygada (@privateip)"
short_description: Manage Arista EOS configuration sections
description:
- Arista EOS configurations use a simple block indent file syntax
for segmenting configuration into sections. This module provides
an implementation for working with EOS configuration sections in
a deterministic way. This module works with either CLI or eAPI
transports.
extends_documentation_fragment: eos
notes:
- Tested against EOS 4.15
- Abbreviated commands are NOT idempotent, see
L(Network FAQ,../network/user_guide/faq.html#why-do-the-config-modules-always-return-changed-true-with-abbreviated-commands).
options:
lines:
description:
- The ordered set of commands that should be configured in the
section. The commands must be the exact same commands as found
in the device running-config. Be sure to note the configuration
command syntax as some commands are automatically modified by the
device config parser.
aliases: ['commands']
parents:
description:
- The ordered set of parents that uniquely identify the section or hierarchy
the commands should be checked against. If the parents argument
is omitted, the commands are checked against the set of top
level or global commands.
src:
description:
- The I(src) argument provides a path to the configuration file
to load into the remote system. The path can either be a full
system path to the configuration file if the value starts with /
or relative to the root of the implemented role or playbook.
This argument is mutually exclusive with the I(lines) and
I(parents) arguments. It can be a Jinja2 template as well.
src file must have same indentation as a live switch config.
Arista EOS device config has 3 spaces indentation.
version_added: "2.2"
before:
description:
- The ordered set of commands to push on to the command stack if
a change needs to be made. This allows the playbook designer
the opportunity to perform configuration commands prior to pushing
any changes without affecting how the set of commands are matched
against the system.
after:
description:
- The ordered set of commands to append to the end of the command
stack if a change needs to be made. Just like with I(before) this
allows the playbook designer to append a set of commands to be
executed after the command set.
match:
description:
- Instructs the module on the way to perform the matching of
the set of commands against the current device config. If
match is set to I(line), commands are matched line by line. If
match is set to I(strict), command lines are matched with respect
to position. If match is set to I(exact), command lines
must be an equal match. Finally, if match is set to I(none), the
module will not attempt to compare the source configuration with
the running configuration on the remote device.
|
default: line
|
choices: ['line', 'strict', 'exact', 'none']
replace:
description:
- Instructs the module on the way to perform the configuration
on the device. If the replace argument is set to I(line) then
the modified lines are pushed to the device in configuration
mode. If the replace argument is set to I(block) then the entire
command block is pushed to the device in configuration mode if any
line is not correct.
default: line
choices: ['line', 'block', 'config']
force:
description:
- The force argument instructs the module to not consider the
current devices running-config. When set to true, this will
cause the module to push the contents of I(src) into the device
without first checking if already configured.
- Note this argument should be considered deprecated. To achieve
the equivalent, set the C(match=none) which is idempotent. This argument
will be removed in Ansible 2.6.
type: bool
default: 'no'
backup:
description:
- This argument will cause the module to create a full backup of
the current C(running-config) from the remote device before any
changes are made. The backup file is written to the C(backup)
folder in the playbook root directory or role root directory, if
playbook is part of an ansible role. If the directory does not exist,
it is created.
type: bool
default: 'no'
version_added: "2.2"
running_config:
description:
- The module, by default, will connect to the remote device and
retrieve the current running-config to use as a base for comparing
against the contents of source. There are times when it is not
desirable to have the task get the current running-config for
every task in a playbook. The I(running_config) argument allows the
implementer to pass in the configuration to use as the base
config for this module.
aliases: ['config']
version_added: "2.4"
defaults:
description:
- The I(defaults) argument will influence how the running-config
is collected from the device. When the value is set to true,
the command used to collect the running-config is append with
the all keyword. When the value is set to false, the command
is issued without the all keyword
type: bool
default: 'no'
version_added: "2.2"
save:
description:
- The C(save) argument instructs the module to save the
running-config to startup-config. This operation is performed
after any changes are made to the current running config. If
no changes are made, the configuration is still saved to the
startup config. This option will always cause the module to
return changed.
- This option is deprecated as of Ansible 2.4 and will be removed
in Ansible 2.8, use C(save_when) instead.
type: bool
default: 'no'
version_added: "2.2"
save_when:
description:
- When changes are made to the device running-configuration, the
changes are not copied to non-volatile storage by default. Using
this argument will change that before. If the argument is set to
I(always), then the running-config will always be copied to the
startup-config and the I(modified) flag will always be set to
True. If the argument is set to I(modified), then the running-config
will only be copied to the startup-config if it has changed since
the last save to startup-config. If the argument is set to
I(never), the running-config will never be copied to the
startup-config. If the argument is set to I(changed), then the running-config
will only be copied to the startup-config if the task has made a change.
I(changed) was added in Ansible 2.5.
default: never
choices: ['always', 'never', 'modified', 'changed']
version_added: "2.4"
diff_against:
description:
- When using the C(ansible-playbook --diff) command line argument
the module can generate diffs
|
Quantipy/quantipy
|
quantipy/core/tools/qp_decorators.py
|
Python
|
mit
| 6,656
| 0.003456
|
from decorator import decorator
from inspect import getargspec
# ------------------------------------------------------------------------
# decorators
# ------------------------------------------------------------------------
def lazy_property(func):
    """Decorator that makes a property lazy-evaluated.

    The wrapped function runs at most once per instance; its result is
    cached on the instance under ``_lazy_<name>`` and returned thereafter.
    """
    cache_name = '_lazy_' + func.__name__

    @property
    def wrapper(self):
        try:
            return getattr(self, cache_name)
        except AttributeError:
            value = func(self)
            setattr(self, cache_name, value)
            return value
    return wrapper
def verify(variables=None, categorical=None, text_keys=None, axis=None, is_str=None):
    """
    Decorator to verify arguments of a DataSet-bound method.

    Each keyword selects one validation applied before the wrapped
    callable runs (the first positional argument of the wrapped callable
    is assumed to be the DataSet instance):

    * variables: dict mapping an argument name to the meta collection it
      must live in ('columns', 'masks' or 'both'; append '_nested' to
      also accept 'var1 > var2' nesting strings).
    * categorical: argument name(s) whose variables must carry
      categorical meta data.
    * text_keys: argument name(s) that must be valid text keys.
    * axis: argument name(s) restricted to 'x' / 'y'.
    * is_str: argument name(s) that must be str (or a list of str).

    NOTE(review): this module is Python 2 only as written -- it relies on
    ``func.func_name``, the ``unicode`` builtin and ``reversed(zip(...))``
    (``zip`` returns a list only on Python 2).
    """
    # Checks that variable-name arguments exist in the DataSet meta.
    @decorator
    def _var_in_ds(func, *args, **kwargs):
        all_args = getargspec(func)[0]
        ds = args[0]
        for variable, collection in variables.items():
            nested = False
            if collection.endswith('_nested'):
                # '..._nested' additionally allows 'var1 > var2' syntax.
                nested = True
                collection = collection.split('_')[0]
            # get collection for argument
            if collection == 'both':
                collection = ['columns', 'masks']
            else:
                collection = [collection]
            c = [key for col in collection for key in ds._meta[col].keys()]
            # get the variable argument to check
            v_index = all_args.index(variable)
            var = kwargs.get(variable, args[v_index])
            if var is None:
                # None means "not provided" -- nothing to validate.
                return func(*args, **kwargs)
            if not isinstance(var, list):
                var = [var]
            if nested:
                # Split nesting strings into their component names.
                valid = []
                for v in var:
                    if ' > ' in v:
                        valid.extend(v.replace(' ', '').split('>'))
                    else:
                        valid.append(v)
            else:
                valid = var
            # check the variable ('@' is the total reference and always allowed)
            not_valid = [v for v in valid if not v in c + ['@']]
            if not_valid:
                msg = "'{}' argument for {}() must be in {}.\n"
                msg += '{} is not in {}.'
                msg = msg.format(variable, func.func_name, collection,
                                 not_valid, collection)
                raise KeyError(msg)
        return func(*args, **kwargs)
    # Checks that variable arguments reference categorical meta data.
    @decorator
    def _var_is_cat(func, *args, **kwargs):
        all_args = getargspec(func)[0]
        ds = args[0]
        for cat in categorical:
            # get the variable argument to check if it is categorical
            v_index = all_args.index(cat)
            var = kwargs.get(cat, args[v_index])
            if var is None: return func(*args, **kwargs)
            if not isinstance(var, list): var = [var]
            # Expand nesting strings; '@' is skipped (not a variable).
            valid = []
            for v in var:
                if ' > ' in v:
                    valid.extend(v.replace(' ', '').split('>'))
                elif not '@' == v:
                    valid.append(v)
            # check if variables are categorical
            not_cat = [v for v in valid if not ds._has_categorical_data(v)]
            if not_cat:
                msg = "'{}' argument for {}() must reference categorical "
                msg += 'variable.\n {} is not categorical.'
                msg = msg.format(cat, func.func_name, not_cat)
                raise ValueError(msg)
        return func(*args, **kwargs)
    # Checks that text_key arguments are supported by the DataSet.
    @decorator
    def _verify_text_key(func, *args, **kwargs):
        all_args = getargspec(func)[0]
        ds = args[0]
        for text_key in text_keys:
            # get the text_key argument to check
            tk_index = all_args.index(text_key)
            tks = kwargs.get(text_key, args[tk_index])
            if tks is None: return func(*args, **kwargs)
            if not isinstance(tks, list): tks = [tks]
            # check the text_key against the DataSet's supported keys
            valid_tks = ds.valid_tks
            not_supported = [tk for tk in tks if not tk in valid_tks]
            if not_supported:
                msg = "{} is not a valid text_key! Supported are: \n {}"
                raise ValueError(msg.format(not_supported, valid_tks))
        return func(*args, **kwargs)
    # Restricts axis arguments to 'x' / 'y'.
    @decorator
    def _verify_axis(func, *args, **kwargs):
        # get the axis argument to check
        all_args = getargspec(func)[0]
        ax_index = all_args.index(axis)
        a_edit = kwargs.get(axis, args[ax_index])
        if a_edit is None: return func(*args, **kwargs)
        if not isinstance(a_edit, list): a_edit = [a_edit]
        # check the axis values
        valid_ax = ['x', 'y']
        not_supported = [ax for ax in a_edit if not ax in valid_ax]
        if not_supported:
            msg = "{} is not a valid axis! Supported are: {}"
            raise ValueError(msg.format(not_supported, valid_ax))
        return func(*args, **kwargs)
    # Enforces str (py2: str/unicode) arguments.
    @decorator
    def _is_str(func, *args, **kwargs):
        all_args = getargspec(func)[0]
        for val in is_str:
            # get the arguments to check
            val_index = all_args.index(val)
            v = kwargs.get(val, args[val_index])
            if not isinstance(v, (list, tuple)): v = [v]
            if not all(isinstance(text, (str, unicode)) for text in v):
                raise ValueError('Included value must be str or list of str.')
        return func(*args, **kwargs)
    # Compose all requested checks around the wrapped callable.
    @decorator
    def _deco(func, *args, **kwargs):
        p = [variables, categorical, text_keys, axis, is_str]
        d = [_var_in_ds, _var_is_cat, _verify_text_key, _verify_axis, _is_str]
        # Wrap in reverse so checks run in declaration order.
        # NOTE(review): reversed(zip(...)) only works on Python 2 where
        # zip returns a list.
        for arg, dec in reversed(zip(p, d)):
            if arg is None: continue
            func = dec(func)
        return func(*args, **kwargs)
    # Normalize scalar parameters to lists.
    if categorical and not isinstance(categorical, list): categorical = [categorical]
    if text_keys and not isinstance(text_keys, list): text_keys = [text_keys]
    if is_str and not isinstance(is_str, list): is_str = [is_str]
    return _deco
def modify(to_list=None):
    """
    Decorator to modify arguments.

    to_list: argument name(s) whose values are normalized to lists
    before the wrapped callable runs -- None becomes [], scalars are
    wrapped in a one-element list, lists pass through unchanged.
    """
    @decorator
    def _to_list(func, *args, **kwargs):
        all_args = getargspec(func)[0]
        for val in to_list:
            # get the arguments to modify
            val_index = all_args.index(val)
            v = kwargs.get(val, args[val_index])
            if v is None: v = []
            if not isinstance(v, list): v = [v]
            # NOTE(review): truthiness check -- a falsy keyword value
            # (e.g. [] or '') is written back positionally instead.
            # Relies on the `decorator` module passing arguments
            # positionally, so args[val_index] is always present.
            if kwargs.get(val):
                kwargs[val] = v
            else:
                # Rebuild the positional tuple with the normalized value.
                args = tuple(a if not x == val_index else v
                             for x, a in enumerate(args))
        return func(*args, **kwargs)
    # Normalize the scalar parameter to a list.
    if to_list:
        if not isinstance(to_list, list): to_list = [to_list]
    return _to_list
|
frossigneux/python-kwrankingclient
|
kwrankingclient/v1/shell_commands/nodes.py
|
Python
|
apache-2.0
| 1,816
| 0
|
# Copyright (c) 2014 Bull.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from kwrankingclient import command
class ListNodes(command.ListCommand):
    """Print a list of node status."""
    # cliff command wiring: resource name and table columns to display.
    resource = 'node'
    list_columns = ['node', 'status']
    log = logging.getLogger(__name__ + '.ListNodes')

    def get_parser(self, prog_name):
        # Extend the base list parser with a sort-column option.
        parser = super(ListNodes, self).get_parser(prog_name)
        parser.add_argument('--sort-by',
                            metavar="<node_column>",
                            default='node',
                            help='column name used to sort result')
        return parser
class ShowNode(command.ShowCommand):
    """Show node status."""
    # Resource name looked up through the kwranking client.
    resource = 'node'
    # Pretty-print JSON output with 4-space indentation.
    json_indent = 4
    # presumably disables name-based lookup (id only) -- semantics live
    # in the command base class; verify there.
    allow_names = False
    log = logging.getLogger(__name__ + '.ShowNode')
class UpdateNode(command.UpdateCommand):
    """Update node status."""
    resource = 'node'
    # presumably disables name-based lookup (id only); verify in the
    # command base class.
    allow_names = False
    log = logging.getLogger(__name__ + '.UpdateNode')
    def get_parser(self, prog_name):
        # Add the --status option on top of the base update parser.
        parser = super(UpdateNode, self).get_parser(prog_name)
        parser.add_argument(
            '--status', metavar='<STATUS>',
            help='New status for the node'
        )
        return parser
    def args2body(self, parsed_args):
        # NOTE(review): parsed_args.status is never read -- the request
        # body is always {'status': 'ranking'} regardless of --status.
        # Possibly intentional (force a re-ranking), but verify against
        # the kwranking API; otherwise the user's input is dropped.
        params = {'status': 'ranking'}
        return params
|
Bystroushaak/abclinuxuapi
|
tests/test_blogpost.py
|
Python
|
mit
| 4,767
| 0.001049
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
#
# Interpreter version: python 2.7
#
impo
|
rt os.path
import pytest
import abclinuxuapi
from abclinuxuapi import shared
@pytest.fixture
def bp_url():
    # URL of the real blogpost used as the test subject.
    return "http://www.abclinuxu.cz/blog/bystroushaak/2015/2/bolest-proxy"
@pytest.fixture
def do_that_fucking_monkey_patch(monkeypatch):
    # Replace the network download with a canned local HTML file.
    def mock_download(*args, **kwargs):
        fn = os.path.join(os.path.dirname(__file__), "mock_data/blogpost.html")
        with open(fn) as f:
            return f.read()
    monkeypatch.setattr(abclinuxuapi.blogpost, "download", mock_download)
def setup_module(do_that_fucking_monkey_patch):
    """
    It is not possible to import monkeypatch from pytest. You have to use it
    as a fixture.
    """
    # NOTE(review): pytest does not inject fixtures into setup_module --
    # it passes the module object -- so the monkeypatch above is likely
    # never applied this way; confirm.
# NOTE(review): built at import time, before setup_module runs, so this
# performs a live download unless `download` was patched earlier.
BPOST = abclinuxuapi.Blogpost(bp_url(), lazy=False)
@pytest.fixture
def bpost():
    """
    This may seem a little bit crazy, but it speeds up the testing 6x.
    I don't need a new object for each test.
    """
    # Shared, pre-fetched Blogpost instance reused by all tests.
    return BPOST
def test_constructor(bp_url):
    # A lazily constructed Blogpost knows only its URL; all other
    # attributes keep their "not fetched yet" defaults.
    bp = abclinuxuapi.Blogpost(bp_url)
    assert bp.url == bp_url
    assert bp.uid is None
    assert bp.title is None
    assert bp.intro is None
    assert bp.text is None
    assert bp.rating is None
    assert bp.comments == []
    assert bp.comments_n == -1
    assert bp.created_ts is None
    assert bp.last_modified_ts is None
    assert bp.object_ts > 0
def test_constructor_multi_params(bp_url):
    # Every keyword argument is stored verbatim on the instance.
    bp = abclinuxuapi.Blogpost(
        url=bp_url,
        uid="uid",
        title="title",
        intro="intro",
        text="text",
        rating="rating",
        comments="comments",
        comments_n="comments_n",
        created_ts="created_ts",
        last_modified_ts="last_modified_ts",
        object_ts="object_ts"
    )
    assert bp.url == bp_url
    assert bp.uid == "uid"
    assert bp.title == "title"
    assert bp.intro == "intro"
    assert bp.text == "text"
    assert bp.rating == "rating"
    assert bp.comments == "comments"
    assert bp.comments_n == "comments_n"
    assert bp.created_ts == "created_ts"
    assert bp.last_modified_ts == "last_modified_ts"
    assert bp.object_ts == "object_ts"
def test_constructor_wrong_params(bp_url):
    # Unknown keyword arguments must be rejected.
    with pytest.raises(TypeError):
        bp = abclinuxuapi.Blogpost(bp_url, azgabash=True)
def test_get_title(bpost):
    # Expected values below come from the fetched blogpost content.
    assert bpost.title == "Bolest proxy"
def test_get_text(bpost):
    assert bpost.text.startswith("<h2>Bolest proxy</h2>")
    assert "Written in CherryTree" in bpost.text
    assert "bystrousak:" in bpost.text
def test_Tag():
    # Tag behaves like its string value but carries a normalized form
    # and an URL.
    tag = abclinuxuapi.Tag("hello", norm="_hello_")
    assert tag == "hello"
    assert tag.norm == "_hello_"
    assert tag.url.startswith("http")
def test_tags(bpost):
    assert bpost.tags
    assert "proxy" in bpost.tags
    # try to add and remove a tag (round-trip against the live post)
    new_tag = abclinuxuapi.Tag("nábytek", "nabytek")
    bpost.remove_tag(new_tag, throw=False)
    assert new_tag not in bpost.tags
    bpost.add_tag(new_tag)
    assert new_tag in bpost.tags
    bpost.remove_tag(new_tag, throw=False)
def test_get_uid(bpost):
    assert bpost.uid == 400957
def test_get_rating(bpost):
    # Rating is a (percentage, number of voters) pair.
    assert bpost.rating
    assert bpost.rating.rating == 100
    assert bpost.rating.base == 15
def test_meta_parsing(bpost):
    # Counters (readed, last_modified_ts) can only grow on the live
    # site, hence the >= comparisons.
    assert bpost.has_tux
    assert bpost.created_ts == 1423587660.0
    assert bpost.last_modified_ts >= 1423591140.0
    assert bpost.readed >= 1451
def test_get_image_urls(bpost):
    assert bpost.get_image_urls()
    assert bpost.get_image_urls()[0] == (
        "https://www.abclinuxu.cz/images/screenshots/0/9/"
        "210590-bolest-proxy-6017333664768008869.png"
    )
def test_different_date_parsing():
    # Smoke-test the date parser against real posts using different
    # date formats. NOTE(review): these construct new Blogpost objects,
    # so they hit the network (the module-level mock does not apply).
    abclinuxuapi.Blogpost(
        "http://abclinuxu.cz/clanky/yubikey.-co-to-je-a-co-to-umi-1",
        lazy=False
    )
    abclinuxuapi.Blogpost(
        "http://abclinuxu.cz/clanky/bezpecnost/ssl-je-vase-bezpecne-pripojeni-opravdu-zabezpecene",
        lazy=False
    )
    abclinuxuapi.Blogpost(
        "http://abclinuxu.cz/blog/jarasa/2016/10/i-pejsek-musi-jist-kvalitne",
        lazy=False
    )
    abclinuxuapi.Blogpost(
        "http://abclinuxu.cz/blog/msk/2016/8/hlada-sa-linux-embedded-vyvojar",
        lazy=False
    )
    blog = abclinuxuapi.Blogpost(
        "http://abclinuxu.cz/blog/Strider_BSD_koutek/2006/8/objevil-jsem-ameriku",
        lazy=False
    )
    assert len(blog.comments) == 0
    blog = abclinuxuapi.Blogpost(
        "http://www.abclinuxu.cz/blog/tucnak_viktor/2005/1/zdravim-nahodne-navstevniky",
        lazy=False
    )
    blog = abclinuxuapi.Blogpost(
        "https://www.abclinuxu.cz/blog/luv/2016/4/mockgeofix-mock-geolokace-kompatibilni-s-android-emulatorem",
        lazy=False
    )
    assert len(blog.comments) == 0
|
lawki/get_flash_videos_assembler
|
create_exec_.py
|
Python
|
gpl-3.0
| 628
| 0.007962
|
#!/usr/bin/env python
# Build ``download_all_files.sh`` from the URLs listed in ``link.txt``.
# For every URL the generated script downloads the video with
# ``get_flash_videos`` (skipping files that already exist) and records
# URLs that failed to download in ``problem_downloading.txt``.
import sys


def main():
    """Read link.txt and emit the download shell script.

    Exits with status 1 when link.txt cannot be opened.
    """
    try:
        links = open("link.txt", "r")
    except IOError:
        # The original used a bare ``exit`` (a no-op expression); call
        # sys.exit() so the failure actually terminates the program.
        print("ERROR!\n")
        sys.exit(1)
    # ``with`` guarantees both files are closed (the original leaked them).
    with links, open("download_all_files.sh", "w") as script:
        for line in links:
            # Strip the trailing newline; the original sliced off the last
            # character, which corrupted a final line without a newline.
            url = line.rstrip("\n")
            if not url:
                # Skip blank lines. (The original tested len(line) == 0,
                # which never matched because lines keep their newline.)
                continue
            # Target file name: last URL path component + ".mp4", quoted
            # for the shell.
            file_name = '"' + url.split("/")[-1] + '.mp4"'
            command = (
                "if test -f " + file_name + "\nthen\n continue\n else\n "
                "get_flash_videos " + url + " -f " + file_name + "\nfi\n"
                "if test $? -ne 0\n then\n if test -f " + file_name +
                "\n then rm " + file_name + "\nfi\n echo \"" + url +
                "\">>\"problem_downloading.txt\"\nfi\n"
            )
            script.write(command)


if __name__ == "__main__":
    main()
|
bossiernesto/uLisp
|
uLisp/parser/uLispParser.py
|
Python
|
bsd-3-clause
| 2,979
| 0.004028
|
"""
BNF reference: http://theory.lcs.mit.edu/~rivest/sexp.txt
<sexp> :: <string> | <list>
<string> :: <display>? <simple-string> ;
<simple-string> :: <raw> | <token> | <base-64> | <hexadecimal> |
<quoted-string> ;
<display> :: "[" <simple-string> "]" ;
<raw> :: <decimal> ":" <bytes> ;
<decimal> :: <decimal-digit>+ ;
-- decimal numbers should have no unnecessary leading zeros
<bytes> -- any string of bytes, of the indicated length
<token> :: <tokenchar>+ ;
<base-64> :: <decimal>? "|" ( <base-64-char> | <whitespace> )* "|" ;
<hexadecimal> :: "#" ( <hex-digit> | <white-space> )* "#" ;
<quoted-string> :: <decimal>? <quoted-string-body>
<quoted-string-body> :: "\"" <bytes> "\""
<list> :: "(" ( <sexp> | <whitespace> )* ")" ;
<whitespace> :: <whitespace-char>* ;
<token-char> :: <alpha> | <decimal-digit> | <simple-punc> ;
<alpha> :: <upper-case> | <lower-case> | <digit> ;
<lower-case> :: "a" | ... | "z" ;
<upper-case> :: "A" | ... | "Z" ;
<decimal-digit> :: "0" | ... | "9" ;
<hex-digit> :: <decimal-digit> | "A" | ... | "F" | "a" | ... | "f" ;
<simple-punc> :: "-" | "." | "/" | "_" | ":" | "*" | "+" | "=" ;
<whitespace-char> :: " " | "\t" | "\r" | "\n" ;
<base-64-char> :: <alpha> | <decimal-digit> | "+" | "/" | "=" ;
<null> :: "" ;
"""
from pyparsing import *
from base64 import b64decode
import pprint
def verifyLen(s, l, t):
    """Parse action: validate an explicit length prefix.

    ``t[0]`` is a Group whose ``len`` result name holds the declared
    length (or None when absent) and whose second element is the parsed
    data. Raises ParseFatalException when the actual data length does
    not match the declared one; returns the bare data otherwise.
    """
    t = t[0]
    if t.len is not None:
        t1len = len(t[1])
        if t1len != t.len:
            raise ParseFatalException(s, l, "invalid data of length %d, expected %s" % (t1len, t.len))
    return t[1]
# define punctuation literals (Suppress: matched but dropped from results)
LPAR, RPAR, LBRK, RBRK, LBRC, RBRC, VBAR = map(Suppress, "()[]{}|")
# <decimal>: no unnecessary leading zeros, converted to int
decimal = Regex(r'0|[1-9]\d*').setParseAction(lambda t: int(t[0]))
# <hexadecimal>: '#' hex digits '#', converted to int
hexadecimal = ("#" + OneOrMore(Word(hexnums)) + "#") \
    .setParseAction(lambda t: int("".join(t[1:-1]), 16))
bytes = Word(printables)
# <raw>: "<len>:<bytes>" -- length checked by verifyLen
raw = Group(decimal("len") + Suppress(":") + bytes).setParseAction(verifyLen)
token = Word(alphanums + "-./_:*+=")
# <base-64>: optional length prefix, '|'-delimited base64 payload
base64_ = Group(
    Optional(decimal | hexadecimal, default=None)("len") + VBAR + OneOrMore(Word(alphanums + "+/=")).setParseAction(
        lambda t: b64decode("".join(t))) + VBAR).setParseAction(verifyLen)
# <quoted-string>: optional length prefix + double-quoted body
qString = Group(Optional(decimal, default=None)("len") +
                dblQuotedString.setParseAction(removeQuotes)).setParseAction(verifyLen)
simpleString = base64_ | raw | decimal | token | hexadecimal | qString
# extended definitions -- these deliberately REDEFINE decimal/token and
# rebuild simpleString to extend the strict BNF above (negative numbers,
# floats, extra token characters for uLisp).
decimal = Regex(r'-?0|[1-9]\d*').setParseAction(lambda t: int(t[0]))
real = Regex(r"[+-]?\d+\.\d*([eE][+-]?\d+)?").setParseAction(lambda tokens: float(tokens[0]))
token = Word(alphanums + "-./_:*+=!<>")
simpleString = real | base64_ | raw | decimal | token | hexadecimal | qString
# <display>: "[" simple-string "]" hint prefix
display = LBRK + simpleString + RBRK
string_ = Optional(display) + simpleString
# <sexp>: a string or a parenthesized list of sexps (recursive)
uLisp_parse = Forward()
sexpList = Group(LPAR + ZeroOrMore(uLisp_parse) + RPAR)
uLisp_parse << ( string_ | sexpList )
|
grap/OpenUpgrade
|
addons/stock/stock.py
|
Python
|
agpl-3.0
| 273,917
| 0.005732
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from datetime import date, datetime
from dateutil import relativedelta
import json
import time
from openerp.osv import fields, osv
from openerp.tools.float_utils import float_compare, float_round
from openerp.tools.translate import _
from openerp.tools import DEFAULT_SERVER_DATETIME_FORMAT, DEFAULT_SERVER_DATE_FORMAT
from openerp.exceptions import Warning
from openerp import SUPERUSER_ID, api
import openerp.addons.decimal_precision as dp
from openerp.addons.procurement import procurement
import logging
_logger = logging.getLogger(__name__)
#----------------------------------------------------------
# Incoterms
#----------------------------------------------------------
class stock_incoterms(osv.osv):
    """Model holding the list of Incoterms (standard international
    sales terms dividing transport costs and responsibilities between
    buyer and seller)."""
    _name = "stock.incoterms"
    _description = "Incoterms"
    _columns = {
        'name': fields.char('Name', required=True, help="Incoterms are series of sales terms. They are used to divide transaction costs and responsibilities between buyer and seller and reflect state-of-the-art transportation practices."),
        'code': fields.char('Code', size=3, required=True, help="Incoterm Standard Code"),
        'active': fields.boolean('Active', help="By unchecking the active field, you may hide an INCOTERM you will not use."),
    }
    _defaults = {
        # New incoterms are visible by default.
        'active': True,
    }
#----------------------------------------------------------
# Stock Location
#----------------------------------------------------------
class stock_location(osv.osv):
_name = "stock.location"
_description = "Inventory Locations"
_parent_name = "location_id"
_parent_store = True
_parent_order = 'name'
_order = 'parent_left'
_rec_name = 'complete_name'
    def _location_owner(self, cr, uid, location, context=None):
        ''' Return the company owning the location if any '''
        # Only internal locations belong to a company; virtual usages
        # (customer, supplier, view, ...) have no owner.
        return location and (location.usage == 'internal') and location.company_id or False
    def _complete_name(self, cr, uid, ids, name, args, context=None):
        """ Forms complete name of location from parent location to child location.
        @return: Dictionary of values
        """
        res = {}
        for m in self.browse(cr, uid, ids, context=context):
            res[m.id] = m.name
            parent = m.location_id
            # Walk up the hierarchy, prefixing each ancestor's name.
            while parent:
                res[m.id] = parent.name + ' / ' + res[m.id]
                parent = parent.location_id
        return res
    def _get_sublocations(self, cr, uid, ids, context=None):
        """ return all sublocations of the given stock locations (included) """
        if context is None:
            context = {}
        # Disable the active-record filter so inactive sublocations are
        # included in the result.
        context_with_inactive = context.copy()
        context_with_inactive['active_test'] = False
        return self.search(cr, uid, [('id', 'child_of', ids)], context=context_with_inactive)
    def _name_get(self, cr, uid, location, context=None):
        # Compose a 'parent/child' display path; climbing stops when the
        # current node has no parent or is a 'view' location.
        name = location.name
        while location.location_id and location.usage != 'view':
            location = location.location_id
            name = location.name + '/' + name
        return name
    def name_get(self, cr, uid, ids, context=None):
        # Standard ORM name_get: list of (id, display name) pairs.
        res = []
        for location in self.browse(cr, uid, ids, context=context):
            res.append((location.id, self._name_get(cr, uid, location, context=context)))
        return res
_columns = {
'name': fields.char('Location Name', required=True, translate=True),
'active': fields.boolean('Active', help="By unchecking the active field, you may hide a location without deleting it."),
'usage': fields.selection([
('supplier', 'Supplier Location'),
('view', 'View'),
('internal', 'Internal Location'),
('customer', 'Customer Location'),
('inventory', 'Inventory'),
('procurement', 'Procurement'),
('production', 'Production'),
('transit', 'Transit Location')],
'Location Type', required=True,
help="""* Supplier Location: Virtual location representing the source location for products coming from your suppliers
\n* View: Virtual location used to create a hierarchical structures for your warehouse, aggregating its child locations ; can't directly contain products
\n* Internal Location: Physical locations inside your own warehouses,
\n* Customer Location: Virtual location representing the destination location for products sent to your customers
\n* Inventory: Virtual location serving as counterpart for inventory operations used to correct stock levels (Physical inventories)
\n* Procurement: Virtual location serving as temporary counterpart for procurement operations when the source (supplier or production) is not known yet. This location should be empty when the procurement scheduler has finished running.
\n* Production: Virtual counterpart location for production operations: this location consumes the raw material and produces finished products
\n* Transit Location: Counterpart location that should be used in inter-companies or inter-warehouses operations
""", select=True),
'complete_name': fields.function(_complete_name, type='char', string="Location Name",
store={'stock.location': (_get_sublocations, ['name', 'location_id', 'active'], 10)}),
'location_id': fields.many2one('stock.location', 'Parent Location', select=True, ondelete='cascade'),
'child_ids': fields.one2many('stock.location', 'location_id', 'Contains'),
'partner_id': fields.many2one('res.partner', 'Owner', help="Owner of the location if not internal"),
|
'comment': fields.text('Additional Information'),
'posx': fields.integer('Corridor (X)', help="Optional localization details, for information purpose only"),
'posy': fields.integer('Shelves (Y)', help="Optional localization details, for information purpose only"),
'posz': fields.integer('Height (Z)', help="Optional localization details, for information purpose only"),
'parent_left': fields.integer('Left Parent', select=1),
'parent_right': fields.integer('R
|
ight Parent', select=1),
'company_id': fields.many2one('res.company', 'Company', select=1, help='Let this field empty if this location is shared between companies'),
'scrap_location': fields.boolean('Is a Scrap Location?', help='Check this box to allow using this location to put scrapped/damaged goods.'),
'removal_strategy_id': fields.many2one('product.removal', 'Removal Strategy', help="Defines the default method used for suggesting the exact location (shelf) where to take the products from, which lot etc. for this location. This method can be enforced at the product category level, and a fallback is made on the parent locations if none is set here."),
'putaway_strategy_id': fields.many2one('product.putaway', 'Put Away Strategy', help="Defines the default method used for suggesting the
|
heibanke/python_do_something
|
Code/Chapter5/meta_04.py
|
Python
|
apache-2.0
| 1,022
| 0.016634
|
#!/usr/bin/env python
# coding: utf-8
#http://python-3-patterns-idioms-test.readthedocs.org/en/latest/Metaprogramming.html
class RegisterClasses(type):
    """Metaclass keeping a registry of the 'leaf' classes it creates.

    Every new class is added to a ``registry`` set shared through
    inheritance, and its immediate base classes are removed, so the
    registry holds only classes that have not been subclassed.
    """
    def __init__(cls, name, bases, attrs):
        super(RegisterClasses, cls).__init__(name, bases, attrs)
        if not hasattr(cls, 'registry'):
            # First class in the hierarchy: create the shared registry.
            cls.registry = set()
        cls.registry.add(cls)
        # Drop the immediate base classes from the registry.
        cls.registry.difference_update(bases)
    # Metamethods below are invoked on the *class* objects built by this
    # metaclass, not on their instances.
    def __iter__(cls):
        # Iterating a class yields the currently registered classes.
        return iter(cls.registry)
    def __str__(cls):
        if cls in cls.registry:
            return cls.__name__
        subclass_names = ", ".join(sc.__name__ for sc in cls)
        return cls.__name__ + ": " + subclass_names
class Shape(object):
    # Python 2 metaclass hook: every class in this hierarchy is created
    # by RegisterClasses and recorded in its shared registry. (Python 3
    # would spell this ``class Shape(metaclass=RegisterClasses)``.)
    __metaclass__ = RegisterClasses
class Round(Shape): pass
class Square(Shape): pass
class Triangular(Shape): pass
class Boxy(Shape): pass
# Python 2 print statement; lists the current leaf subclasses
# (set order, so unordered).
print Shape
class Circle(Round): pass
class Ellipse(Round): pass
# Round has been subclassed, so it is no longer in the registry.
print Shape
for s in Shape: # Iterate over subclasses
    print s
|
CodeCarrots/warsztaty
|
sesja07/bigmeal.py
|
Python
|
cc0-1.0
| 955
| 0.007592
|
"""
Prosta klasa reprezentująca posiłek składający się z wielu innych
obiektów jadalnych.
"""
class BigMeal:
    """A meal composed of several edible objects.

    Each component must expose ``get_name()`` and ``get_calories()``
    (the same interface as ``food.Food``). The original file only had
    TODO stubs here (which did not even parse); this implements them to
    match the documented example output.
    """
    def __init__(self, edibles):
        """Store the list of edible components.

        :param edibles: list of objects with get_name()/get_calories().
        """
        self.edibles = edibles
    def get_name(self):
        """Return the component names joined with the conjunction "i"."""
        return " i ".join(edible.get_name() for edible in self.edibles)
    def get_calories(self):
        """Return the total energy value (sum of component calories)."""
        return sum(edible.get_calories() for edible in self.edibles)
if __name__ == '__main__':
    # Simple smoke test (requires the sibling ``food`` module).
    # The original did ``import food`` but then referenced the bare name
    # ``Food``, which raises NameError; import the class directly.
    from food import Food
    apple = Food("Jablko", 70)
    carrot = Food("Marchewka", 80)
    banana = Food("Banan", 60)
    fruitmix = BigMeal([apple, carrot, banana])
    print (fruitmix.get_name())  # "Jablko i Marchewka i Banan"
    print (fruitmix.get_calories())  # 210
|
JeroenBosmans/nabu
|
nabu/distributed/local_cluster.py
|
Python
|
mit
| 1,506
| 0.001992
|
'''@file main.py
this function is used to run distributed training on a local cluster'''
import os
import atexit
import subprocess
import tensorflow as tf
def local_cluster(expdir, class_type):
    """Run distributed training on a local cluster.

    Reads ``<expdir>/cluster/cluster`` (lines of the form
    ``job,host,port,extra``), spawns one training subprocess per listed
    task and blocks until all of them finish. Spawned processes are
    terminated at interpreter exit.

    Args:
        expdir: the experiments directory containing the cluster file.
        class_type: training type ('asr' or 'lm'); selects the
            ``train_<class_type>.py`` script to launch.
    """
    clusterfile = os.path.join(expdir, 'cluster', 'cluster')
    # Parse the cluster description: one (host, port, extra) tuple per
    # task, grouped by job name.
    machines = {'worker': [], 'ps': []}
    with open(clusterfile) as fid:
        for raw_line in fid:
            stripped = raw_line.strip()
            if not stripped:
                continue
            fields = stripped.split(',')
            machines[fields[0]].append((fields[1], int(fields[2]), fields[3]))
    # Launch one training process per task of every job.
    processes = []
    for job, tasks in machines.items():
        for task_index, _ in enumerate(tasks):
            cmd = ['python', '-u', 'train_%s.py' % class_type,
                   '--clusterfile=%s' % clusterfile,
                   '--job_name=%s' % job, '--task_index=%d' % task_index,
                   '--ssh_command=None', '--expdir=%s' % expdir]
            processes.append(subprocess.Popen(cmd))
    # Kill stray children at exit, then wait for every process.
    for process in processes:
        atexit.register(process.terminate)
    for process in processes:
        process.wait()
if __name__ == '__main__':
    # Command-line entry point: flags are parsed by TensorFlow's 1.x
    # flag machinery (tf.app.flags).
    tf.app.flags.DEFINE_string('expdir', 'expdir', 'The experiments directory')
    tf.app.flags.DEFINE_string('type', 'asr',
                               'one of asr or lm, the training type')
    FLAGS = tf.app.flags.FLAGS
    local_cluster(FLAGS.expdir, FLAGS.type)
|
windmill/windmill
|
windmill/dep/_mozrunner/__init__.py
|
Python
|
apache-2.0
| 7,224
| 0.006506
|
# ***** BEGIN LICENSE BLOCK *****
# Version: MPL 1.1/GPL 2.0/LGPL 2.1
#
# The contents of this file are subject to the Mozilla Public License Version
# 1.1 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
# http://www.mozilla.org/MPL/
#
# Software distributed under the License is distributed on an "AS IS" basis,
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
# for the specific language governing rights and limitations under the
# License.
#
# The Original Code is Mozilla Corporation Code.
#
# The Initial Developer of the Original Code is
# Mikeal Rogers.
# Portions created by the Initial Developer are Copyright (C) 2008
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mikeal Rogers <mikeal.rogers@gmail.com>
#
# Alternatively, the contents of this file may be used under the terms of
# either the GNU General Public License Version 2 or later (the "GPL"), or
# the GNU Lesser General Public License Version 2.1 or
|
later (the "LGPL"),
# in which case the provisions of the GPL or the LGPL are applicable instead
# of those above. If you wish to allow use of your version of this file only
|
# under the terms of either the GPL or the LGPL, and not to allow others to
# use your version of this file under the terms of the MPL, indicate your
# decision by deleting the provisions above and replace them with the notice
# and other provisions required by the GPL or the LGPL. If you do not delete
# the provisions above, a recipient may use your version of this file under
# the terms of any one of the MPL, the GPL or the LGPL.
#
# ***** END LICENSE BLOCK *****
import optparse
import os, sys
import shutil
from time import sleep
from windmill.dep import _simplesettings
import global_settings
import runner
import install
settings_env = 'MOZRUNNER_SETTINGS_FILE'
def main():
    """Command Line main function.

    Parses mozrunner CLI options, merges them over the settings file,
    launches the browser and waits for it to exit, then cleans up the
    (possibly temporary) profile. Python 2 code (print statements).
    """
    parser = optparse.OptionParser()
    parser.add_option("-s", "--settings", dest="settings",
                      help="Settings file for mozrunner.", metavar="MOZRUNNER_SETTINGS_FILE")
    parser.add_option("-n", "--new-profile", dest="new_profile", action="store_true",
                      help="Create fresh profile.", metavar="MOZRUNNER_NEW_PROFILE")
    parser.add_option("-b", "--binary", dest="binary",
                      help="Binary path.", metavar=None)
    parser.add_option("-d", "--default-profile", dest="default-profile",
                      help="Default profile path.", metavar=None)
    parser.add_option('-p', "--profile", dest="profile",
                      help="Profile path.", metavar=None)
    parser.add_option('-w', "--plugins", dest="plugins",
                      help="Plugin paths to install.", metavar=None)
    (options, args) = parser.parse_args()
    # Make the chosen settings file visible to the settings loader via
    # the environment.
    settings_path = getattr(options, 'settings', None)
    if settings_path is not None:
        settings_path = os.path.abspath(os.path.expanduser(settings_path))
        os.environ[settings_env] = settings_path
    # NOTE(review): this file imports ``_simplesettings`` (leading
    # underscore) but references ``simplesettings`` here -- as written
    # this raises NameError unless the name is provided elsewhere;
    # verify the import alias at the top of the module.
    settings = simplesettings.initialize_settings(global_settings, sys.modules[__name__],
                                                  local_env_variable=settings_env)
    # CLI options override the corresponding settings keys.
    option_overrides = [('new_profile', 'MOZILLA_CREATE_NEW_PROFILE',),
                        ('binary', 'MOZILLA_BINARY',),
                        ('profile', 'MOZILLA_PROFILE',),
                        ('default-profile', 'MOZILLA_DEFAULT_PROFILE',),
                        ]
    for opt, override in option_overrides:
        if getattr(options, opt, None) is not None:
            settings[override] = getattr(options, opt)
    moz = get_moz_from_settings(settings)
    # if len(args) is not 0:
    #     objdir = args.pop(0)
    #
    #
    moz.start()
    print 'Started:', ' '.join(moz.command)
    try:
        moz.wait()
    except KeyboardInterrupt:
        # Ctrl-C stops the browser before cleanup below.
        moz.stop()
    # Temporary profiles are removed entirely; persistent ones only get
    # their prefs file cleaned.
    if settings['MOZILLA_CREATE_NEW_PROFILE']:
        shutil.rmtree(settings['MOZILLA_PROFILE'])
    else:
        install.clean_prefs_file(os.path.join(settings['MOZILLA_PROFILE'], 'prefs.js'))
def get_moz(binary, profile, runner_class=runner.Firefox, cmd_args=[], prefs={},
            enable_default_prefs=True, settings=None, create_new_profile=True,
            plugins=None):
    """Get the Mozilla object from options, settings dict overrides kwargs.

    NOTE(review): ``cmd_args=[]`` and ``prefs={}`` are mutable default
    arguments shared across calls -- flagged but left unchanged here to
    preserve behavior. ``has_key`` is Python 2 only.
    """
    if settings is None:
        # No settings supplied: load the defaults and publish them on
        # this module for other callers.
        settings = simplesettings.initialize_settings(
            global_settings, sys.modules[__name__],
            local_env_variable=settings_env,
        )
        sys.modules[__name__].settings = settings
    binary = os.path.abspath(binary)
    # Handle .app case: resolve the macOS bundle to its inner binary and
    # bundled default profile.
    if binary.endswith('.app'):
        apppath = binary
        binary = os.path.abspath(os.path.join(apppath, 'Contents', 'MacOS', 'firefox-bin'))
        profile = os.path.abspath(os.path.join(apppath, 'Contents', 'MacOS', 'defaults', 'profile'))
    if settings.get('MOZILLA_CREATE_NEW_PROFILE', create_new_profile):
        # Settings take precedence; fill in from kwargs when absent.
        if not settings.has_key('MOZILLA_CREATE_NEW_PROFILE'):
            settings['MOZILLA_CREATE_NEW_PROFILE'] = create_new_profile
        settings['MOZILLA_DEFAULT_PROFILE'] = profile
        if settings['MOZILLA_CREATE_NEW_PROFILE']:
            if settings['MOZILLA_DEFAULT_PROFILE'] is None:
                raise Exception ('No default or local profile has been set.')
            # Copy the default profile into a temporary one and use that.
            install.create_tmp_profile(settings)
            profile = settings['MOZILLA_PROFILE']
    else:
        settings['MOZILLA_PROFILE'] = profile
        settings['MOZILLA_CREATE_NEW_PROFILE'] = create_new_profile
    # Install plugins from settings or, failing that, the kwarg.
    if settings.get('MOZILLA_PLUGINS', plugins) is not None:
        if not settings.has_key('MOZILLA_PLUGINS'):
            settings['MOZILLA_PLUGINS'] = plugins
    if settings.has_key('MOZILLA_PLUGINS'):
        install.install_plugins(settings, runner_class)
    install.set_preferences(profile, prefs, enable_default_prefs)
    return runner_class(binary, profile, cmd_args=cmd_args, env=settings.get('MOZILLA_ENV', None))
def get_moz_from_settings(settings=None, runner_class=runner.Firefox):
    """Get Mozilla object from a settings dict. If one is not passed a default settings dict is created."""
    if settings is None:
        # NOTE(review): references ``simplesettings`` although the
        # module imports ``_simplesettings`` -- verify the alias.
        settings = simplesettings.initialize_settings(
            global_settings, sys.modules[__name__],
            local_env_variable=settings_env,
        )
        sys.modules[__name__].settings = settings
    # Publish the settings on the public mozrunner module as well
    # (imported locally to avoid a circular import at module load).
    from windmill.dep import mozrunner
    mozrunner.settings = settings
    return get_moz(settings['MOZILLA_BINARY'], settings['MOZILLA_DEFAULT_PROFILE'],
                   prefs=settings['MOZILLA_PREFERENCES'],
                   runner_class=runner_class,
                   settings=settings,
                   enable_default_prefs=settings.get('MOZILLA_ENABLE_DEFAULT_PREFS', True),
                   cmd_args=settings['MOZILLA_CMD_ARGS'])
|
xzturn/caffe2
|
caffe2/python/net_printer.py
|
Python
|
apache-2.0
| 12,712
| 0.000393
|
## @package net_printer
# Module caffe2.python.net_printer
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from caffe2.proto.caffe2_pb2 import OperatorDef, NetDef
from caffe2.python.checkpoint import Job
from caffe2.python.core import Net, ExecutionStep, Plan
from caffe2.python.task import Task, TaskGroup, WorkspaceType, TaskOutput
from collections import defaultdict
from contextlib import contextmanager
from copy import copy
from future.utils import viewkeys
from itertools import chain
from six import binary_type, text_type
class Visitor(object):
    """Type-dispatched visitor base class.

    Subclasses register handler functions per visited type with the
    ``register`` decorator; calling a visitor instance dispatches to the
    first registered handler whose type matches the argument. ``None``
    is silently ignored; an unmatched type raises TypeError.
    """
    @classmethod
    def register(cls, Type):
        """Return a decorator registering a handler function for Type."""
        # Lazily create the handler table on the registering class.
        if not hasattr(cls, 'visitors'):
            cls.visitors = []
        def _register(func):
            cls.visitors.append((Type, func))
            return func
        return _register
    def __call__(self, obj, *args, **kwargs):
        """Dispatch *obj* to the matching handler."""
        if obj is None:
            return None
        for visited_type, handler in self.__class__.visitors:
            if isinstance(obj, visited_type):
                return handler(self, obj, *args, **kwargs)
        raise TypeError('%s: unsupported object type: %s' % (
            self.__class__.__name__, type(obj)))
class Analyzer(Visitor):
    """Visitor that checks blob define-before-use across workspaces."""
    # Blob-name prefixes exempt from the define-before-use check.
    PREFIXES_TO_IGNORE = {'distributed_ctx_init'}
    def __init__(self):
        # node name -> {blob name -> number of definitions}
        self.workspaces = defaultdict(lambda: defaultdict(lambda: 0))
        # Stack of active workspaces; nested execution scopes push and
        # pop here through set_workspace().
        self.workspace_ctx = []
    @property
    def workspace(self):
        # The innermost active workspace; requires at least one
        # set_workspace() context to be open (IndexError otherwise).
        return self.workspace_ctx[-1]
    @contextmanager
    def set_workspace(self, node=None, ws=None, do_copy=False):
        # Workspace priority: explicit ws > per-node workspace >
        # currently active workspace.
        if ws is not None:
            ws = ws
        elif node is not None:
            ws = self.workspaces[str(node)]
        else:
            ws = self.workspace
        if do_copy:
            # Shallow copy: blobs defined inside must not leak outside.
            ws = copy(ws)
        self.workspace_ctx.append(ws)
        yield ws
        # NOTE(review): deliberately no try/finally -- an exception
        # during analysis leaves the pushed workspace on the stack.
        # Presumably fine because the analyzer is discarded on error;
        # confirm before reusing an instance after a failure.
        del self.workspace_ctx[-1]
    def define_blob(self, blob):
        # Record (another) definition of `blob` in the active workspace.
        self.workspace[blob] += 1
    def need_blob(self, blob):
        # Assert `blob` was defined earlier, unless its name is exempt.
        if any(blob.startswith(p) for p in Analyzer.PREFIXES_TO_IGNORE):
            return
        assert blob in self.workspace, 'Blob undefined: %s' % blob
@Analyzer.register(OperatorDef)
def analyze_op(analyzer, op):
    """Check one operator: inputs must already exist; outputs get defined."""
    for x in op.input:
        analyzer.need_blob(x)
    for x in op.output:
        analyzer.define_blob(x)
@Analyzer.register(Net)
def analyze_net(analyzer, net):
    """Analyze every operator of the net, in program order."""
    for x in net.Proto().op:
        analyzer(x)
@Analyzer.register(ExecutionStep)
def analyze_step(analyzer, step):
    """Analyze an ExecutionStep, modeling its workspace semantics.

    Substeps run either sequentially (sharing the parent workspace) or
    concurrently (each on its own copy).  In the concurrent case, blobs
    created by different parallel substeps must not collide; the union of
    their creations becomes visible in the parent after the join.
    """
    proto = step.Proto()
    with analyzer.set_workspace(do_copy=proto.create_workspace):
        if proto.report_net:
            # The report net runs on the side: analyze it on a copy so its
            # blobs do not leak into the step's workspace.
            with analyzer.set_workspace(do_copy=True):
                analyzer(step.get_net(proto.report_net))
        all_new_blobs = set()
        substeps = step.Substeps() + [step.get_net(n) for n in proto.network]
        for substep in substeps:
            with analyzer.set_workspace(
                    do_copy=proto.concurrent_substeps) as ws_in:
                analyzer(substep)
                if proto.should_stop_blob:
                    analyzer.need_blob(proto.should_stop_blob)
            if proto.concurrent_substeps:
                # Blobs this substep created relative to the parent scope.
                new_blobs = set(viewkeys(ws_in)) - set(viewkeys(analyzer.workspace))
                assert len(all_new_blobs & new_blobs) == 0, (
                    'Error: Blobs created by multiple parallel steps: %s' % (
                        ', '.join(all_new_blobs & new_blobs)))
                all_new_blobs |= new_blobs
        # After the (conceptual) join, parallel creations become visible.
        for x in all_new_blobs:
            analyzer.define_blob(x)
@Analyzer.register(Task)
def analyze_task(analyzer, task):
    """Analyze a Task, enforcing the serialized-plan size limit.

    The task's step is wrapped into a Plan and serialized to measure its
    size: protobuf cannot reliably handle messages over 64Mb, so larger
    tasks are rejected up front.  Tasks with a private (non-global)
    workspace are analyzed on a copy so their blobs do not leak out.
    """
    # check that our plan protobuf is not too large (limit of 64Mb)
    step = task.get_step()
    plan = Plan(task.node)
    plan.AddStep(step)
    proto_len = len(plan.Proto().SerializeToString())
    # BUG FIX: the original used "'... has {} bytes.' % proto_len", mixing a
    # str.format placeholder with the % operator -- that raises TypeError
    # the moment the assertion fails, masking the real error message.
    assert proto_len < 2 ** 26, (
        'Due to a protobuf limitation, serialized tasks must be smaller '
        'than 64Mb, but this task has {} bytes.'.format(proto_len))
    is_private = task.workspace_type() != WorkspaceType.GLOBAL
    with analyzer.set_workspace(do_copy=is_private):
        analyzer(step)
@Analyzer.register(TaskGroup)
def analyze_task_group(analyzer, tg):
    """Analyze each task of the group inside its own node's workspace."""
    for task in tg.tasks_by_node().tasks():
        with analyzer.set_workspace(node=task.node):
            analyzer(task)
@Analyzer.register(Job)
def analyze_job(analyzer, job):
    """Analyze a Job: the init group is visited before the epoch group."""
    analyzer(job.init_group)
    analyzer(job.epoch_group)
def analyze(obj):
    """
    Given a Job, visits all the execution steps making sure that:
    - no undefined blobs will be found during execution
    - no blob with same name is defined in concurrent steps
    """
    Analyzer()(obj)
class Text(object):
    """Accumulates indented lines of pseudo-code text.

    ``context(text)`` opens a ``with text:`` scope: lines added inside are
    indented one level, and an empty scope is closed with ``pass`` so the
    rendered output stays syntactically valid.
    """

    def __init__(self):
        self._indent = 0
        self._lines_in_context = [0]
        self.lines = []

    @contextmanager
    def context(self, text):
        # A None context is transparent: no header, no extra indent.
        if text is not None:
            self.add('with %s:' % text)
            self._indent += 4
            self._lines_in_context.append(0)
        yield
        if text is not None:
            # An empty scope must still contain a statement.
            if not self._lines_in_context[-1]:
                self.add('pass')
            self._indent -= 4
            self._lines_in_context.pop()

    def add(self, text):
        self._lines_in_context[-1] += 1
        self.lines.append(' ' * self._indent + text)

    def __str__(self):
        return '\n'.join(self.lines)
class Printer(Visitor, Text):
    """Visitor that renders Jobs/Tasks/Nets as pseudo-code text lines."""

    def __init__(self, factor_prefixes=False, c2_syntax=True):
        # NOTE(review): super(Visitor, self) resolves to the class *after*
        # Visitor in Printer's MRO (i.e. Text), so this line actually runs
        # Text.__init__; super(Text, self) then reaches object.  It works,
        # but it relies on the current MRO -- confirm before changing the
        # base-class order.
        super(Visitor, self).__init__()
        super(Text, self).__init__()
        self.factor_prefixes = factor_prefixes  # factor common blob-name prefixes
        self.c2_syntax = c2_syntax              # emit caffe2-style call syntax
        self.c2_net_name = None                 # name of the net being printed
def _sanitize_str(s):
    """Render *s* as a quoted string, truncating values over 64 chars."""
    if isinstance(s, text_type):
        text = s
    elif isinstance(s, binary_type):
        # Binary data: keep only what decodes cleanly as ASCII.
        text = s.decode('ascii', errors='ignore')
    else:
        text = str(s)
    if len(text) < 64:
        return "'%s'" % text
    # Long values: first 64 characters plus a marker with the cut length.
    return "'%s'" % text[:64] + '...<+len=%d>' % (len(text) - 64)
def _arg_val(arg):
    """Best-effort rendering of a caffe2 Argument proto value as a string.

    Scalar fields (f, i, s) are checked first, then the repeated fields;
    an argument with nothing populated renders as '[]'.
    """
    if arg.HasField('f'):
        return str(arg.f)
    if arg.HasField('i'):
        return str(arg.i)
    if arg.HasField('s'):
        return _sanitize_str(arg.s)
    if arg.floats:
        return str(list(arg.floats))
    if arg.ints:
        return str(list(arg.ints))
    if arg.strings:
        return str([_sanitize_str(s) for s in arg.strings])
    return '[]'
def commonprefix(m):
    "Given a list of strings, returns the longest common prefix"
    if not m:
        return ''
    # The lexicographic min and max are the two most different strings,
    # so their shared prefix is shared by every string in the list.
    low, high = min(m), max(m)
    for idx, ch in enumerate(low):
        if ch != high[idx]:
            return low[:idx]
    return low
def format_value(val):
    """Render a value: lists become "['a', 'b']"; anything else is str()."""
    if not isinstance(val, list):
        return str(val)
    return '[%s]' % ', '.join("'%s'" % v for v in val)
def factor_prefix(vals, do_it):
    """Join formatted values, optionally factoring out their common prefix.

    With ``do_it`` and more than one value, "pre_a, pre_b" is rendered as
    "pre[a, b]"; otherwise the values are simply comma-joined.
    """
    formatted = [format_value(v) for v in vals]
    prefix = commonprefix(formatted) if do_it and len(formatted) > 1 else ''
    suffixes = ', '.join(v[len(prefix):] for v in formatted)
    if not prefix:
        return suffixes
    return '%s[%s]' % (prefix, suffixes)
def call(op, inputs=None, outputs=None, factor_prefixes=False):
    """Format a call expression "outputs = op(inputs)" as a string.

    ``inputs`` may mix positional values with (name, value) tuples; the
    tuples are rendered as keyword arguments after the positional ones.
    """
    if not inputs:
        rendered = ''
    else:
        # Split plain positional values from (key, value) pairs.
        positional = [a for a in inputs if not isinstance(a, tuple)]
        keyword = [a for a in inputs if isinstance(a, tuple)]
        pieces = chain(
            [factor_prefix(positional, factor_prefixes)],
            ('%s=%s' % kv for kv in keyword),
        )
        rendered = ', '.join(piece for piece in pieces if piece)
    call = '%s(%s)' % (op, rendered)
    if not outputs:
        return call
    return '%s = %s' % (factor_prefix(outputs, factor_prefixes), call)
def format_device_option(dev_opt):
    """Render a DeviceOption as a call string, or None when all-default."""
    if not dev_opt or not (
            dev_opt.device_type or dev_opt.cuda_gpu_id or dev_opt.node_name):
        # Unset or fully-default device option: nothing worth printing.
        return None
    return call(
        'DeviceOption',
        [dev_opt.device_type, dev_opt.cuda_gpu_id, "'%s'" % dev_opt.node_name])
@Printer.register(OperatorDef)
def print_op(text, op):
args = [(a.name, _arg_val(a)) for a in op.arg]
dev_opt_txt = format_device_option(op.device_option)
if dev_opt_txt:
|
Akrog/cinder
|
cinder/api/common.py
|
Python
|
apache-2.0
| 15,127
| 0
|
# Copyright 2010 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import re
import urllib
from oslo_config import cfg
import six.moves.urllib.parse as urlparse
import webob
from cinder.api.openstack import wsgi
from cinder.api import xmlutil
from cinder.i18n import _
from cinder.openstack.common import log as logging
from cinder import utils
# Configuration options shared by the API: pagination limit and the base
# URL advertised to clients in resource links.
api_common_opts = [
    cfg.IntOpt('osapi_max_limit',
               default=1000,
               help='The maximum number of items that a collection '
                    'resource returns in a single response'),
    cfg.StrOpt('osapi_volume_base_URL',
               default=None,
               help='Base URL that will be presented to users in links '
                    'to the OpenStack Volume API',
               deprecated_name='osapi_compute_link_prefix'),
]
CONF = cfg.CONF
CONF.register_opts(api_common_opts)
LOG = logging.getLogger(__name__)
# XML namespaces for v1/v2 of the block-storage API representations.
XML_NS_V1 = 'http://docs.openstack.org/api/openstack-block-storage/1.0/content'
XML_NS_V2 = 'http://docs.openstack.org/api/openstack-block-storage/2.0/content'
# Regex that matches alphanumeric characters, periods, hyphens,
# colons and underscores:
# ^ assert position at start of the string
# [\w\.\-\:\_] match expression
# $ assert position at end of the string
VALID_KEY_NAME_REGEX = re.compile(r"^[\w\.\-\:\_]+$", re.UNICODE)
def validate_key_names(key_names_list):
    """Validate each item of the list to match key name regex."""
    # all() short-circuits on the first non-matching name, exactly like
    # the early "return False" of an explicit loop.
    return all(VALID_KEY_NAME_REGEX.match(name) for name in key_names_list)
def get_pagination_params(request):
    """Return marker, limit tuple from request.

    :param request: `wsgi.Request` possibly containing 'marker' and 'limit'
                    GET variables. 'marker' is the id of the last element
                    the client has seen, and 'limit' is the maximum number
                    of items to return. If 'limit' is not specified, 0, or
                    > max_limit, we default to max_limit. Negative values
                    for either marker or limit will cause
                    exc.HTTPBadRequest() exceptions to be raised.
    """
    # Only the keys the client actually supplied are included.
    params = {}
    if 'limit' in request.GET:
        params['limit'] = _get_limit_param(request)
    if 'marker' in request.GET:
        params['marker'] = _get_marker_param(request)
    return params
def _get_limit_param(request):
    """Extract integer limit from request or fail.

    :raises webob.exc.HTTPBadRequest: if 'limit' is not an integer or is
                                      negative (zero is accepted).
    """
    try:
        limit = int(request.GET['limit'])
    except ValueError:
        msg = _('limit param must be an integer')
        raise webob.exc.HTTPBadRequest(explanation=msg)
    if limit < 0:
        msg = _('limit param must be positive')
        raise webob.exc.HTTPBadRequest(explanation=msg)
    return limit
def _get_marker_param(request):
"""Extract marker id from request or fail."""
return request.GET['marker']
def limited(items, request, max_limit=CONF.osapi_max_limit):
    """Return a slice of items according to requested offset and limit.

    :param items: A sliceable entity
    :param request: ``wsgi.Request`` possibly containing 'offset' and 'limit'
                    GET variables. 'offset' is where to start in the list,
                    and 'limit' is the maximum number of items to return. If
                    'limit' is not specified, 0, or > max_limit, we default
                    to max_limit. Negative values for either offset or limit
                    will cause exc.HTTPBadRequest() exceptions to be raised.
    :kwarg max_limit: The maximum number of items to return from 'items'
    """
    try:
        offset = int(request.GET.get('offset', 0))
    except ValueError:
        msg = _('offset param must be an integer')
        raise webob.exc.HTTPBadRequest(explanation=msg)
    try:
        limit = int(request.GET.get('limit', max_limit))
    except ValueError:
        msg = _('limit param must be an integer')
        raise webob.exc.HTTPBadRequest(explanation=msg)
    if limit < 0:
        msg = _('limit param must be positive')
        raise webob.exc.HTTPBadRequest(explanation=msg)
    if offset < 0:
        msg = _('offset param must be positive')
        raise webob.exc.HTTPBadRequest(explanation=msg)
    # limit == 0 means "no limit requested": fall back to max_limit.
    limit = min(max_limit, limit or max_limit)
    range_end = offset + limit
    return items[offset:range_end]
def limited_by_marker(items, request, max_limit=CONF.osapi_max_limit):
    """Return a slice of items according to the requested marker and limit."""
    params = get_pagination_params(request)
    limit = params.get('limit', max_limit)
    marker = params.get('marker')
    limit = min(max_limit, limit)
    start_index = 0
    if marker:
        # Scan for the marker item; the slice starts just after it.
        start_index = -1
        for i, item in enumerate(items):
            # NOTE(review): the 'flavorid' branch looks inherited from
            # nova's paginator; volume items match on 'id'/'uuid' --
            # confirm whether it is still needed here.
            if 'flavorid' in item:
                if item['flavorid'] == marker:
                    start_index = i + 1
                    break
            elif item['id'] == marker or item.get('uuid') == marker:
                start_index = i + 1
                break
        if start_index < 0:
            msg = _('marker [%s] not found') % marker
            raise webob.exc.HTTPBadRequest(explanation=msg)
    range_end = start_index + limit
    return items[start_index:range_end]
def get_sort_params(params, default_key='created_at', default_dir='desc'):
    """Retrieve sort key/direction parameters from ``params``.

    Accepts either the 'sort' parameter (a comma-separated list of keys,
    each with an optional ':<direction>' suffix) or the deprecated
    'sort_key'/'sort_dir' pair; mixing the two raises HTTPBadRequest.
    The consumed parameters are popped from ``params``.

    :param params: webob.multidict of request parameters (from
                   cinder.api.openstack.wsgi.Request.params)
    :param default_key: sort key added when none is supplied
    :param default_dir: sort direction used when a key has no direction
    :returns: list of sort keys, list of sort dirs
    :raise webob.exc.HTTPBadRequest: If both 'sort' and either 'sort_key'
                                     or 'sort_dir' are supplied parameters
    """
    if 'sort' in params and ('sort_key' in params or 'sort_dir' in params):
        msg = _("The 'sort_key' and 'sort_dir' parameters are deprecated and "
                "cannot be used with the 'sort' parameter.")
        raise webob.exc.HTTPBadRequest(explanation=msg)
    sort_keys = []
    sort_dirs = []
    if 'sort' in params:
        for entry in params.pop('sort').strip().split(','):
            key, _sep, direction = entry.partition(':')
            sort_keys.append(key.strip())
            # A key without an explicit direction gets the default one.
            sort_dirs.append((direction or default_dir).strip())
    else:
        sort_keys.append(params.pop('sort_key', default_key).strip())
        sort_dirs.append(params.pop('sort_dir', default_dir).strip())
    return sort_keys, sort_dirs
def remove_version_from_href(href):
"""Removes the first api version from the href.
Given: 'http://www.cinder.com/v1.1/123'
Returns: 'http:/
|
mpolden/jarvis2
|
jarvis/jobs/calendar.py
|
Python
|
mit
| 1,462
| 0.000684
|
# -*- coding: utf-8 -*-
import os
import httplib2
from apiclient.discovery import build
from oauth2client.file import Storage
from datetime import datetime
from jobs import AbstractJob
class Calendar(AbstractJob):
    """Job that fetches upcoming events from the primary Google Calendar."""

    def __init__(self, conf):
        self.interval = conf["interval"]
        self.timeout = conf.get("timeout")

    def _auth(self):
        # OAuth credentials are cached next to this module.
        credentials_file = os.path.abspath(
            os.path.join(os.path.dirname(__file__), ".calendar.json"))
        credentials = Storage(credentials_file).get()
        http = credentials.authorize(httplib2.Http(timeout=self.timeout))
        self.service = build(serviceName="calendar", version="v3", http=http)

    def _parse(self, items):
        """Reduce raw event dicts to (id, summary, date) records."""
        events = []
        for item in items:
            # All-day events carry 'date'; timed events carry 'dateTime'.
            start = item["start"]
            when = start.get("dateTime") or start.get("date")
            events.append({"id": item["id"],
                           "summary": item["summary"],
                           "date": when})
        return events

    def get(self):
        # Authenticate lazily on first use.
        if not hasattr(self, "service"):
            self._auth()
        now = datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%S.%fZ")
        result = (
            self.service.events()
            .list(
                calendarId="primary",
                orderBy="startTime",
                singleEvents=True,
                timeMin=now,
            )
            .execute()
        )
        return {"events": self._parse(result["items"])}
|
crmccreary/openerp_server
|
openerp/addons/procurement/wizard/orderpoint_procurement.py
|
Python
|
agpl-3.0
| 2,927
| 0.003416
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
#
# Order Point Method:
# - Order if the virtual stock of today is bellow the min of the defined order point
#
import threading
import pooler
from osv import fields,osv
class procurement_compute(osv.osv_memory):
    """Wizard that launches the automatic orderpoint procurement scheduler."""
    _name = 'procurement.orderpoint.compute'
    _description = 'Automatic Order Point'
    _columns = {
        'automatic': fields.boolean('Automatic Orderpoint', help='If the stock of a product is under 0, it will act like an orderpoint'),
    }
    _defaults = {
        'automatic': False,
    }

    def _procure_calculation_orderpoint(self, cr, uid, ids, context=None):
        """Thread body: run orderpoint confirmation on a fresh cursor.

        @param self: The object pointer.
        @param cr: A database cursor
        @param uid: ID of the user currently logged in
        @param ids: List of IDs selected
        @param context: A standard dictionary
        """
        proc_obj = self.pool.get('procurement.order')
        # Running in a separate thread: open a fresh cursor, since the
        # caller's cursor may already be closed.
        new_cr = pooler.get_db(cr.dbname).cursor()
        for wizard in self.browse(new_cr, uid, ids, context=context):
            proc_obj._procure_orderpoint_confirm(
                new_cr, uid, automatic=wizard.automatic,
                use_new_cursor=new_cr.dbname, context=context)
        # Close the thread-local cursor once done.
        new_cr.close()
        return {}

    def procure_calculation(self, cr, uid, ids, context=None):
        """Start the computation in the background and close the wizard.

        @param self: The object pointer.
        @param cr: A database cursor
        @param uid: ID of the user currently logged in
        @param ids: List of IDs selected
        @param context: A standard dictionary
        """
        worker = threading.Thread(
            target=self._procure_calculation_orderpoint,
            args=(cr, uid, ids, context))
        worker.start()
        return {'type': 'ir.actions.act_window_close'}
procurement_compute()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
ellak-monades-aristeias/enhydris
|
enhydris/permissions/tests.py
|
Python
|
agpl-3.0
| 1,553
| 0.007083
|
import unittest
from django.contrib.auth.models import User, Group
from enhydris.hcore.models import Gentity
class PermissionsTestCase(unittest.TestCase):
    """Exercise the row-level permission helpers on User and Group."""

    def setUp(self):
        # One target object, one user and one group per test.
        self.object = Gentity.objects.create(name='testgent')
        self.object.save()
        self.user = User.objects.create(username='testuser')
        self.user.save()
        self.group = Group.objects.create(name='testgroup')
        self.group.save()

    def tearDown(self):
        self.object.delete()
        self.user.delete()
        self.group.delete()

    def testUserPerms(self):
        # Grant, check and revoke a row permission directly on the user.
        perm = 'permission'
        assert self.user.has_row_perm(self.object, perm) == False
        self.user.add_row_perm(self.object, perm)
        assert self.user.has_row_perm(self.object, perm) == True
        self.user.del_row_perm(self.object, perm)
        assert self.user.has_row_perm(self.object, perm) == False

    def testGroupPerms(self):
        # Permissions granted to a group apply to its member users.
        perm = 'permission'
        assert self.user.has_row_perm(self.object, perm) == False
        assert self.group.has_row_perm(self.object, perm) == False
        self.group.user_set.add(self.user)
        self.group.add_row_perm(self.object, perm)
        assert self.group.has_row_perm(self.object, perm) == True
        assert self.user.has_row_perm(self.object, perm) == True
        self.group.del_row_perm(self.object, perm)
        assert self.user.has_row_perm(self.object, perm) == False
        assert self.group.has_row_perm(self.object, perm) == False
|
rssenar/PyToolkit
|
ValidateFiles.py
|
Python
|
bsd-2-clause
| 2,072
| 0.034749
|
#!/usr/bin/env python3.4
# ---------------------------------------------------------------------------- #
import csv, os
# ---------------------------------------------------------------------------- #
def Validate():
    """Interactively compare an input CSV against database/purchase CSVs.

    Prompts for up to three file names (without extension) located on the
    Desktop.  Rows of file 1 whose columns 1-6 match no row in file 2 or
    file 3 are appended to error.csv; a banner reports the outcome.
    """
    Message1 = 'VALIDATED!!!'
    Message2 = '''
	$$$$$$$$\\
	$$ |     $$$$$$\\  $$$$$$\\   $$$$$$\\   $$$$$$\\
	$$$$$\\  $$  __$$\\$$  __$$\\$$  __$$\\$$  __$$\\
	$$ __|  $$ |  \\__$$ |  \\__$$ /  $$ $$ |  \\__|
	$$$$$$$$\\$$ |     $$ |      \\$$$$$$ $$ |
	\\________\\__|     \\__|      \\______/\\__|
	'''
    os.chdir('../../../../Desktop/')
    Entries = set()
    File1 = str.lower(input('Enter File 1 : '))
    File2 = str.lower(input('Enter File 2 : '))
    File3 = str.lower(input('Enter File 3 : '))
    InputFile = '{}.csv'.format(File1)
    DatabaseFile = '{}.csv'.format(File2)
    PurchaseFile = '{}.csv'.format(File3)
    # ------------------------------------------------------------------------ #
    if File2 != '':
        # newline='' is the csv-recommended open mode ('rU' is deprecated);
        # a distinct handle name avoids shadowing the file-name variable.
        with open(DatabaseFile, 'r', newline='') as db_fp:
            reader = csv.reader(db_fp)
            next(reader)  # skip the header row
            for line in reader:
                Entries.add((line[1], line[2], line[3], line[4], line[5], line[6]))
    # BUG FIX: the original built a csv.reader over the PurchaseFile *name*
    # string (before the file was opened) -- dead and wrong; removed.
    if File3 != '':
        with open(PurchaseFile, 'r', newline='') as pu_fp:
            reader = csv.reader(pu_fp)
            next(reader)  # skip the header row
            for line in reader:
                Entries.add((line[1], line[2], line[3], line[4], line[5], line[6]))
    if File1 != '':
        ErrorCounter = 0
        err_fp = None
        try:
            with open(InputFile, 'r', newline='') as in_fp:
                reader = csv.reader(in_fp)
                next(reader)  # skip the header row
                for line in reader:
                    key = (line[1], line[2], line[3], line[4], line[5], line[6])
                    if key not in Entries:
                        # Open error.csv lazily so it is only created when a
                        # mismatch exists (as before), but keep one handle
                        # open instead of reopening it for every bad row.
                        if err_fp is None:
                            err_fp = open('error.csv', 'at', newline='')
                            err_writer = csv.writer(err_fp)
                        err_writer.writerow(line)
                        ErrorCounter += 1
        finally:
            if err_fp is not None:
                err_fp.close()
        if ErrorCounter > 0:
            print('{} Errors Found'.format(ErrorCounter))
            print(Message2)
        else:
            print(Message1)
# ---------------------------------------------------------------------------- #
if __name__ == '__main__':
Validate()
|
sulaweyo/torrentflux-b4rt-php7
|
html/bin/clients/fluazu/fluazu/output.py
|
Python
|
gpl-2.0
| 3,408
| 0.012911
|
################################################################################
# $Id: output.py 2552 2007-02-08 21:40:46Z b4rt $
# $Date: 2007-02-08 15:40:46 -0600 (Thu, 08 Feb 2007) $
# $Revision: 2552 $
################################################################################
# #
# LICENSE #
# #
# This program is free software; you can redistribute it and/or #
# modify it under the terms of the GNU General Public License (GPL) #
# as published by the Free Software Foundation; either version 2 #
# of the License, or (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCH
|
ANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
#
|
#
# To read the license please visit http://www.gnu.org/copyleft/gpl.html #
# #
# #
################################################################################
# standard-imports
import sys
import time
################################################################################
""" ------------------------------------------------------------------------ """
""" getPrefix """
""" ------------------------------------------------------------------------ """
def getPrefix():
    """Return a '[YYYY/MM/DD - HH:MM:SS] ' timestamp prefix for log lines."""
    stamp = time.strftime('[%Y/%m/%d - %H:%M:%S]')
    return stamp + " "
""" ------------------------------------------------------------------------ """
""" getOutput """
""" ------------------------------------------------------------------------ """
def getOutput(message):
    """Return *message* prefixed with a timestamp and newline-terminated."""
    return "%s%s\n" % (getPrefix(), message)
""" ------------------------------------------------------------------------ """
""" printMessage """
""" ------------------------------------------------------------------------ """
def printMessage(message):
    """Write the timestamped message to stdout and flush immediately."""
    stream = sys.stdout
    stream.write(getOutput(message))
    stream.flush()
""" ------------------------------------------------------------------------ """
""" printError """
""" ------------------------------------------------------------------------ """
def printError(message):
    """Write the timestamped message to stderr and flush immediately."""
    stream = sys.stderr
    stream.write(getOutput(message))
    stream.flush()
""" ------------------------------------------------------------------------ """
""" printException """
""" ------------------------------------------------------------------------ """
def printException():
    # Python 2 print statement (this module predates Python 3): emits the
    # timestamp prefix and sys.exc_info() separated by a space.
    print getPrefix(), sys.exc_info()
    sys.stdout.flush()
|
yeming233/rally
|
rally/plugins/openstack/cfg/nova.py
|
Python
|
apache-2.0
| 12,529
| 0
|
# Copyright 2013: Mirantis Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
OPTS = {"benchmark": [
# prepoll delay, timeout, poll interval
# "start": (0, 300, 1)
cfg.FloatOpt("nova_server_%s_prepoll_delay" % "start",
default=float(0),
help="Time to sleep after %s before polling"
" for status" % "start"),
cfg.FloatOpt("nova_server_%s_timeout" % "start",
default=float(300),
help="Server %s timeout" % "start"),
cfg.FloatOpt("nova_server_%s_poll_interval" % "start",
default=float(1),
help="Server %s poll interval" % "start"),
# "stop": (0, 300, 2)
cfg.FloatOpt("nova_server_%s_prepoll_delay" % "stop",
default=float(0),
help="Time to sleep after %s before polling"
" for status" % "stop"),
cfg.FloatOpt("nova_server_%s_timeout" % "stop",
default=float(300),
help="Server %s timeout" % "stop"),
cfg.FloatOpt("nova_server_%s_poll_interval" % "stop",
default=float(2),
help="Server %s poll interval" % "stop"),
# "boot": (1, 300, 1)
cfg.FloatOpt("nova_server_%s_prepoll_delay" % "boot",
default=float(1),
help="Time to sleep after %s before polling"
" for status" % "boot"),
cfg.FloatOpt("nova_server_%s_timeout" % "boot",
default=float(300),
help="Server %s timeout" % "boot"),
cfg.FloatOpt("nova_server_%s_poll_interval" % "boot",
default=float(2),
help="Server %s poll interval" % "boot"),
# "delete": (2, 300, 2)
cfg.FloatOpt("nova_server_%s_prepoll_delay" % "delete",
default=float(2),
help="Time to sleep after %s before polling"
" for status" % "delete"),
cfg.FloatOpt("nova_server_%s_timeout" % "delete",
default=float(300),
help="Server %s timeout" % "delete"),
cfg.FloatOpt("nova_server_%s_poll_interval" % "delete",
default=float(2),
help="Server %s poll interval" % "delete"),
# "reboot": (2, 300, 2)
cfg.FloatOpt("nova_server_%s_prepoll_delay" % "reboot",
default=float(2),
help="Time to sleep after %s before polling"
" for status" % "reboot"),
cfg.FloatOpt("nova_server_%s_timeout" % "reboot",
default=float(300),
help="Server %s timeout" % "reboot"),
cfg.FloatOpt("nova_server_%s_poll_interval" % "reboot",
default=float(2),
help="Server %s poll interval" % "reboot"),
# "rebuild": (1, 300, 1)
cfg.FloatOpt("nova_server_%s_prepoll_delay" % "rebuild",
default=float(1),
help="Time to sleep after %s before polling"
" for status" % "rebuild"),
cfg.FloatOpt("nova_server_%s_timeout" % "rebuild",
default=float(300),
help="Server %s timeout" % "rebuild"),
cfg.FloatOpt("nova_server_%s_poll_interval" % "rebuild",
default=float(1),
help="Server %s poll interval" % "rebuild"),
# "rescue": (2, 300, 2)
cfg.FloatOpt("nova_server_%s_prepoll_delay" % "rescue",
default=float(2),
help="Time to sleep after %s before polling"
" for status" % "rescue"),
cfg.FloatOpt("nova_server_%s_timeout" % "rescue",
default=float(300),
help="Server %s timeout" % "rescue"),
cfg.FloatOpt("nova_server_%s_poll_interval" % "rescue",
default=float(2),
help="Server %s poll interval" % "rescue"),
# "unrescue": (2, 300, 2)
cfg.FloatOpt("nova_server_%s_prepoll_delay" % "unrescue",
default=float(2),
help="Time to sleep after %s before polling"
" for status" % "unrescue"),
cfg.FloatOpt("nova_server_%s_timeout" % "unrescue",
default=float(300),
help="Server %s timeout" % "unrescue"),
cfg.FloatOpt("nova_server_%s_poll_interval" % "unrescue",
default=float(2),
help="Server %s poll interval" % "unrescue"),
# "suspend": (2, 300, 2)
cfg.FloatOpt("nova_server_%s_prepoll_delay" % "suspend",
default=f
|
loat(2),
help="Time to sleep after %s before polling"
" for status" % "suspend"),
cfg.FloatOpt("nova_server_%s_timeout" % "suspend",
default=float(300),
help="Server %s timeout" % "suspend")
|
,
cfg.FloatOpt("nova_server_%s_poll_interval" % "suspend",
default=float(2),
help="Server %s poll interval" % "suspend"),
# "resume": (2, 300, 2)
cfg.FloatOpt("nova_server_%s_prepoll_delay" % "resume",
default=float(2),
help="Time to sleep after %s before polling"
" for status" % "resume"),
cfg.FloatOpt("nova_server_%s_timeout" % "resume",
default=float(300),
help="Server %s timeout" % "resume"),
cfg.FloatOpt("nova_server_%s_poll_interval" % "resume",
default=float(2),
help="Server %s poll interval" % "resume"),
# "pause": (2, 300, 2)
cfg.FloatOpt("nova_server_%s_prepoll_delay" % "pause",
default=float(2),
help="Time to sleep after %s before polling"
" for status" % "pause"),
cfg.FloatOpt("nova_server_%s_timeout" % "pause",
default=float(300),
help="Server %s timeout" % "pause"),
cfg.FloatOpt("nova_server_%s_poll_interval" % "pause",
default=float(2),
help="Server %s poll interval" % "pause"),
# "unpause": (2, 300, 2)
cfg.FloatOpt("nova_server_%s_prepoll_delay" % "unpause",
default=float(2),
help="Time to sleep after %s before polling"
" for status" % "unpause"),
cfg.FloatOpt("nova_server_%s_timeout" % "unpause",
default=float(300),
help="Server %s timeout" % "unpause"),
cfg.FloatOpt("nova_server_%s_poll_interval" % "unpause",
default=float(2),
help="Server %s poll interval" % "unpause"),
# "shelve": (2, 300, 2)
cfg.FloatOpt("nova_server_%s_prepoll_delay" % "shelve",
default=float(2),
help="Time to sleep after %s before polling"
" for status" % "shelve"),
cfg.FloatOpt("nova_server_%s_timeout" % "shelve",
default=float(300),
help="Server %s timeout" % "shelve"),
cfg.FloatOpt("nova_server_%s_poll_interval" % "shelve",
default=float(2),
help="Server %s poll interval" % "shelve"),
# "unshelve": (2, 300, 2)
cfg.FloatOpt("nova_server_%s_prepoll_delay" % "unshelve",
default=float(2),
help="Time to sleep after %s before polling"
" for status" % "unshelve"),
cfg.FloatOpt("nova_server_%s_timeout" % "unshelve",
default=float(300),
help="Server %s timeout" % "unshelve"),
cfg.FloatOpt("nova_server_%s_poll_interval" % "unshelve",
default=float(2),
|
kynikos/lib.py.configfile
|
configfile/__init__.py
|
Python
|
mit
| 60,029
| 0.000983
|
# This file is part of ConfigFile - Parse and edit configuration files.
# Copyright (C) 2011-present Dario Giovannetti <dev@dariogiovannetti.net>
# Licensed under MIT
# https://github.com/kynikos/lib.py.configfile/blob/master/LICENSE
"""
This library provides the :py:class:`ConfigFile` class, whose goal is to
provide an interface for parsing, modifying and writing configuration files.
Main features:
* Support for subsections. Support for sectionless options (root options).
* Read from multiple sources (files, file-like objects, dictionaries or special
compatible objects) and compose them in a single :py:class:`ConfigFile`
object.
* When importing and exporting it is possible to choose what to do with
options only existing in the source, only existing in the destination, or
existing in both with different values.
|
* Import a configuration source into a particular subsection of an existing
object. Export only a particular subsection of an existing object.
* Preserve the order of sections and options when exporting. Try the best to
preserve any comments too.
* Access sections and options with the
``root('Section', 'Subsection')['option']`` syntax or the
``root('Section')('Subsection')['option']`` syntax.
* save references to subsections with e.g.
``subsection = section('Section', 'Subsection')``.
* Interpolation of option values between sections when importing.
Author: Dario Giovannetti <dev@dariogiovannetti.net>
License: MIT
GitHub: https://www.github.com/kynikos/lib.py.configfile
Issue tracker: https://www.github.com/kynikos/lib.py.configfile/issues
**Note:** as it is clear by reading this page, the documentation is still in a
poor state. If you manage to understand how this library works and want to help
documenting it, you are welcome to fork the GitHub repository and request to
pull your improvements. Everything is written in docstrings in the only
python module of the package.
Also, if you have any questions, do not hesitate to ask in the issue tracker,
or write the author an email!
Examples
========
Basic usage
-----------
Suppose you have these two files:
``/path/to/file``:
.. code-block:: cfg
root_option = demo
[Section1]
test = ok
retest = no
test3 = yes
[Section2.Section2A]
foo = fooo
[Section3]
bar = yay
``/path/to/other_file``:
.. code-block:: cfg
[Section2C]
an_option = 2
Now run this script:
::
from configfile import ConfigFile
conf = ConfigFile("/path/to/file")
conf("Section2").upgrade("path/to/other_file")
option = conf("Section2", "Section2C")["an_option"]
print(option, type(option)) # 2 <class 'str'>
option = conf("Section2")("Section2C").get_int("an_option")
print(option, type(option)) # 2 <class 'int'>
conf.export_add("/path/to/file")
conf["root_option"] = "value"
conf("Section3").export_reset("/path/to/another_file")
You will end up with these files (``/path/to/other_file`` is left
untouched):
``/path/to/file``:
.. code-block:: cfg
root_option = demo
[Section1]
test = ok
retest = no
test3 = yes
[Section2.Section2A]
foo = fooo
[Section2.Section2C]
an_option = 2
[Section3]
bar = yay
``/path/to/another_file``:
.. code-block:: cfg
bar = yay
Interpolation
-------------
Suppose you have this file:
``/path/to/file``:
.. code-block:: cfg
[Section1]
option = foo ${$:Section2$:optionA$}
[Section1.Section2]
optionA = some value
optionB = ${optionA$} test
optionC = test ${$:optionA$}
[Section3]
option = ${Section1$:Section2$:optionA$} bar
Now run this script:
::
from configfile import ConfigFile
conf = ConfigFile("/path/to/file", interpolation=True)
print(conf('Section1')['option']) # foo some value
print(conf('Section1', 'Section2')['optionA']) # some value
print(conf('Section1', 'Section2')['optionB']) # some value test
print(conf('Section1', 'Section2')['optionC']) # test some value
print(conf('Section3')['option']) # some value bar
Module contents
===============
"""
import errno
import re as re_
import collections
import io
class Section(object):
"""
The class for a section in the configuration file, including the root
section. You should never need to instantiate this class directly, use
:py:class:`ConfigFile` instead.
"""
# TODO: Compile only once (bug #20)
_PARSE_SECTION = r'^\s*\[(.+)\]\s*$'
_PARSE_OPTION = r'^\s*([^\=]+?)\s*\=\s*(.*?)\s*$'
_PARSE_COMMENT = r'^\s*[#;]{1}\s*(.*?)\s*$'
_PARSE_IGNORE = r'^\s*$'
_SECTION_SUB = r'^[a-zA-Z_]+(?:\.?[a-zA-Z0-9_]+)*$'
_SECTION_PLAIN = r'^[a-zA-Z_]+[a-zA-Z0-9_]*$'
_OPTION = r'^[a-zA-Z_]+[a-zA-Z0-9_]*$'
_VALUE = r'^.*$'
_SECTION_SEP = r'.'
_OPTION_SEP = r' = '
# "{}" will be replaced with the section name by str.format
_SECTION_MARKERS = r'[{}]'
_COMMENT_MARKER = r'# '
_INTERPOLATION_SPECIAL = '$'
_INTERPOLATION_SPECIAL_ESC = _INTERPOLATION_SPECIAL * 2
_INTERPOLATION_START = _INTERPOLATION_SPECIAL + '{'
_INTERPOLATION_SEP = _INTERPOLATION_SPECIAL + ':'
_INTERPOLATION_END = _INTERPOLATION_SPECIAL + '}'
_INTERPOLATION_SPLIT = (r'(' + r'|'.join(re_.escape(mark) for mark in (
_INTERPOLATION_SPECIAL_ESC, _INTERPOLATION_START,
_INTERPOLATION_SEP, _INTERPOLATION_END)) + r')')
_GET_BOOLEAN_TRUE = ('true', '1', 'yes', 'on', 'enabled')
_GET_BOOLEAN_FALSE = ('false', '0', 'no', 'off', 'disabled')
_GET_BOOLEAN_DEFAULT = None
_DICT_CLASS = collections.OrderedDict
# Use lambda to create a new object every time
_EMPTY_SECTION = lambda self: (self._DICT_CLASS(), self._DICT_CLASS())
def __init__(self, name=None, parent=None, safe_calls=False,
             inherit_options=False, subsections=True, ignore_case=True):
    """
    Constructor.

    :param str name: The name of the section.
    :param Section parent: A reference to the parent section object.
    :param bool safe_calls: If True, when calling a non-existent
        subsection, its closest existing ancestor is returned.
    :param bool inherit_options: Whether the section will inherit the
        options from its ancestors.
    :param bool subsections: If True, subsections are enabled; otherwise
        they are disabled.
    :param bool ignore_case: If True, section and option names will be
        compared ignoring case differences; regular expressions will use
        ``re.I`` flag.
    """
    # None for the root section, otherwise this section's name.
    self._NAME = name
    # None for the root section, otherwise the containing Section object.
    self._PARENT = parent
    # TODO: Move constant settings to a Settings class (bug #19)
    self._SAFE_CALLS = safe_calls
    self._INHERIT_OPTIONS = inherit_options
    self._ENABLE_SUBSECTIONS = subsections
    self._IGNORE_CASE = ignore_case
    # Flag value handed to the re module wherever names are matched
    # (re.I when case is ignored, 0 otherwise).
    self._RE_I = re_.I if self._IGNORE_CASE else 0
    # Section-name validation pattern: dotted names are only legal when
    # subsections are enabled.
    self._SECTION = self._SECTION_SUB if self._ENABLE_SUBSECTIONS else \
        self._SECTION_PLAIN
    # Ordered mappings (see _DICT_CLASS) of option name -> value and of
    # subsection name -> Section object.
    self._options = self._DICT_CLASS()
    self._subsections = self._DICT_CLASS()
### DATA MODEL ###
def __call__(self, *path, **kwargs):
"""
Enables calling directly the object with a string or sequence of
strings, returning the corresponding subsection object, if existent.
:param path: A sequence of strings, representing a relative path of
section names to the target descendant subsection, whose name is
the last item.
:type path: str
:param bool safe: If True, when calling a non-existent subsection, its
closest existing ancestor is returned.
"""
# The Python 3 definition was:
#def __call__(self, *path, safe=None):
# But to keep compatibility with Python 2 it has been changed to the
# current
safe = kwargs.get('safe')
section = self
for sname in path:
try:
lsname = sname.lower()
except AttributeError:
raise TypeError('Section name mus
|
examachine/pisi
|
tests/buildtests/merhaba-pisi-1.0/usr/bin/merhaba-pisi.py
|
Python
|
gpl-3.0
| 107
| 0.009346
|
#!/
|
usr/bin/env python
# -*- coding: u
|
tf-8 -*-
import os
print "Sana da merhaba %s" % (os.getenv("USER"))
|
spatuloricaria/Uncap
|
eve_site/eve_site/wsgi.py
|
Python
|
gpl-3.0
| 391
| 0.002558
|
"""
WSGI config for eve_site project.
It exposes the WSGI callable as a module-level variable named ``
|
application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/howto/deplo
|
yment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "eve_site.settings")
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
|
MalloyPower/parsing-python
|
front-end/testsuite-python-lib/Python-2.2/Lib/test/test_cfgparser.py
|
Python
|
mit
| 7,274
| 0
|
import ConfigParser
import StringIO
from test_support import TestFailed, verify
def basic(src):
print "Testing basic accessors..."
cf = ConfigParser.ConfigParser()
sio = StringIO.StringIO(src)
cf.readfp(sio)
L = cf.sections()
L.sort()
verify(L == [r'Commented Bar',
r'Foo Bar',
r'Internationalized Stuff',
r'Section\with$weird%characters[' '\t',
r'Spacey Bar',
],
"unexpected list of section names")
# The use of spaces in the section names serves as a regression test for
# SourceForge bug #115357.
# http://sourceforge.net/bugs/?func=detailbug&group_id=5470&bug_id=115357
verify(cf.get('Foo Bar', 'foo', raw=1) == 'bar')
verify(cf.get('Spacey Bar', 'foo', raw=1) == 'bar')
verify(cf.get('Commented Bar', 'foo', raw=1) == 'bar')
verify('__name__' not in cf.options("Foo Bar"),
'__name__ "option" should not be exposed by the API!')
# Make sure the right things happen for remove_option();
# added to include check for SourceForge bug #123324:
verify(cf.remove_option('Foo Bar', 'foo'),
"remove_option() failed to report existance of option")
verify(not cf.has_option('Foo Bar', 'foo'),
"remove_option() failed to remove option")
verify(not cf.remove_option('Foo Bar', 'foo')
|
,
"remove_option() failed to report non-existance of option"
" that was removed")
try:
cf.remove_option('No Such Section', 'foo')
except ConfigParser.NoSectionError:
pass
else:
raise TestFailed(
"remove_option() failed to report non-existance of option"
" that never existed")
def case_sensiti
|
vity():
print "Testing case sensitivity..."
cf = ConfigParser.ConfigParser()
cf.add_section("A")
cf.add_section("a")
L = cf.sections()
L.sort()
verify(L == ["A", "a"])
cf.set("a", "B", "value")
verify(cf.options("a") == ["b"])
verify(cf.get("a", "b", raw=1) == "value",
"could not locate option, expecting case-insensitive option names")
verify(cf.has_option("a", "b"))
cf.set("A", "A-B", "A-B value")
for opt in ("a-b", "A-b", "a-B", "A-B"):
verify(cf.has_option("A", opt),
"has_option() returned false for option which should exist")
verify(cf.options("A") == ["a-b"])
verify(cf.options("a") == ["b"])
cf.remove_option("a", "B")
verify(cf.options("a") == [])
# SF bug #432369:
cf = ConfigParser.ConfigParser()
sio = StringIO.StringIO("[MySection]\nOption: first line\n\tsecond line\n")
cf.readfp(sio)
verify(cf.options("MySection") == ["option"])
verify(cf.get("MySection", "Option") == "first line\nsecond line")
def boolean(src):
print "Testing interpretation of boolean Values..."
cf = ConfigParser.ConfigParser()
sio = StringIO.StringIO(src)
cf.readfp(sio)
for x in range(1, 5):
verify(cf.getboolean('BOOLTEST', 't%d' % (x)) == 1)
for x in range(1, 5):
verify(cf.getboolean('BOOLTEST', 'f%d' % (x)) == 0)
for x in range(1, 5):
try:
cf.getboolean('BOOLTEST', 'e%d' % (x))
except ValueError:
pass
else:
raise TestFailed(
"getboolean() failed to report a non boolean value")
def interpolation(src):
    # Check %(name)s value interpolation: the magic __name__ reference in
    # a default, and chained references.  The driver data shows chains of
    # up to 10 steps succeeding while an 11-step chain raises
    # InterpolationDepthError, so the parser's depth limit is evidently 10.
    print "Testing value interpolation..."
    cf = ConfigParser.ConfigParser({"getname": "%(__name__)s"})
    sio = StringIO.StringIO(src)
    cf.readfp(sio)
    # %(__name__)s expands to the section name.
    verify(cf.get("Foo", "getname") == "Foo")
    verify(cf.get("Foo", "bar") == "something with interpolation (1 step)")
    verify(cf.get("Foo", "bar9")
           == "something with lots of interpolation (9 steps)")
    verify(cf.get("Foo", "bar10")
           == "something with lots of interpolation (10 steps)")
    # One substitution pass beyond the limit must fail.
    expect_get_error(cf, ConfigParser.InterpolationDepthError, "Foo", "bar11")
def parse_errors():
print "Testing parse errors..."
expect_parse_error(ConfigParser.ParsingError,
"""[Foo]\n extra-spaces: splat\n""")
expect_parse_error(ConfigParser.ParsingError,
"""[Foo]\n extra-spaces= splat\n""")
expect_parse_error(ConfigParser.ParsingError,
"""[Foo]\noption-without-value\n""")
expect_parse_error(ConfigParser.ParsingError,
"""[Foo]\n:value-without-option-name\n""")
expect_parse_error(ConfigParser.ParsingError,
"""[Foo]\n=value-without-option-name\n""")
expect_parse_error(ConfigParser.MissingSectionHeaderError,
"""No Section!\n""")
def query_errors():
print "Testing query interface..."
cf = ConfigParser.ConfigParser()
verify(cf.sections() == [],
"new ConfigParser should have no defined sections")
verify(not cf.has_section("Foo"),
"new ConfigParser should have no acknowledged sections")
try:
cf.options("Foo")
except ConfigParser.NoSectionError, e:
pass
else:
raise TestFailed(
"Failed to catch expected NoSectionError from options()")
try:
cf.set("foo", "bar", "value")
except ConfigParser.NoSectionError, e:
pass
else:
raise TestFailed("Failed to catch expected NoSectionError from set()")
expect_get_error(cf, ConfigParser.NoSectionError, "foo", "bar")
cf.add_section("foo")
expect_get_error(cf, ConfigParser.NoOptionError, "foo", "bar")
def weird_errors():
print "Testing miscellaneous error conditions..."
cf = ConfigParser.ConfigParser()
cf.add_section("Foo")
try:
cf.add_section("Foo")
except ConfigParser.DuplicateSectionError, e:
pass
else:
raise TestFailed("Failed to catch expected DuplicateSectionError")
def expect_get_error(cf, exctype, section, option, raw=0):
    # Helper: assert that cf.get(section, option) raises exactly
    # `exctype`; returning normally (or raising something else) is a
    # test failure.
    try:
        cf.get(section, option, raw=raw)
    except exctype, e:
        pass
    else:
        raise TestFailed("Failed to catch expected " + exctype.__name__)
def expect_parse_error(exctype, src):
    # Helper: assert that parsing the document `src` via readfp() raises
    # exactly `exctype`; a clean parse is a test failure.
    cf = ConfigParser.ConfigParser()
    sio = StringIO.StringIO(src)
    try:
        cf.readfp(sio)
    except exctype, e:
        pass
    else:
        raise TestFailed("Failed to catch expected " + exctype.__name__)
basic(r"""
[Foo Bar]
foo=bar
[Spacey Bar]
foo = bar
[Commented Bar]
foo: bar ; comment
[Section\with$weird%characters[""" '\t' r"""]
[Internationalized Stuff]
foo[bg]: Bulgarian
foo=Default
foo[en]=English
foo[de]=Deutsch
""")
case_sensitivity()
boolean(r"""
[BOOLTEST]
T1=1
T2=TRUE
T3=True
T4=oN
T5=yes
F1=0
F2=FALSE
F3=False
F4=oFF
F5=nO
E1=2
E2=foo
E3=-1
E4=0.1
E5=FALSE AND MORE
""")
interpolation(r"""
[Foo]
bar=something %(with1)s interpolation (1 step)
bar9=something %(with9)s lots of interpolation (9 steps)
bar10=something %(with10)s lots of interpolation (10 steps)
bar11=something %(with11)s lots of interpolation (11 steps)
with11=%(with10)s
with10=%(with9)s
with9=%(with8)s
with8=%(with7)s
with7=%(with6)s
with6=%(with5)s
with5=%(with4)s
with4=%(with3)s
with3=%(with2)s
with2=%(with1)s
with1=with
[Mutual Recursion]
foo=%(bar)s
bar=%(foo)s
""")
parse_errors()
query_errors()
weird_errors()
|
madcore-ai/containers
|
kfn/examples/producer.py
|
Python
|
mit
| 207
| 0.009662
|
# Minimal Kafka producer example: send one message to the "test" topic.
# (Reconstructed: a stray separator row had split "KafkaProducer" in two.)
from kafka import KafkaProducer

# In-cluster bootstrap address, kept commented out for reference:
#producer = KafkaProducer(bootstrap_servers='kafka-kf.kafka.svc.cluster.local:9092')
producer = KafkaProducer(bootstrap_servers='localhost:9092')
# NOTE(review): recent kafka-python versions require bytes values (or a
# value_serializer) — sending the str 'hello' may raise AssertionError;
# confirm the client version this example targets.
producer.send('test', 'hello')
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.