| column | type | range |
|---|---|---|
| repo_name | string | lengths 5–100 |
| path | string | lengths 4–231 |
| language | string | 1 class |
| license | string | 15 classes |
| size | int64 | 6–947k |
| score | float64 | 0–0.34 |
| prefix | string | lengths 0–8.16k |
| middle | string | lengths 3–512 |
| suffix | string | lengths 0–8.17k |

Each record below is listed as `repo_name | path | language | license | size | score`, followed by the file contents (the prefix, middle and suffix spans).
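Each row is one fill-in-the-middle (FIM) sample: a Python source file split into a `prefix`, a short `middle` span, and a `suffix`. A minimal sketch of how a row could be turned back into a file, or into an infilling-style training string, assuming dict-like rows; the `<|fim_*|>` sentinel names are illustrative assumptions, not part of this dataset:

```python
# A minimal sketch. Rows are assumed to be dicts with "prefix",
# "middle" and "suffix" string fields, per the schema above. The
# <|fim_*|> sentinels are hypothetical placeholders; real tokenizers
# use their own special tokens.
def reassemble(row):
    # Concatenating the three spans recovers the original source file.
    return row["prefix"] + row["middle"] + row["suffix"]

def to_psm_string(row):
    # Prefix-suffix-middle (PSM) ordering, a common layout for infilling.
    return ("<|fim_prefix|>" + row["prefix"]
            + "<|fim_suffix|>" + row["suffix"]
            + "<|fim_middle|>" + row["middle"])

row = {"prefix": "def add(a, b):\n    ret", "middle": "urn a", "suffix": " + b\n"}
assert reassemble(row) == "def add(a, b):\n    return a + b\n"
```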
eliksir/mailmojo-python-sdk | test/test_page_api.py | Python | apache-2.0 | 1,093 | 0
# coding: utf-8
"""
MailMojo API
v1 of the MailMojo API # noqa: E501
OpenAPI spec version: 1.1.0
Contact: hjelp@mailmojo.no
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import mailmojo_sdk
from mailmojo_sdk.api.page_api import PageApi # noqa: E501
from mailmojo_sdk.rest import ApiException
class TestPageApi(unittest.TestCase):
"""PageApi unit test stubs"""
def setUp(self):
self.api = mailmojo_sdk.api.page_api.PageApi()  # noqa: E501
def tearDown(self):
pass
def test_get_page_by_id(self):
"""Test case for get_page_by_id
Retrieve a landing page. # noqa: E501
"""
pass
def test_get_pages(self):
"""Test case for get_pages
Retrieve all landing pages. # noqa: E501
"""
pass
def test_update_page(self):
"""Test case for update_page
Update a landing page partially. # noqa: E501
"""
pass
if __name__ == '__main__':
unittest.main()

chhantyal/exchange | uhura/exchange/utils.py | Python | bsd-3-clause | 183 | 0.010929
"""
Utilities and helper functions
"""
def get_object_or_none(model, **kwargs):
try:
return model.objects.get(**kwargs)
except model.DoesNotExist:
return None

MikeLing/shogun | examples/undocumented/python/classifier_gmnpsvm.py | Python | gpl-3.0 | 912 | 0.046053
#!/usr/bin/env python
traindat = '../data/fm_train_real.dat'
testdat = '../data/fm_test_real.dat'
label_traindat = '../data/label_train_multiclass.dat'
parameter_list = [[traindat,testdat,label_traindat,2.1,1,1e-5],[traindat,testdat,label_traindat,2.2,1,1e-5]]
def classifier_gmnpsvm (train_fname=traindat,test_fname=testdat,label_fname=label_traindat,width=2.1,C=1,epsilon=1e-5):
from shogun import RealFeatures, MulticlassLabels
from shogun import GaussianKernel, GMNPSVM, CSVFile
feats_train=RealFeatures(CSVFile(train_fname))
feats_test=RealFeatures(CSVFile(test_fname))
labels=MulticlassLabels(CSVFile(label_fname))
kernel=GaussianKernel(feats_train, feats_train, width)
svm=GMNPSVM(C, kernel, labels)
svm.set_epsilon(epsilon)
svm.train(feats_train)
out=svm.apply(feats_test).get_labels()
return out,kernel
if __name__=='__main__':
print('GMNPSVM')
classifier_gmnpsvm(*parameter_list[0])

dhuang/incubator-airflow | airflow/example_dags/test_utils.py | Python | apache-2.0 | 1,172 | 0
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Used for unit tests"""
from airflow import DAG
from airflow.operators.bash import BashOperator
from airflow.utils.dates import days_ago
with DAG(dag_id='test_utils', schedule_interval=None, tags=['example']) as dag:
task = BashOperator(
task_id='sleeps_forever',
bash_command="sleep 10000000000",
start_date=days_ago(2),
owner='airflow',
)

Akylas/CouchPotatoServer | couchpotato/core/notifications/history/main.py | Python | gpl-3.0 | 732 | 0.01776
from couchpotato import get_session
from couchpotato.core.helpers.encoding import toUnicode
from couchpotato.core.logger import CPLog
from couchpotato.core.notifications.base import Notification
from couchpotato.core.settings.model import History as Hist
import time
log = CPLog(__name__)
class History(Notification):
listen_to = ['movie.downloaded', 'movie.snatched', 'renamer.canceled']
def notify(self, message = '', data = {}, listener = None):
db = get_session()
history = Hist(
added = int(time.time()),
message = toUnicode(message),
release_id = data.get('id', 0)
)
db.add(history)
db.commit()
#db.close()
return True

Xion/recursely | recursely/__init__.py | Python | bsd-2-clause | 1,663 | 0
"""
recursely
"""
__version__ = "0.1"
__description__ = "Recursive importer for Python submodules"
__author__ = "Karol Kuczmarski"
__license__ = "Simplified BSD"
import sys
from recursely._compat import IS_PY3
from recursely.importer import RecursiveImporter
from recursely.utils import SentinelList
__all__ = ['install']
def install(retroactive=True):
"""Install the recursive import hook in ``sys.meta_path``,
enabling the use of ``__recursive__`` directive.
:param retroactive: Whether the hook should be retroactively applied
to modules that have been imported before
it was installed.
"""
if RecursiveImporter.is_installed():
return
importer = RecursiveImporter()
# because the hook is a catch-all one, we ensure that it's always
# at the very end of ``sys.meta_path``, so that it's tried only if
# no other (more specific) hook has been chosen by Python
if IS_PY3:
for i in reversed(range(len(sys.meta_path))):
ih_module = getattr(sys.meta_path[i], '__module__', '')
is_builtin = ih_module == '_frozen_importlib'
if not is_builtin:
break
sys.meta_path = SentinelList(
sys.meta_path[:i],
sentinels=[importer] + sys.meta_path[i:])
else:
sys.meta_path = SentinelList(sys.meta_path, sentinel=importer)
# look through already imported packages and recursively import
# their submodules, if they contain the ``__recursive__`` directive
if retroactive:
for module in list(sys.modules.values()):
importer.recurse(module)

ESSS/err | errbot/repo_manager.py | Python | gpl-3.0 | 9,791 | 0.001634
import logging
import os
import shutil
import subprocess
from collections import namedtuple
from datetime import timedelta, datetime
from os import path
import tarfile
from urllib.error import HTTPError, URLError
from urllib.request import urlopen
from urllib.parse import urlparse
import json
import re
from errbot.plugin_manager import check_dependencies
from errbot.storage import StoreMixin
from .utils import ON_WINDOWS
log = logging.getLogger(__name__)
def human_name_for_git_url(url):
# try to humanize the last part of the git url as much as we can
s = url.split(':')[-1].split('/')[-2:]
if s[-1].endswith('.git'):
s[-1] = s[-1][:-4]
return str('/'.join(s))
INSTALLED_REPOS = 'installed_repos'
REPO_INDEXES_CHECK_INTERVAL = timedelta(hours=1)
REPO_INDEX = 'repo_index'
LAST_UPDATE = 'last_update'
RepoEntry = namedtuple('RepoEntry', 'entry_name, name, python, repo, path, avatar_url, documentation')
find_words = re.compile(r"(\w[\w']*\w|\w)")
class RepoException(Exception):
pass
def makeEntry(repo_name, plugin_name, json_value):
return RepoEntry(entry_name=repo_name,
name=plugin_name,
python=json_value['python'],
repo=json_value['repo'],
path=json_value['path'],
avatar_url=json_value['avatar_url'],
documentation=json_value['documentation'])
def tokenizeJsonEntry(json_dict):
"""
Returns all the words in a repo entry.
"""
return set(find_words.findall(' '.join((word.lower() for word in json_dict.values()))))
def which(program):
if ON_WINDOWS:
program += '.exe'
def is_exe(file_path):
return os.path.isfile(file_path) and os.access(file_path, os.X_OK)
fpath, fname = os.path.split(program)
if fpath:
if is_exe(program):
return program
else:
for path in os.environ["PATH"].split(os.pathsep):
exe_file = os.path.join(path, program)
if is_exe(exe_file):
return exe_file
return None
class BotRepoManager(StoreMixin):
"""
Manages the repo list, git clones/updates of the repos.
"""
def __init__(self, storage_plugin, plugin_dir, plugin_indexes):
"""
Make a repo manager.
:param storage_plugin: where the manager store its state.
:param plugin_dir: where on disk it will git clone the repos.
:param plugin_indexes: a list of URL / path to get the json repo index.
"""
super().__init__()
self.plugin_indexes = plugin_indexes
self.storage_plugin = storage_plugin
self.plugin_dir = plugin_dir
self.open_storage(storage_plugin, 'repomgr')
def shutdown(self):
self.close_storage()
def check_for_index_update(self):
if REPO_INDEX not in self:
log.info('No repo index, creating it.')
self.index_update()
return
if datetime.fromtimestamp(self[REPO_INDEX][LAST_UPDATE]) < datetime.now() - REPO_INDEXES_CHECK_INTERVAL:
log.info('Index is too old, update it.')
self.index_update()
def index_update(self):
index = {LAST_UPDATE: datetime.now().timestamp()}
for source in reversed(self.plugin_indexes):
try:
if urlparse(source).scheme in ('http', 'https'):
with urlopen(url=source, timeout=10) as request: # nosec
log.debug('Update from remote source %s...', source)
encoding = request.headers.get_content_charset()
content = request.read().decode(encoding if encoding else 'utf-8')
else:
with open(source, encoding='utf-8', mode='r') as src_file:
log.debug('Update from local source %s...', source)
content = src_file.read()
index.update(json.loads(content))
except (HTTPError, URLError, IOError):
log.exception('Could not update from source %s, keep the index as it is.', source)
break
else:
# nothing failed so ok, we can store the index.
self[REPO_INDEX] = index
log.debug('Stored %d repo entries.', len(index) - 1)
def get_repo_from_index(self, repo_name):
"""
Retrieve the list of plugins for the repo_name from the index.
:param repo_name: the name of the repo
:return: a list of RepoEntry
"""
plugins = self[REPO_INDEX].get(repo_name, None)
if plugins is None:
return None
result = []
for name, plugin in plugins.items():
result.append(makeEntry(repo_name, name, plugin))
return result
def search_repos(self, query):
"""
A simple search feature, keywords are AND and case insensitive on all the fields.
:param query: a string query
:return: an iterator of RepoEntry
"""
# first see if we are up to date.
self.check_for_index_update()
if REPO_INDEX not in self:
log.error('No index.')
return
query_work_set = set(find_words.findall(query.lower()))
for repo_name, plugins in self[REPO_INDEX].items():
if repo_name == LAST_UPDATE:
continue
for plugin_name, plugin in plugins.items():
if query_work_set.intersection(tokenizeJsonEntry(plugin)):
yield makeEntry(repo_name, plugin_name, plugin)
def get_installed_plugin_repos(self):
return self.get(INSTALLED_REPOS, {})
def add_plugin_repo(self, name, url):
repos = self.get_installed_plugin_repos()
repos[name] = url
self[INSTALLED_REPOS] = repos
def set_plugin_repos(self, repos):
""" Used externally.
"""
self[INSTALLED_REPOS] = repos
def get_all_repos_paths(self):
return [os.path.join(self.plugin_dir, d) for d in self.get(INSTALLED_REPOS, {}).keys()]
def install_repo(self, repo):
"""
Install the repository from repo
:param repo:
The url, git url or path on disk of a repository. It can point to either a git repo or
a .tar.gz of a plugin
:returns:
The path on disk where the repo has been installed on.
:raises: :class:`~RepoException` if an error occurred.
"""
self.check_for_index_update()
# try to find if we have something with that name in our index
if repo in self[REPO_INDEX]:
human_name = repo
repo_url = next(iter(self[REPO_INDEX][repo].values()))['repo']
elif not repo.endswith('tar.gz'):
# This is a repo url, make up a plugin definition for it
human_name = human_name_for_git_url(repo)
repo_url = repo
else:
repo_url = repo
git_path = which('git')
if not git_path:
raise RepoException('git command not found: You need to have git installed on '
'your system to be able to install git based plugins.', )
# TODO: Update download path of plugin.
if repo_url.endswith('tar.gz'):
fo = urlopen(repo_url) # nosec
tar = tarfile.open(fileobj=fo, mode='r:gz')
tar.extractall(path=self.plugin_dir)
s = repo_url.split(':')[-1].split('/')[-1]
human_name = s[:-len('.tar.gz')]
else:
human_name = human_name or human_name_for_git_url(repo_url)
p = subprocess.Popen([git_path, 'clone', repo_url, human_name], cwd=self.plugin_dir, stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
feedback = p.stdout.read().decode('utf-8')
error_feedback = p.stderr.read().decode('utf-8')
if p.wait():
raise RepoException("Could not load this plugin: \n\n%s\n\n---\n\n%s" % (feedback, error_feedback))
self.add_plugin_repo(human_name, repo_url)
return os.pa

lkouame/cs3240-labdemo | new.py | Python | mit | 71 | 0.014085
#include header
def new_method(msg):
greeting(msg)
print(msg)

platsch/OctoPNP | octoprint_OctoPNP/ImageProcessingCaller.py | Python | agpl-3.0 | 974 | 0.004107
# -*- coding: utf-8 -*-
""" This file is part of OctoPNP
This is a test script to execute the imageprocessing-steps independent from the main software
and particularly without a running printer.
Main author: Florens Wasserfall <wasserfall@kalanka.de>
"""
import time
import ImageProcessing
im = ImageProcessing.ImageProcessing(15.0, 120, 120)
start_time = time.time()
im.SetInteractive(True)
# im.locatePartInBox("../utils/testimages/head_atmega_SO8.png", False)
# im.locatePartInBox("../utils/testimages/head_atmega_SO8_2.png", False)
# print im.getPartOrientation("../utils/testimages/bed_atmega_SO8_rotated.png", 30)
im.getPartOrientation("../utils
|
/testimages/orientation_bed_atmega_SO8_green.png", 55.65)
# im.getPartPosition("../utils/testimages/orientation_bed_atmega_SO8_green.png", 55.65)
# im.getPartOrientation("../utils/testimages/bed_resistor_1206.png", 55.65)
end_time = time.time()
print("--- %s seconds ---" % (time.time() - start_time))

gauravmm/gauravmanek.com | generators/bkggen.py | Python | mit | 3,912 | 0.025562
#!/usr/bin/python3
import random
import svgwrite
from svgwrite import rgb
def bkggen_square(name):
# image size
img_draw_sz = "6cm"
img_cell_count = 24
img_cell_sz = 5
img_gutter_sz = 1
colors = [rgb(col, col, col) for col in [246, 248, 250, 252, 255]]
dwg = svgwrite.Drawing(name, (img_draw_sz, img_draw_sz), debug=True)
# Define a user coordinate system:
img_user_sz = (img_cell_sz + img_gutter_sz) * img_cell_count
dwg.viewbox(0, 0, img_user_sz, img_user_sz)
for _x in range(0, img_user_sz, img_cell_sz + img_gutter_sz):
for _y in range(0, img_user_sz, img_cell_sz + img_gutter_sz):
_fill = random.choice(colors)
if _fill is not None:
dwg.add(dwg.rect(insert=(_x, _y), size=(img_cell_sz, img_cell_sz), fill=_fill))
dwg.save()
## end of http://code.activestate.com/recipes/577111/ }}}
def bkggen_triangle(name):
# image size
img_draw_wd = "5.5cm"
img_draw_ht = "6cm"
img_cell_count = 12
img_cell_sz = 5
img_gutter_sz = 1
colors = [0.05, 0.10, 0.15, 0.2]
base_color = rgb(255, 255, 255)
dwg = svgwrite.Drawing(name, (img_draw_wd, img_draw_ht), debug=True)
# Define a user coordinate system:
img_user_sz = (img_cell_sz + img_gutter_sz) * img_cell_count
#Scale everything by 2 to prevent weird round-off errors
img_cell_sz *= 2
img_gutter_sz *= 2
img_user_sz *= 2
dwg.viewbox(0, 0,
(img_cell_sz + img_gutter_sz/2) * img_cell_count,
(img_cell_sz + img_gutter_sz) * img_cell_count)
#dwg.add(dwg.rect(insert=(0, 0), size=((img_cell_sz + img_gutter_sz/2) * img_cell_count, (img_cell_sz + img_gutter_sz) * img_cell_count), fill=rgb(128, 0, 0)))
color_band_first = None
for _Cy in range(img_cell_count * 2 - 1, -2, -1):
_y = _Cy * (img_cell_sz + img_gutter_sz) / 2 + img_gutter_sz/2
color_band = [random.choice(colors) for _Cx in range(img_cell_count)]
if color_band_first is None:
color_band_first = color_band
elif _Cy == -1:
color_band = color_band_first
for _Cx, _fill in enumerate(color_band):
_x = _Cx * (img_cell_sz + img_gutter_sz / 2)
if _fill is not None:
if (_Cx + _Cy) % 2 == 0:
dwg.add(dwg.polygon(points=[(_x, _y), (_x + img_cell_sz, _y + img_cell_sz/2), (_x, _y + img_cell_sz)], fill=base_color, fill_opacity=_fill))
else:
dwg.add(dwg.polygon(points=[(_x + img_cell_sz, _y), (_x, _y + img_cell_sz/2), (_x + img_cell_sz, _y + img_cell_sz)], fill=base_color, fill_opacity=_fill))
dwg.save()
def bkggen_triangle_optimize(name):
# image size
img_draw_wd = "5.5cm"
img_draw_ht = "6cm"
img_cell_count = 12
img_cell_sz = 5
img_gutter_sz = 1
opacity = [0.05, 0.10, 0.15, 0.2]
dwg = svgwrite.Drawing(name, (img_draw_wd, img_draw_ht), debug=True)
# Define a user coordinate system:
img_user_sz = (img_cell_sz + img_gutter_sz) * img_cell_count
#Scale everything by 2 to prevent weird round-off errors
img_cell_sz *= 2
img_gutter_sz *= 2
img_user_sz *= 2
dwg.viewbox(0, 0,
(img_cell_sz + img_gutter_sz/2) * img_cell_count,
(img_cell_sz + img_gutter_sz) * img_cell_count)
opacity_layers = [dwg.add(dwg.g(fill='white', fill_opacity=op)) for op in opacity]
color_band_first = None
for _Cy in range(img_cell_count * 2 - 1, -2, -1):
_y = _Cy * (img_cell_sz + img_gutter_sz) / 2 + img_gutter_sz/2
color_band = [random.choice(opacity_layers) for _Cx in range(img_cell_count)]
if color_band_first is None:
color_band_first = color_band
elif _Cy == -1:
color_band = color_band_first
for _Cx, _fill in enumerate(color_band):
_x = _Cx * (img_cell_sz + img_gutter_sz / 2)
if _fill is not None:
if (_Cx + _Cy) % 2 == 0:
_fill.add(dwg.polygon(points=[(_x, _y), (_x + img_cell_sz, _y + img_cell_sz/2), (_x, _y + img_cell_sz)]))
else:
_fill.add(dwg.polygon(points=[(_x + img_cell_sz, _y), (_x, _y + img_cell_sz/2), (_x + img_cell_sz, _y + img_cell_sz)]))
dwg.save()
if __name__ == '__main__':
bkggen_triangle_optimize("../source/img/bkg.svg")

chris-clm09/bzflag | bzagents/pfAgent.py | Python | gpl-3.0 | 14,123 | 0.004178
#!/usr/bin/python -tt
import sys
import math
import time
from myPrint import *
from bzrc import BZRC, Command
###########################Potential Field Fun############################################
####################################################################
# Distance between two points.
####################################################################
def distance(x, y, goal):
return math.sqrt(((goal.y - y)*(goal.y - y)) + ((goal.x - x)*(goal.x - x)))
def distance_points(x, y, xg, yg):
return math.sqrt(((yg - y)*(yg - y)) + ((xg - x)*(xg - x)))
def sign(a):
if a == 0 or a == -0:
return 0
return a / -a
####################################################################
# Generate a Single Repulsive field.
####################################################################
def generate_a_repulsive_field(x, y, obstacle, make_it_tangent=False, goal=None):
r = distance_points(obstacle[0][0],
obstacle[0][1],
obstacle[2][0],
obstacle[2][1]) / 2.0
center = (obstacle[0][0] + ((obstacle[2][0] - obstacle[0][0]) / 2.0),
obstacle[0][1] + ((obstacle[2][1] - obstacle[0][1]) / 2.0))
s = 60.0
b = 1.0/s
d = distance_points(x, y, center[0], center[1])
theta = math.atan2(center[1] - y, center[0] - x)
dx = -math.cos(theta)
dy = -math.sin(theta)
if make_it_tangent:
theta_l = theta - (math.pi / 2.0)
theta_r = theta + (math.pi / 2.0)
dx_l = -math.cos(theta_l)
dy_l = -math.sin(theta_l)
dx_r = -math.cos(theta_r)
dy_r = -math.sin(theta_r)
if distance_points(x + dx_l, y + dy_l, goal.x, goal.y) < distance_points(x+dx_r, y+dy_r, goal.x, goal.y):
dx = dx_l
dy = dy_l
else:
dx = dx_r
dy = dy_r
temp = None
if d < r:
temp = (dx * s, dy * s)
elif r <= d and d <= s+r:
temp = (b * (s + r - d) * dx, b * (s + r - d) * dy)
elif d > s+r:
temp = (0, 0)
return temp
####################################################################
# Calculate repulsive fields on a given location.
####################################################################
def generate_repulsive_field(x, y, obstacles):
total = [0, 0]
for o in obstacles:
temp = generate_a_repulsive_field(x, y, o)
total[0] += temp[0]
total[1] += temp[1]
return total
####################################################################
# Generate a single attractive vector.
####################################################################
def gen_an_attractive_field(x, y, goal):
r = 1.5
s = 30.0
al = 1.0/s
d = distance(x, y, goal)
theta = math.atan2(goal.y - y, goal.x - x)
temp = None
if d < r:
temp = (0.0, 0.0)
elif r <= d and d <= s+r:
temp = (al*(d-r)*math.cos(theta), al*(d-r)*math.sin(theta))
elif d > s+r:
temp = (al*s*math.cos(theta), al*s*math.sin(theta))
return temp
####################################################################
# Return the closest goal.
####################################################################
def get_min_goal(x, y, goals):
a_min = distance(x, y, goals[0])
min_goal = goals[0]
for g in goals:
temp = distance(x, y, g)
if temp < a_min:
min_goal = g
a_min = temp
return min_goal
####################################################################
# Generates the attractive vector given every possible goal.
####################################################################
def generate_attractive_field(x, y, goals):
min_goal = get_min_goal(x, y, goals)
return gen_an_attractive_field(x, y, min_goal)
####################################################################
# Calculate a Tangential field
####################################################################
def generate_tangential_fields(x, y, obstacles, goal):
total = [0, 0]
for o in obstacles:
temp = generate_a_repulsive_field(x, y, o, True, goal)
total[0] += temp[0]
total[1] += temp[1]
return total
####################################################################
# Generate the potential field for a given point.
####################################################################
def generate_potential_field(x, y, flags, obstacles):
tan = generate_tangential_fields(x, y, obstacles, get_min_goal(x, y, flags))
att = generate_attractive_field(x, y, flags)
rep = generate_repulsive_field(x, y, obstacles)
return (tan[0] + att[0] + rep[0],
tan[1] + att[1] + rep[1])
class HomeBaseCenter(object):
def __init__(self, x, y):
self.x = x
self.y = y
####################################################################
####################################################################
## Calculate a Tangential field
####################################################################
####################################################################
class Agent(object):
"""Class handles all command and control logic for a team's tanks."""
####################################################################
# Constructor
####################################################################
def __init__(self, bzrc):
self.bzrc = bzrc
self.constants = self.bzrc.get_constants()
self.obstacles = self.bzrc.get_obstacles()
self.commands = []
self.error0 = 0
self.my_flag = self.get_my_flag(bzrc.get_flags())
self.my_tanks = None
self.other_tanks = None
self.other_flags = None
self.shots = None
self.enemies = None
self.kp = 0.60
self.kd = 0.50
bases = self.bzrc.get_bases()
for base in bases:
if base.color == self.constants['team']:
self.home_base = base
self.home_base_center = HomeBaseCenter(self.home_base.corner1_x + ((self.home_base.corner3_x - self.home_base.corner1_x) / 2.0),
self.home_base.corner1_y + ((self.home_base.corner3_y - self.home_base.corner1_y) / 2.0))
self.time_set = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
self.error0 = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
####################################################################
####################################################################
def tick(self, time_diff):
my_tanks, other_tanks, flags, shots = self.bzrc.get_lots_o_stuff()
self.my_tanks = my_tanks
self.my_flag = self.get_my_flag(flags)
self.other_tanks = other_tanks
self.other_flags = self.remove_my_flag(flags)
self.shots = shots
self.enemies = [tank for tank in other_tanks
if tank.color != self.constants['team']]
#Clear Commands
self.commands = []
for tank in my_tanks:
#if tank.index == 0:
# self.sendToCaptureFlag(tank, time_diff)
self.send_to_capture_flag(tank, time_diff)
#self.attack_enemies(tank)
results = self.bzrc.do_commands(self.commands)
####################################################################
####################################################################
def determined_goals(self, tank):
result = self.ignore_flags_we_carry(self.other_flags)
if not self.is_our_flag_at_home():
result.append(self.my_flag) # someone moved our flag, retrieving it is a possible goal
if tank.flag != '-' or len(result) == 0:
return [self.home_base_center] # go home if we have nothing else to do
else:
return result
def generate_home_potential_field(self, x, y):
return generate_potential_field(x, y, [self.home_base_center], self.obstacles)
#############################################

LiqunHu/MVPN | complineProj.py | Python | gpl-3.0 | 143 | 0.006993
# -*- coding: utf-8 -*-
"""
Created on Thu Jul 21 16:48:46 2016
@author: huliqun
"""
import compileall
compileall.compile_dir('..')

mpiplani/Online-Pharmacy | online_pharmacy/online_pharmacy/urls.py | Python | apache-2.0 | 542 | 0.01476
from django.conf.urls import url,include
from django.contrib import admin
urlpatterns = [
url(r'^',include('Register_and_login.urls')),
url(r'^homepage/',include('MainPage.urls')),
url(r'^username/cart/',include('cart.urls')),
url(r'^username/',include('customer.urls')),
url(r'^pharmacy_name/',include('pharmacy.urls')),
url(r'^pharmacy_name/inventory',include('inventory.urls')),
url(r'^search/all_search=pcm',include('items.urls')),
url(r'^', include('order.urls')),
url(r'^admin/', admin.site.urls)
]

mjuenema/nomit | setup.py | Python | bsd-2-clause | 902 | 0.02439
with open('README.txt') as f:
long_description = f.read()
from distutils.core import setup
setup(
name = "nomit",
packages = ["nomit"],
version = "1.0",
description = "Process Monit HTTP/XML",
author = "Markus Juenemann",
author_email = "markus@juenemann.net",
url = "https://github.com/mjuenema/nomit",
download_url = "https://github.com/mjuenema/nomit/tarball/1.0",
keywords = ["xml", "Monit", "MMonit"],
classifiers = [
"Programming Language :: Python",
"Development Status :: 4
|
- Beta",
"Environment :: Other Environment",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: System :: Monitoring",
],
long_description = long_description
)

pi19404/mbed | workspace_tools/export/gccarm.py | Python | apache-2.0 | 3,520 | 0
"""
mbed SDK
Copyright (c) 2011-2013 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from exporters import Exporter
from os.path import splitext, basename
class GccArm(Exporter):
NAME = 'GccArm'
TOOLCHAIN = 'GCC_ARM'
TARGETS = [
'LPC1768',
'LPC1549',
'KL05Z',
'KL25Z',
'KL43Z',
'KL46Z',
'K64F',
'K22F',
'K20D50M',
'LPC4088',
'LPC4088_DM',
'LPC4330_M4',
'LPC11U24',
'LPC1114',
'LPC11U35_401',
'LPC11U35_501',
'LPC11U37H_401',
'LPC824',
'SSCI824',
'STM32F407',
'DISCO_F100RB',
'DISCO_F051R8',
'DISCO_F407VG',
'DISCO_F303VC',
'DISCO_F746NG',
'DISCO_L476VG',
'UBLOX_C027',
'ARCH_PRO',
'NRF51822',
'HRM1017',
'RBLAB_NRF51822',
'LPC2368',
'LPC2460',
'LPCCAPPUCCINO',
'ARCH_BLE',
'MTS_GAMBIT',
'ARCH_MAX',
'NUCLEO_F401RE',
'NUCLEO_F411RE',
'NUCLEO_F446RE',
'ARCH_MAX',
'DISCO_F429ZI',
'NUCLEO_F030R8',
'NUCLEO_F070RB',
'NUCLEO_F072RB',
'NUCLEO_F091RC',
'NUCLEO_F103RB',
'NUCLEO_F302R8',
'NUCLEO_F303RE',
'NUCLEO_F334R8',
'DISCO_L053C8',
'NUCLEO_L053R8',
'NUCLEO_L073RZ',
'DISCO_F334C8',
'MAX32600MBED',
'MAXWSNENV',
'MTS_MDOT_F405RG',
'MTS_MDOT_F411RE',
'NUCLEO_L152RE',
'NRF51_DK',
'NRF51_DONGLE',
'SEEED_TINY_BLE',
'DISCO_F401VC',
'DELTA_DFCM_NNN40',
'RZ_A1H',
'MOTE_L152RC',
'EFM32WG_STK3800',
'EFM32LG_STK3600',
'EFM32GG_STK3700',
'EFM32ZG_STK3200',
'EFM32HG_STK3400',
'NZ32SC151',
'SAMR21G18A',
'TEENSY3_1',
]
DOT_IN_RELATIVE_PATH = True
def generate(self):
# "make" wants Unix paths
self.resources.win_to_unix()
to_be_compiled = []
for r_type in ['s_sources', 'c_sources', 'cpp_sources']:
r = getattr(self.resources, r_type)
if r:
for source in r:
base, ext = splitext(source)
to_be_compiled.append(base + '.o')
libraries = []
for lib in self.resources.libraries:
l, _ = splitext(basename(lib))
libraries.append(l[3:])
ctx = {
'name': self.program_name,
'to_be_compiled': to_be_compiled,
'object_files': self.resources.objects,
'include_paths': self.resources.inc_dirs,
'library_paths': self.resources.lib_dirs,
'linker_script': self.resources.linker_script,
'libraries': libraries,
'symbols': self.get_symbols(),
'cpu_flags': self.toolchain.cpu
}
self.gen_file('gcc_arm_%s.tmpl' % self.target.lower(), ctx, 'Makefile')

ktok07b6/polyphony | tests/if/if28.py | Python | mit | 922 | 0
from polyphony import testbench
def g(x):
if x == 0:
return 0
return 1
def h(x):
if x == 0:
pass
def f(v, i, j, k):
if i == 0:
return v
elif i == 1:
return v
elif i == 2:
h(g(j) + g(k))
return v
elif i == 3:
for m in range(j):
v += 2
return v
else:
for n in range(i):
v += 1
return v
def if28(code, r1, r2, r3, r4):
if code == 0:
return f(r1, r2, r3, r4)
return 0
@testbench
def test():
assert 1 == if28(0, 1, 1, 0, 0)
assert 2 == if28(0, 2, 0, 0, 0)
assert 3 == if28(0, 3, 1, 0, 0)
assert 4 == if28(0, 4, 2, 0, 0)
assert 5 == if28(0, 5, 2, 1, 1)
assert 6 == if28(0, 6, 2, 2, 2)
assert 7 == if28(0, 7, 3, 0, 0)
assert 10 == if28(0, 8, 3, 1, 1)
assert 13 == if28(0, 9, 3, 2, 2)
assert 14 == if28(0, 10, 4, 0, 0)
test()

botswana-harvard/edc-dashboard | edc_dashboard/middleware.py | Python | gpl-2.0 | 1,322 | 0.007564
from django.conf import settings
from edc_constants.constants import MALE, FEMALE, OTHER, YES, NO, NOT_APPLICABLE
from edc_constants.constants import NEW, OPEN, CLOSED
class DashboardMiddleware:
def __init__(self, get_response):
self.get_response = get_response
def __call__(self, request):
try:
request.url_name_data
except AttributeError:
request.url_name_data = {}
try:
request.template_data
except AttributeError:
request.template_data = {}
response = self.get_response(request)
return response
def process_template_response(self, request, response):
try:
reviewer_site_id = settings.REVIEWER_SITE_ID
except AttributeError:
reviewer_site_id = None
options = {'OPEN':OPEN,
'CLOSED':CLOSED,
'FEMALE':FEMALE,
'NEW':NEW,
'NO':NO,
'NOT_APPLICABLE':NOT_APPLICABLE,
'OTHER':OTHER,
'YES':YES,
'reviewer_site_id':reviewer_site_id}
try:
response.context_data.update(**options)
except AttributeError:
response.renderer_context.update(**options)
return response

opennode/nodeconductor-assembly-waldur | src/waldur_auth_valimo/client.py | Python | mit | 9,636 | 0.002283
import logging
from urllib.parse import urljoin
import lxml.etree # noqa: S410
import requests
from django.conf import settings as django_settings
from django.utils import timezone
logger = logging.getLogger(__name__)
class ClientError(Exception):
pass
class ResponseParseError(ClientError):
pass
class ResponseStatusError(ClientError):
pass
class RequestError(ClientError):
def __init__(self, message, response=None):
super(RequestError, self).__init__(message)
self.response = response
class UnknownStatusError(ResponseParseError):
pass
class Response:
ns_namespace = 'http://uri.etsi.org/TS102204/v1.1.2#'
def __init__(self, content):
etree = lxml.etree.fromstring(content) # noqa: S320
self.init_response_attributes(etree)
def init_response_attributes(self, etree):
""" Define response attributes based on valimo request content """
raise NotImplementedError
class Request:
url = NotImplemented
template = NotImplemented
response_class = NotImplemented
settings = getattr(django_settings, 'WALDUR_AUTH_VALIMO', {})
@classmethod
def execute(cls, **kwargs):
url = cls._get_url()
headers = {
'content-type': 'text/xml',
'SOAPAction': url,
}
data = cls.template.strip().format(
AP_ID=cls.settings['AP_ID'],
AP_PWD=cls.settings['AP_PWD'],
Instant=cls._format_datetime(timezone.now()),
DNSName=cls.settings['DNSName'],
**kwargs
)
cert = (cls.settings['cert_path'], cls.settings['key_path'])
# TODO: add verification
logger.debug(
'Executing POST request to %s with data:\n %s \nheaders: %s',
url,
data,
headers,
)
response = requests.post(
url,
data=data,
headers=headers,
cert=cert,
verify=cls.settings['verify_ssl'],
)
if response.ok:
return cls.response_class(response.content)
else:
message = (
'Failed to execute POST request against %s endpoint. Response [%s]: %s'
% (url, response.status_code, response.content)
)
raise RequestError(message, response)
@classmethod
def _format_datetime(cls, d):
return d.strftime('%Y-%m-%dT%H:%M:%S.000Z')
@classmethod
def _format_transaction_id(cls, transaction_id):
return ('_' + transaction_id)[:32] # such formation is required by server.
@classmethod
def _get_url(cls):
return urljoin(cls.settings['URL'], cls.url)
class SignatureResponse(Response):
def init_response_attributes(self, etree):
try:
self.backend_transaction_id = etree.xpath('//MSS_SignatureResp')[0].attrib[
'MSSP_TransID'
]
self.status = etree.xpath(
'//ns6:StatusCode', namespaces={'ns6': self.ns_namespace}
)[0].attrib['Value']
except (IndexError, KeyError, lxml.etree.XMLSchemaError) as e:
raise ResponseParseError(
'Cannot parse signature response: %s. Response content: %s'
% (e, lxml.etree.tostring(etree))
)
class SignatureRequest(Request):
url = '/MSSP/services/MSS_Signature'
template = """
<?xml version="1.0" encoding="UTF-8"?>
<soapenv:Envelope xmlns:soapenv="http://www.w3.org/2003/05/soap-envelope"
xmlns:xsd="http://www.w3.org/2001/XMLSchema"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<soapenv:Body>
<MSS_Signature xmlns="">
<MSS_SignatureReq MajorVersion="1" MessagingMode="{MessagingMode}" MinorVersion="1" TimeOut="300">
<ns1:AP_Info AP_ID="{AP_ID}" AP_PWD="{AP_PWD}" AP_TransID="{AP_TransID}"
Instant="{Instant}" xmlns:ns1="http://uri.etsi.org/TS102204/v1.1.2#"/>
<ns2:MSSP_Info xmlns:ns2="http://uri.etsi.org/TS102204/v1.1.2#">
<ns2:MSSP_ID>
<ns2:DNSName>{DNSName}</ns2:DNSName>
</ns2:MSSP_ID>
</ns2:MSSP_Info>
<ns3:MobileUser xmlns:ns3="http://uri.etsi.org/TS102204/v1.1.2#">
<ns3:MSISDN>{MSISDN}</ns3:MSISDN>
</ns3:MobileUser>
<ns4:DataToBeSigned Encoding="UTF-8" MimeType="text/plain" xmlns:ns4="http://uri.etsi.org/TS102204/v1.1.2#">
{DataToBeSigned}
</ns4:DataToBeSigned>
<ns5:SignatureProfile xmlns:ns5="http://uri.etsi.org/TS102204/v1.1.2#">
<ns5:mssURI>{SignatureProfile}</ns5:mssURI>
</ns5:SignatureProfile>
<ns6:MSS_Format xmlns:ns6="http://uri.etsi.org/TS102204/v1.1.2#">
<ns6:mssURI>http://uri.etsi.org/TS102204/v1.1.2#PKCS7</ns6:mssURI>
</ns6:MSS_Format>
</MSS_SignatureReq>
</MSS_Signature>
</soapenv:Body>
</soapenv:Envelope>
""
|
"
response_class = SignatureResponse
@classmethod
def execute(cls, transaction_id, phone, message):
kwargs = {
'MessagingMode': 'asynchClientServer',
'AP_TransID': cls._format_transaction_id(transaction_id),
'MSISDN': phone,
'DataToBeSigned': '%s %s' % (cls.settings['message_prefix'], message),
'SignatureProfile': cls.settings['SignatureProfile'],
}
return super(SignatureRequest, cls).execute(**kwargs)
class Statuses:
OK = 'OK'
PROCESSING = 'Processing'
ERRED = 'Erred'
@classmethod
def map(cls, status_code):
if status_code == '502':
return cls.OK
elif status_code == '504':
return cls.PROCESSING
else:
raise UnknownStatusError(
'Received unsupported status in response: %s' % status_code
)
class StatusResponse(Response):
def init_response_attributes(self, etree):
try:
status_code = etree.xpath(
'//ns5:StatusCode', namespaces={'ns5': self.ns_namespace}
)[0].attrib['Value']
except (IndexError, KeyError, lxml.etree.XMLSchemaError) as e:
raise ResponseParseError(
'Cannot parse status response: %s. Response content: %s'
% (e, lxml.etree.tostring(etree))
)
self.status = Statuses.map(status_code)
try:
civil_number_tag = etree.xpath(
'//ns4:UserIdentifier', namespaces={'ns4': self.ns_namespace}
)[0]
except IndexError:
# civil number tag does not exist - this is possible if request is still processing
return
else:
try:
self.civil_number = civil_number_tag.text.split('=')[1]
except IndexError:
raise ResponseParseError(
'Cannot get civil_number from tag text: %s' % civil_number_tag.text
)
class ErredStatusResponse(Response):
soapenv_namespace = 'http://www.w3.org/2003/05/soap-envelope'
def init_response_attributes(self, etree):
self.status = Statuses.ERRED
try:
self.details = etree.xpath(
'//soapenv:Text', namespaces={'soapenv': self.soapenv_namespace}
)[0].text
except (IndexError, lxml.etree.XMLSchemaError) as e:
raise ResponseParseError(
'Cannot parse error status response: %s. Response content: %s'
% (e, lxml.etree.tostring(etree))
)
class StatusRequest(Request):
url = '/MSSP/services/MSS_StatusPort'
template = """
<?xml version="1.0" encoding="UTF-8"?>
<soapenv:Envelope xmlns:soapenv="http://www.w3.org/2003/05/soap-envelope"
xmlns:xsd="http://www.w3.

EdisonAlgorithms/HackerRank | practice/data-structures/heap/find-median-1/find-median-1.py | Python | mit | 1,239 | 0.008071
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Author: Zeyuan Shang
# @Date: 2015-12-01 01:05:22
# @Last Modified by: Zeyuan Shang
# @Last Modified time: 2015-12-01 01:05:37
import heapq
class MedianFinder:
def __init__(self):
"""
Initialize your data structure here.
"""
self.smallHeap = []
self.largeHeap = []
def addNum(self, num):
"""
Adds a num into the data structure.
:type num: int
:rtype: void
"""
if len(self.smallHeap) == len(self.largeHeap):
heapq.heappush(self.largeHeap, -heapq.heappushpop(self.smallHeap, -num))
else:
heapq.heappush(self.smallHeap, -heapq.heappushpop(self.largeHeap, num))
def findMedian(self):
"""
Returns the median of current data stream
:rtype: float
"""
if len(self.smallHeap) == len(self.largeHeap):
return float(self.largeHeap[0] - self.smallHeap[0]) / 2
else:
return self.largeHeap[0]
if __name__ == "__main__":
n = int(raw_input())
mf = MedianFinder()
for i in range(n):
mf.addNum(int(raw_input()))
print mf.findMedian()

KuraudoTama/jenkins-events-handlers | event_handler.py | Python | apache-2.0 | 371 | 0.013477
import threading
import logging
import json
class EventHandler(threading.Thread):
log = logging.getLogger("events.EventHandler")
def __init__(self,event):
self.event=event.split(None)[0]
self.data = json.loads(event.lstrip(self.event).lstrip())
threading.Thread.__init__(self, name="EventHandler for event: <%s>" % event)

stackforge/networking-bagpipe-l2 | networking_bagpipe/tests/unit/agent/sfc/test_agent_extension.py | Python | apache-2.0 | 54,420 | 0
# Copyright (c) 2017 Orange.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
from unittest import mock
from oslo_serialization import jsonutils
from oslo_utils import uuidutils
from networking_bagpipe.agent.sfc import agent_extension as bagpipe_agt_ext
from networking_bagpipe.bagpipe_bgp import constants as bbgp_const
from networking_bagpipe.driver import constants as sfc_const
from networking_bagpipe.objects import sfc as sfc_obj
from networking_bagpipe.tests.unit.agent import base
from neutron.api.rpc.callbacks.consumer import registry
from neutron.api.rpc.callbacks import events as rpc_events
from neutron.api.rpc.handlers import resources_rpc
from neutron.plugins.ml2.drivers.linuxbridge.agent.common \
import constants as lb_agt_constants
from neutron.plugins.ml2.drivers.linuxbridge.agent.linuxbridge_neutron_agent \
import LinuxBridgeManager
CHAIN_HOP_CLASSIFIER = {"protocol": "tcp",
"sourcePrefix": "1.2.3.4/32",
"destinationPort": "80",
"destinationPrefix": "5.6.7.8/32"}
CHAIN_HOP_REVERSE_CLASSIFIER = {"protocol": "tcp",
"sourcePrefix": "5.6.7.8/32",
"sourcePort": "80",
"destinationPrefix": "1.2.3.4/32"}
CHAIN_HOP_RT1000 = ['SFC_L3:1000']
CHAIN_HOP_EGRESS_TO_RT1002 = 'SFC_L3:1002'
CHAIN_HOP_EGRESS_PARAMS1 = {'readv_from_rts': ['SFC_L3:1001'],
'readv_to_rt': CHAIN_HOP_EGRESS_TO_RT1002,
'redirect_rts': ['SFC_L3:1003']}
CHAIN_HOP_RT2000 = ['SFC_L3:2000']
CHAIN_HOP_EGRESS_TO_RT2002 = 'SFC_L3:2002'
CHAIN_HOP_EGRESS_PARAMS2 = {'readv_from_rts': ['SFC_L3:2001'],
'readv_to_rt': CHAIN_HOP_EGRESS_TO_RT2002,
'redirect_rts': ['SFC_L3:2003']}
net_ports = {
base.NETWORK1['id']: [base.PORT10['id'], base.PORT11['id']],
base.NETWORK2['id']: [base.PORT20['id'], base.PORT21['id']]
}
class TestSfcAgentExtension(base.BaseTestLinuxBridgeAgentExtension):
agent_extension_class = bagpipe_agt_ext.BagpipeSfcAgentExtension
def setUp(self):
super(TestSfcAgentExtension, self).setUp()
self.mocked_bulk_rpc = mock.patch.object(
self.agent_ext._pull_rpc, 'bulk_pull').start()
@mock.patch.object(registry, 'register')
@mock.patch.object(resources_rpc, 'ResourcesPushRpcCallback')
def test_initialize_rpcs(self, rpc_mock, subscribe_mock):
self.agent_ext.initialize(self.connection,
lb_agt_constants.EXTENSION_DRIVER_TYPE)
self.connection.create_consumer.assert_has_calls(
[mock.call(
resources_rpc.resource_type_versioned_topic(resource_type),
[rpc_mock()],
fanout=True)
for resource_type in (
sfc_obj.BaGPipeChainHop.obj_name(),
sfc_obj.BaGPipePortHops.obj_name())],
any_order=True
)
subscribe_mock.assert_has_calls(
[
mock.call(mock.ANY, sfc_obj.BaGPipeChainHop.obj_name()),
mock.call(mock.ANY, sfc_obj.BaGPipePortHops.obj_name())
],
any_order=True
)
def _check_port_sfc_info(self, port_id, sides=None):
self.assertIn(port_id, self.agent_ext.ports_info)
port_info = self.agent_ext.ports_info[port_id]
if sides:
self.assertTrue(port_info.chain_hops)
for side in sides:
self.assertTrue(port_info.chain_hops[side])
hop_keys = list(sfc_obj.BaGPipeChainHop.fields)
if side == sfc_const.EGRESS:
hop_keys += ['lb_consistent_hash_order']
self.assertTrue(all(key in hop_keys for
key in list(port_info.chain_hops[side])))
else:
self.assertFalse(port_info.chain_hops)
def _fake_chain_hop(self, portchain_id, rts,
ingress_network, egress_network,
reverse_hop=False,
**chain_hop_params):
chain_hop = dict(
id=uuidutils.generate_uuid(),
portchain_id=portchain_id,
rts=rts,
ingress_gw=ingress_network['gateway_ip'],
egress_gw=egress_network['gateway_ip'],
ingress_ports=net_ports[ingress_network['id']],
egress_ports=net_ports[egress_network['id']],
reverse_hop=reverse_hop,
**chain_hop_params
)
return sfc_obj.BaGPipeChainHop(**chain_hop)
def _chain_hops_notif(self, chain_hops, event_type):
self.agent_ext.handle_sfc_chain_hops(
None, sfc_obj.BaGPipeChainHop.obj_name(),
chain_hops, event_type)
def _fake_port_hops(self, port_id, ingress_hops=None, egress_hops=None):
port_hops = dict(
port_id=port_id,
ingress_hops=ingress_hops if ingress_hops else [],
egress_hops=egress_hops if egress_hops else []
)
return sfc_obj.BaGPipePortHops(**port_hops)
def _port_hops_notif(self, port_hops, event_type):
self.agent_ext.handle_sfc_port_hops(
None, sfc_obj.BaGPipePortHops.obj_name(),
port_hops, event_type)
def test_chain_hop_before_port_up_ingress_only(self):
ingress_hop = self._fake_chain_hop(
portchain_id=uuidutils.generate_uuid(),
rts=CHAIN_HOP_RT1000,
ingress_network=base.NETWORK1,
egress_network=base.NETWORK2)
self.mocked_bulk_rpc.return_value = [ingress_hop]
# Verify build callback attachments
def check_build_cb(port_id):
linuxbr1 = LinuxBridgeManager.get_bridge_name(base.NETWORK1['id'])
self.assertDictEqual(
dict(
network_id=base.NETWORK1['id'],
ipvpn=[dict(
ip_address=base.PORT10['ip_address'],
mac_address=base.PORT10['mac_address'],
gateway_ip=base.NETWORK1['gateway_ip'],
local_port=dict(linuxif=linuxbr1),
import_rt=CHAIN_HOP_RT1000,
export_rt=[]
)]
),
self.agent_ext.build_sfc_attach_info(base.PORT10['id'])
)
# we need to check what build_sfc_attach_info returns, at the
# precise time when do_port_plug is called
self.mocked_bagpipe_agent.do_port_plug.side_effect = check_build_cb
self.agent_ext.handle_port(None, self._port_data(base.PORT10))
self.mocked_bagpipe_agent.do_port_plug.assert_has_calls(
[mock.call(base.PORT10['id'])]
)
# Verify attachments list consistency
self._check_network_info(base.NETWORK1['id'], 1)
self._check_port_sfc_info(base.PORT10['id'], [sfc_const.INGRESS])
def test_chain_hop_before_port_up_egress_only(self):
egress_params = copy.copy(CHAIN_HOP_EGRESS_PARAMS1)
egress_params.update(
dict(classifiers=jsonutils.dumps([CHAIN_HOP_CLASSIFIER]))
)
egress_hop = self._fake_chain_hop(
portchain_id=uuidutils.generate_uuid(),
rts=CHAIN_HOP_RT1000,
ingress_network=base.NETWORK2,
egress_network=base.NETWORK1,
**egress_params)
self.mocked_bulk_rpc.return_value = [egress_hop]
# Verify build callback attachments
def check_bui

lcgong/sqlblock | sqlblock/setup.py | Python | mit | 372 | 0.002688
import sys
from sqlblock.postgres.connection import AsyncPostgresSQL
def aiohttp_setup_sqlblock(app, conn: AsyncPostgresSQL):
async def startup(app):
await conn.__aenter__()
async def shutdown(app):
await conn.__aexit__(*sys.exc_info())
print('closed sqlblock')
app.on_startup.append(startup)
app.on_cleanup.append(shutdown)

xzturn/caffe2 | caffe2/python/toy_regression_test.py | Python | apache-2.0 | 2,822 | 0
import numpy as np
import unittest
from caffe2.python import core, workspace, test_util
class TestToyRegression(test_util.TestCase):
def testToyRegression(self):
"""Tests a toy regression end to end.
The test code carries a simple toy regression in the form
y = 2.0 x1 + 1.5 x2 + 0.5
by randomly generating gaussian inputs and calculating the ground
truth outputs in the net as well. It uses a standard SGD to then
train the parameters.
"""
workspace.ResetWorkspace()
init_net = core.Net("init")
W = init_net.UniformFill([], "W", shape=[1, 2], min=-1., max=1.)
B = init_net.ConstantFill([], "B", shape=[1], value=0.0)
W_gt = init_net.GivenTensorFill(
[], "W_gt", shape=[1, 2], values=[2.0, 1.5])
B_gt = init_net.GivenTensorFill([], "B_gt", shape=[1], values=[0.5])
LR = init_net.ConstantFill([], "LR", shape=[1], value=-0.1)
ONE = init_net.ConstantFill([], "ONE", shape=[1], value=1.)
ITER = init_net.ConstantFill([], "ITER", shape=[1], value=0,
dtype=core.DataType.INT32)
train_net = core.Net("train")
X = train_net.GaussianFill([], "X", shape=[64, 2], mean=0.0, std=1.0)
Y_gt = X.FC([W_gt, B_gt], "Y_gt")
Y_pred = X.FC([W, B], "Y_pred")
dist = train_net.SquaredL2Distance([Y_gt, Y_pred], "dist")
loss = dist.AveragedLoss([], ["loss"])
# Get gradients for all the computations above. Note that in fact we
# don't need to get the gradient the Y_gt computation, but we'll just
# leave it there. In many cases, I am expecting one to load X and Y
# from the disk, so there is really no operator that will calculate the
# Y_gt input.
input_to_grad = train_net.AddGradientOperators([loss], skip=2)
# updates
train_net.Iter(ITER, ITER)
train_net.LearningRate(ITER, "LR", base_lr=-0.1,
policy="step", stepsize=20, gamma=0.9)
train_net.WeightedSum([W, ONE, input_to_grad[str(W)], LR], W)
train_net.WeightedSum([B, ONE, input_to_grad[str(B)], LR], B)
for blob in [loss, W, B]:
train_net.Print(blob, [])
# the CPU part.
plan = core.Plan("toy_regression")
plan.AddStep(core.ExecutionStep("init", init_net))
plan.AddStep(core.ExecutionStep("train", train_net, 200))
workspace.RunPlan(plan)
W_result = workspace.FetchBlob("W")
B_result = workspace.FetchBlob("B")
np.testing.assert_array_almost_equal(W_result, [[2.0, 1.5]], decimal=2)
np.testing.assert_array_almost_equal(B_result, [0.5], decimal=2)
workspace.ResetWorkspace()
if __name__ == '__main__':
unittest.main()

bcb/qutebrowser | tests/integration/features/test_tabs.py | Python | gpl-3.0 | 1,049 | 0
# vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:
# Copyright 2015-2016 Florian Bruhin (The Compiler) <mail@qutebrowser.org>
#
# This file is part of qutebrowser.
#
# qutebrowser is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# qutebrowser is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with qutebrowser. If not, see <http://www.gnu.org/licenses/>.
import pytest_bdd as bdd
bdd.scenarios('tabs.feature')
@bdd.given("I clean up open tabs")
def clean_open_tabs(quteproc):
quteproc.set_setting('tabs', 'last-close', 'blank')
quteproc.send_cmd(':tab-only')
quteproc.send_cmd(':tab-close')

BogdanWDK/ajaxbot | src/files/isup.py | Python | gpl-2.0 | 433 | 0.013857
#!/usr/bin/env python
import re
import sys
from urllib import urlopen
def isup(domain):
resp = urlopen("http://www.isup.me/%s" % domain).read()
return "%s
|
" % ("UP" if re.search("It's just you.", resp,
re.DOTALL) else "DOWN")
if __name__ == '__main__':
if len(sys.argv) > 1:
print "\n".join(isup(d) for d in sys.argv[1:])
else:
print "usage: %s domain1 [domain2 .. domainN]" % sys.argv[0]

abstract-open-solutions/l10n-italy | l10n_it_ateco/__openerp__.py | Python | agpl-3.0 | 1,563 | 0
# -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014 Abstract
# (<http://abstract.it>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
"name": "Ateco codes",
"version": "8.0.1.0.0",
"category": "Localisation/Italy",
"description": """Italian Localisation module - Ateco codes
Functionalities:
- Add Ateco codes model
- Reference Ateco codes to partner model
""",
"author": "Abstract,Odoo Community Association (OCA)",
"website": "http://abstract.it",
"license": "AGPL-3",
"depends": [
"base"
],
"data": [
"security/ir.model.access.csv",
"view/ateco_view.xml",
"view/partner_view.xml",
"data/ateco_data.xml"
],
"active": False,
"installable": True
}

benguillet/wmr-frontend | lib/thrift/Thrift.py | Python | apache-2.0 | 3,530 | 0.027479
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import sys
class TType:
STOP = 0
VOID = 1
BOOL = 2
BYTE = 3
I08 = 3
DOUBLE = 4
I16 = 6
I32 = 8
I64 = 10
STRING = 11
UTF7 = 11
STRUCT = 12
MAP = 13
SET = 14
LIST = 15
UTF8 = 16
UTF16 = 17
class TMessageType:
CALL = 1
REPLY = 2
EXCEPTION = 3
ONEWAY = 4
class TProcessor:
"""Base class for procsessor, which works on two streams."""
def process(iprot, oprot):
pass
class TException(Exception):
"""Base class for all thrift exceptions."""
# BaseException.message is deprecated in Python v[2.6,3.0)
if (2,6,0) <= sys.version_info < (3,0):
def _get_message(self):
return self._message
def _set_message(self, message):
self._message = message
message = property(_get_message, _set_message)
def __init__(self, message=None):
Exception.__init__(self, message)
self.message = message
class TApplicationException(TException):
"""Application level thrift exceptions."""
UNKNOWN = 0
UNKNOWN_METHOD = 1
INVALID_MESSAGE_TYPE = 2
WRONG_METHOD_NAME = 3
BAD_SEQUENCE_ID = 4
MISSING_RESULT = 5
INTERNAL_ERROR = 6
PROTOCOL_ERROR = 7
def __init__(self, type=UNKNOWN, message=None):
TException.__init__(self, message)
self.type = type
def __str__(self):
if self.message:
return self.message
elif self.type == self.UNKNOWN_METHOD:
return 'Unknown method'
elif self.type == self.INVALID_MESSAGE_TYPE:
return 'Invalid message type'
elif self.type == self.WRONG_METHOD_NAME:
return 'Wrong method name'
elif self.type == self.BAD_SEQUENCE_ID:
return 'Bad sequence ID'
elif self.type == self.MISSING_RESULT:
return 'Missing result'
else:
return 'Default (unknown) TApplicationException'
def read(self, iprot):
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.message = iprot.readString();
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.I32:
self.type = iprot.readI32();
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
oprot.writeStructBegin('TApplicationException')
if self.message != None:
oprot.writeFieldBegin('message', TType.STRING, 1)
oprot.writeString(self.message)
oprot.writeFieldEnd()
if self.type != None:
oprot.writeFieldBegin('type', TType.I32, 2)
oprot.writeI32(self.type)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()

MSLNZ/msl-qt | msl/examples/qt/show_standard_icons.py | Python | mit | 5,135 | 0.000584
"""
Display all the icons available in :obj:`QtWidgets.QStyle.StandardPixmap` and in
the *standard* Windows DLL/EXE files.
"""
from msl.qt import (
QtWidgets,
QtCore,
application,
convert
)
try:
# check if pythonnet is installed
import clr
has_clr = True
except ImportError:
has_clr = False
class ShowStandardIcons(object):
def __init__(self):
app = application()
self.tab_widget = QtWidgets.QTabWidget()
self.main_window = QtWidgets.QMainWindow()
self.main_window.setWindowTitle('Standard Icons')
self.main_window.setCentralWidget(self.tab_widget)
self.main_window.closeEvent = self.close_event
# add a progress bar to the status bar
self.progress_bar = QtWidgets.QProgressBar(self.main_window.statusBar())
self.progress_bar.setAlignment(QtCore.Qt.AlignCenter)
self.main_window.statusBar().addPermanentWidget(self.progress_bar)
self.main_window.showMaximized()
self.num_icons = 0
self.file_index = 0
self.zoom_widget = QtWidgets.QDialog()
self.zoom_widget.setSizeGripEnabled(True)
self.zoom_widget.resize(QtCore.QSize(256, 256))
self.zoom_widget.setWindowFlags(QtCore.Qt.WindowCloseButtonHint)
vbox = QtWidgets.QVBoxLayout()
self.zoom_label = QtWidgets.QLabel()
self.zoom_label.setScaledContents(True)
vbox.addWidget(self.zoom_label)
self.zoom_widget.setLayout(vbox)
qt_icons = [sp for sp in dir(QtWidgets.QStyle) if sp.startswith('SP_')]
self.windows_files = [
'accessibilitycpl',
'compstui',
'ddores',
'dmdskres',
'explorer',
'gameux',
'ieframe',
'imageres',
'mmcndmgr',
'mmres',
'moricons',
'netcenter',
'netshell',
'networkexplorer',
'pifmgr',
'pnidui',
'sensorscpl',
'setupapi',
'shell32',
'wmploc',
'wpdshext'
]
self.num_files = 1 + len(self.windows_files)
self.progress_bar.setRange(0, self.num_files)
self.add_qt_tab('Qt Icons', qt_icons)
if has_clr:
self.windows_index = 0
self.timer = QtCore.QTimer()
self.timer.timeout.connect(self.add_windows_tab)
self.timer.start(0)
else:
self.update_message('Loaded {} icons.'.format(self.num_icons))
self.progress_bar.hide()
app.exec()
def add_qt_tab(self, label, icons):
"""Add the Qt icons."""
tab = QtWidgets.QWidget()
self.tab_widget.addTab(tab, label)
layout = QtWidgets.QGridLayout()
self.update_message('Loading Qt icons...')
count = 0
num_cols = 4
for i in icons:
button = QtWidgets.QPushButton(i)
ico = convert.to_qicon(getattr(QtWidgets.QStyle, i))
button.setIcon(ico)
button.clicked.connect(lambda *args, ic=ico, n=i: self.zoom(ic, n))
layout.addWidget(button, count // num_cols, count % num_cols)
count += 1
self.num_icons += 1
tab.setLayout(layout)
self.file_index += 1
self.progress_bar.setValue(self.file_index)
def add_windows_tab(self):
"""Add the icons from the Windows DLL and EXE files."""
num_cols = 16
filename = self.windows_files[self.windows_index]
self.update_message('Loading icons from {}...'.format(filename))
tab = QtWidgets.QWidget()
self.tab_widget.addTab(tab, filename)
layout = QtWidgets.QGridLayout()
index = 0
while True:
button = QtWidgets.QPushButton(str(index))
try:
name = '{}|{}'.format(filename, str(index))
ico = convert.to_qicon(name)
except OSError:
break
button.setIcon(ico)
button.clicked.connect(lambda *args, ic=ico, n=name: self.zoom(ic, n))
layout.addWidget(button, index // num_cols, index % num_cols)
index += 1
self.num_icons += 1
self.file_index += 1
self.progress_bar.setValue(self.file_index)
tab.setLayout(layout)
self.windows_index += 1
if self.windows_index == len(self.windows_files):
self.timer.stop()
self.update_message('Loaded {} icons.'.format(self.num_icons))
self.progress_bar.hide()
def update_message(self, text):
self.main_window.statusBar().showMessage(text)
def zoom(self, ico, name):
self.zoom_widget.setWindowTitle(name)
self.zoom_label.setPixmap(ico.pixmap(self.zoom_widget.size()))
self.zoom_widget.setWindowState(QtCore.Qt.WindowActive)
self.zoom_widget.activateWindow()
self.zoom_widget.show()
def close_event(self, event):
self.zoom_widget.close()
event.accept()
if __name__ == '__main__':
ShowStandardIcons()
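# Hedged usage sketch of the icon-loading pattern above: 'shell32|4' is a
# hypothetical 'file|index' pair addressing icon 4 inside shell32.dll, so
# this only works on Windows (and DLL browsing needs pythonnet):
#
#   from msl.qt import application, convert, QtWidgets
#   app = application()
#   button = QtWidgets.QPushButton('folder')
#   button.setIcon(convert.to_qicon('shell32|4'))
#   button.show()
#   app.exec()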
|
awalls-cx18/gnuradio
|
gr-uhd/examples/python/freq_hopping.py
|
Python
|
gpl-3.0
| 9,480
| 0.001793
|
#!/usr/bin/env python
#
# Copyright 2014,2019 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
"""
TXs a waveform (either from a file, or a sinusoid) in a frequency-hopping manner.
"""
from __future__ import print_function
import argparse
import numpy
import pmt
from gnuradio import gr
from gnuradio import blocks
from gnuradio import uhd
def setup_parser():
""" Setup the parser for the frequency hopper. """
parser = argparse.ArgumentParser(
description="Transmit a signal in a frequency-hopping manner, using tx_freq tags."
)
parser.add_argument(
'-i', '--input-file',
help="File with samples to transmit. If left out, will transmit a sinusoid.")
parser.add_argument(
"-a", "--args", default="",
help="UHD device address args.")
parser.add_argument(
"--spec", default="",
help="UHD subdev spec.")
parser.add_argument(
"--antenna", default="",
help="UHD antenna settings.")
parser.add_argument(
"--gain", default=None, type=float,
help="USRP gain (defaults to mid-point in dB).")
parser.add_argument(
"-r", "--rate", type=float, default=1e6,
help="Sampling rate")
parser.add_argument(
"-N", "--samp-per-burst", type=int, default=10000,
help="Samples per burst")
parser.add_argument(
"-t", "--hop-time", type=float, default=1000,
help="Time between hops in milliseconds. This must be larger than or "
"equal to the burst duration as set by --samp-per-burst")
parser.add_argument(
"-f", "--freq", type=float, default=2.45e9,
help="Base frequency. This is the middle channel frequency at which "
"the USRP will Tx.")
parser.add_argument(
"--dsp", action='store_true',
help="DSP tuning only.")
parser.add_argument(
"-d", "--freq-delta", type=float, default=1e6,
help="Channel spacing.")
parser.add_argument(
"-c", "--num-channels", type=int, default=5,
help="Number of channels.")
parser.add_argument(
"-B", "--num-bursts", type=int, default=30,
help="Number of bursts to transmit before terminating.")
parser.add_argument(
"-p", "--post-tuning", action='count',
help="Tune after transmitting. Default is to tune immediately before "
"transmitting.")
parser.add_argument(
"-v", "--verbose", action='count',
help="Print more information. The morer the printier.")
return parser
class FrequencyHopperSrc(gr.hier_block2):
""" Provides tags for frequency hopping """
def __init__(
self,
n_bursts, n_channels,
freq_delta, base_freq, dsp_tuning,
burst_length, base_time, hop_time,
post_tuning=False,
tx_gain=0,
verbose=False
):
gr.hier_block2.__init__(
self, "FrequencyHopperSrc",
gr.io_signature(1, 1, gr.sizeof_gr_complex),
gr.io_signature(1, 1, gr.sizeof_gr_complex),
)
n_samples_total = n_bursts * burst_length
lowest_frequency = base_freq - numpy.floor(n_channels/2) * freq_delta
self.hop_sequence = [lowest_frequency + n * freq_delta for n in range(n_channels)]
numpy.random.shuffle(self.hop_sequence)
# Repeat that:
self.hop_sequence = [self.hop_sequence[x % n_channels] for x in range(n_bursts)]
if verbose:
print("Hop Frequencies | Hop Pattern")
print("=================|================================")
for f in self.hop_sequence:
print("{:6.3f} MHz | ".format(f/1e6), end='')
if n_channels < 50:
print(" " * int((f - base_freq) / freq_delta) + "#")
else:
print("\n")
print("=================|================================")
# There's no real point in setting the gain via tag for this application,
# but this is an example to show you how to do it.
gain_tag = gr.tag_t()
gain_tag.offset = 0
gain_tag.key = pmt.string_to_symbol('tx_command')
gain_tag.value = pmt.to_pmt({'gain': tx_gain})
tag_list = [gain_tag,]
for i in range(len(self.hop_sequence)):
tune_tag = gr.tag_t()
tune_tag.offset = i * burst_length
# TODO dsp_tuning should also be able to do post_tuning
if i > 0 and post_tuning and not dsp_tuning:
tune_tag.offset -= 1 # Move it to last sample of previous burst
if dsp_tuning:
tune_tag.key = pmt.string_to_symbol('tx_command')
tune_tag.value = pmt.to_pmt({'lo_freq': base_freq, 'dsp_freq': base_freq - self.hop_sequence[i]})
else:
tune_tag.key = pmt.string_to_symbol('tx_freq')
tune_tag.value = pmt.to_pmt(self.hop_sequence[i])
tag_list.append(tune_tag)
length_tag = gr.tag_t()
length_tag.offset = i * burst_length
length_tag.key = pmt.string_to_symbol('packet_len')
length_tag.value = pmt.from_long(burst_length)
tag_list.append(length_tag)
time_tag = gr.tag_t()
time_tag.offset = i * burst_length
time_tag.key = pmt.string_to_symbol('tx_time')
time_tag.value = pmt.make_tuple(
pmt.from_uint64(int(base_time + i * hop_time)),
pmt.from_double((base_time + i * hop_time) % 1),
)
tag_list.append(time_tag)
tag_source = blocks.vector_source_c((1.0,) * n_samples_total, repeat=False, tags=tag_list)
mult = blocks.multiply_cc()
self.connect(self, mult, self)
self.connect(tag_source, (mult, 1))
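# Hedged stand-alone construction sketch (argument values are illustrative,
# not taken from the original script; requires a GNU Radio runtime):
#
#   hopper = FrequencyHopperSrc(
#       n_bursts=30, n_channels=5,
#       freq_delta=1e6, base_freq=2.45e9, dsp_tuning=False,
#       burst_length=10000, base_time=1.0, hop_time=1.0,
#   )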
class FlowGraph(gr.top_block):
""" Flow graph that does the frequency hopping. """
def __init__(self, args):
gr.top_block.__init__(self)
if args.input_file is not None:
            src = blocks.file_source(gr.sizeof_gr_complex, args.input_file, repeat=True)
else:
src = blocks.vector_source_c((.5,) * int(1e6) * 2, repeat=True)
# Setup USRP
self.usrp = uhd.usrp_sink(
args.args,
uhd.stream_args('fc32'),
"packet_len"
)
if args.spec:
self.usrp.set_subdev_spec(args.spec, 0)
if args.antenna:
self.usrp.set_antenna(args.antenna, 0)
self.usrp.set_samp_rate(args.rate)
# Gain is set in the hopper block
if not args.gain:
gain_range = self.usrp.get_gain_range()
args.gain = float(gain_range.start() + gain_range.stop()) / 2.0
print("-- Setting gain to {} dB".format(args.gain))
if not self.usrp.set_center_freq(args.freq):
print('[ERROR] Failed to set base frequency.')
exit(1)
hopper_block = FrequencyHopperSrc(
args.num_bursts, args.num_channels,
args.freq_delta, args.freq, args.dsp,
args.samp_per_burst, 1.0, args.hop_time / 1000.,
args.post_tuning,
args.gain,
args.verbose,
)
self.connect(src, hopper_block, self.usrp)
def print_hopper_stats(args):
""" Nothing to do with Grace Hopper """
print("""
Parameter | Value
===================+=======================
|
alfasin/st2
|
st2actions/st2actions/utils/param_utils.py
|
Python
|
apache-2.0
| 14,817
| 0.002767
|
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import copy
import json
import six
from jinja2 import Template, Environment, StrictUndefined, meta, exceptions
from st2common import log as logging
from st2common.constants.action import ACTION_KV_PREFIX
from st2common.constants.system import SYSTEM_KV_PREFIX
from st2common.exceptions import actionrunner
from st2common.services.keyvalues import KeyValueLookup
from st2common.util.casts import get_cast
from st2common.util.compat import to_unicode
LOG = logging.getLogger(__name__)
__all__ = [
'get_resolved_params',
'get_rendered_params',
'get_finalized_params',
]
def _split_params(runner_parameters, action_parameters, mixed_params):
def pf(params, skips):
result = {k: v for k, v in six.iteritems(mixed_params)
if k in params and k not in skips}
return result
return (pf(runner_parameters, {}), pf(action_parameters, runner_parameters))
def _get_resolved_runner_params(runner_parameters, action_parameters,
actionexec_runner_parameters):
# Runner parameters should use the defaults from the RunnerType object.
# The runner parameter defaults may be overridden by values provided in
# the Action and liveaction.
# Create runner parameter by merging default values with dynamic values
resolved_params = {k: v['default'] if 'default' in v else None
for k, v in six.iteritems(runner_parameters)}
# pick overrides from action_parameters & actionexec_runner_parameters
for param_name, param_value in six.iteritems(runner_parameters):
# No override if param is immutable
if param_value.get('immutable', False):
continue
# Check if param exists in action_parameters and if it has a default value then
# pickup the override.
if param_name in action_parameters:
action_param = action_parameters[param_name]
if 'default' in action_param:
resolved_params[param_name] = action_param['default']
# No further override (from liveaction) if param is immutable
if action_param.get('immutable', False):
continue
# Finally pick up override from actionexec_runner_parameters
if param_name in actionexec_runner_parameters:
resolved_params[param_name] = actionexec_runner_parameters[param_name]
return resolved_params
def _get_resolved_action_params(runner_parameters, action_parameters,
actionexec_action_parameters):
# Create action parameters by merging default values with dynamic values
resolved_params = {k: v['default'] if 'default' in v else None
for k, v in six.iteritems(action_parameters)
if k not in runner_parameters}
# pick overrides from actionexec_action_parameters
for param_name, param_value in six.iteritems(action_parameters):
# No override if param is immutable
        if param_value.get('immutable', False):
continue
if param_name in actionexec_action_parameters and param_name not in runner_parameters:
resolved_params[param_name] = actionexec_action_parameters[param_name]
return resolved_params
def get_resolved_params(runnertype_parameter_info, action_parameter_info, actionexec_parameters):
'''
Looks at the parameter values from runner, action and action execution to fully resolve the
    values. Resolution is the process of determining the value of a parameter by taking into
consideration default, immutable and user supplied values.
'''
# Runner parameters should use the defaults from the RunnerType object.
# The runner parameter defaults may be overridden by values provided in
# the Action and liveaction.
actionexec_runner_parameters, actionexec_action_parameters = _split_params(
runnertype_parameter_info, action_parameter_info, actionexec_parameters)
runner_params = _get_resolved_runner_params(runnertype_parameter_info,
action_parameter_info,
actionexec_runner_parameters)
action_params = _get_resolved_action_params(runnertype_parameter_info,
action_parameter_info,
actionexec_action_parameters)
return runner_params, action_params
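# A minimal illustration of the precedence implemented above, using
# hypothetical parameter schemas: runner-level 'default' values lose to
# action-level defaults, which in turn lose to the user-supplied values,
# unless a level is marked 'immutable':
#
#   runner_info = {'timeout': {'default': 60}}
#   action_info = {'timeout': {'default': 120}, 'cmd': {'default': 'ls'}}
#   supplied = {'timeout': 10, 'cmd': 'date'}
#   get_resolved_params(runner_info, action_info, supplied)
#   # -> ({'timeout': 10}, {'cmd': 'date'})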
def _is_template(template_str):
template_str = to_unicode(template_str)
template = Template(template_str)
try:
return template_str != template.render({})
except exceptions.UndefinedError:
return True
def _renderable_context_param_split(action_parameters, runner_parameters, base_context=None):
# To render the params it is necessary to combine the params together so that cross
# parameter category references are resolved.
renderable_params = {}
# shallow copy since this will be updated
context_params = copy.copy(base_context) if base_context else {}
def do_render_context_split(source_params):
'''
Will split the supplied source_params into renderable_params and context_params. As part of
        the split also makes sure that all params are essentially strings.
'''
for k, v in six.iteritems(source_params):
renderable_v = v
# dict and list to be converted to str
if isinstance(renderable_v, dict) or isinstance(renderable_v, list):
renderable_v = json.dumps(renderable_v)
# only str can contain templates
if (isinstance(renderable_v, str) or isinstance(renderable_v, unicode)) and \
_is_template(renderable_v):
renderable_params[k] = renderable_v
elif isinstance(v, dict) or isinstance(v, list):
# For context use the renderable value for dict and list params. The template
                # rendering by jinja yields a non-json.loads-compatible value, leading to issues
# while performing casts.
context_params[k] = renderable_v
else:
# For context use the original value.
context_params[k] = v
do_render_context_split(action_parameters)
do_render_context_split(runner_parameters)
return (renderable_params, context_params)
def _check_availability(param, param_dependencies, renderable_params, context):
for dependency in param_dependencies:
if dependency not in renderable_params and dependency not in context:
return False
return True
def _check_cyclic(dep_chain, dependencies):
last_idx = len(dep_chain) - 1
last_value = dep_chain[last_idx]
for dependency in dependencies.get(last_value, []):
if dependency in dep_chain:
dep_chain.append(dependency)
return False
dep_chain.append(dependency)
if not _check_cyclic(dep_chain, dependencies):
return False
dep_chain.pop()
return True
def _validate_dependencies(renderable_params, context):
'''
Validates dependencies between the parameters.
e.g.
{
'a': '{{b}}',
'b': '{{a}}'
}
In this example 'a' requires 'b' for template rendering and vice-versa. There is no way for
    these templates to be rendered
|
argriffing/cvxpy
|
cvxpy/atoms/norm1.py
|
Python
|
gpl-3.0
| 735
| 0
|
"""
Copyright 2013 Steven Diamond
This file is part of CVXPY.
CVXPY is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
CVXPY is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with CVXPY. If not, see <http://www.gnu.org/licenses/>.
"""
from cvxpy.atoms.pnorm import pnorm
def norm1(x):
return pnorm(x, 1)
|
hainm/pythran
|
pythran/analyses/imported_ids.py
|
Python
|
bsd-3-clause
| 2,844
| 0
|
""" ImportedIds gathers identifiers imported by a node. """
from pythran.analyses.globals_analysis import Globals
from pythran.analyses.locals_analysis import Locals
from pythran.passmanager import NodeAnalysis
import pythran.metadata as md
import ast
class ImportedIds(NodeAnalysis):
"""Gather ids referenced by a node and not declared locally."""
def __init__(self):
self.result = set()
self.current_locals = set()
self.is_list = False
self.in_augassign = False
super(ImportedIds, self).__init__(Globals, Locals)
def visit_Name(self, node):
if isinstance(node.ctx, ast.Store) and not self.in_augassign:
self.current_locals.add(node.id)
elif (node.id not in self.visible_globals and
node.id not in self.current_locals):
self.result.add(node.id)
def visit_FunctionDef(self, node):
self.current_locals.add(node.name)
        current_locals = self.current_locals.copy()
self.current_locals.update(arg.id for arg in node.args.args)
map(self.visit, node.body)
self.current_locals = current_locals
def visit_AnyComp(self, node):
        current_locals = self.current_locals.copy()
map(self.visit, node.generators)
self.visit(node.elt)
self.current_locals = current_locals
visit_ListComp = visit_AnyComp
visit_SetComp = visit_AnyComp
visit_DictComp = visit_AnyComp
visit_GeneratorExp = visit_AnyComp
def visit_Assign(self, node):
        # order matters, as an assignment
        # is evaluated before being assigned
md.visit(self, node)
self.visit(node.value)
map(self.visit, node.targets)
def visit_AugAssign(self, node):
self.in_augassign = True
self.generic_visit(node)
self.in_augassign = False
def visit_Lambda(self, node):
current_locals = self.current_locals.copy()
self.current_locals.update(arg.id for arg in node.args.args)
self.visit(node.body)
self.current_locals = current_locals
def visit_Import(self, node):
self.current_locals.update(alias.name for alias in node.names)
def visit_ImportFrom(self, node):
self.current_locals.update(alias.name for alias in node.names)
def visit_Attribute(self, node):
pass
def prepare(self, node, ctx):
super(ImportedIds, self).prepare(node, ctx)
if self.is_list: # so that this pass can be called on list
node = node.body[0]
self.visible_globals = set(self.globals) - self.locals[node]
def run(self, node, ctx):
if isinstance(node, list): # so that this pass can be called on list
self.is_list = True
node = ast.If(ast.Num(1), node, [])
return super(ImportedIds, self).run(node, ctx)
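# Hedged usage sketch: analyses like this one are normally driven through
# a PassManager; treat the exact entry point as an assumption for this
# vintage of pythran:
#
#   from pythran.passmanager import PassManager
#   tree = ast.parse("def f(x):\n    return x + y")
#   # 'y' is read but never bound locally, so it is reported; 'x' is not.
#   imported = PassManager("demo").gather(ImportedIds, tree.body[0])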
|
mobiusklein/brainpy
|
brainpy/__init__.py
|
Python
|
apache-2.0
| 1,839
| 0.005982
|
'''
A Python Implementation of the Baffling Recursive Algorithm for Isotopic cluster distributioN
'''
import os
from .brainpy import (isotopic_variants, IsotopicDistribution, periodic_table,
max_variants, calculate_mass, neutral_mass, mass_charge_ratio,
PROTON, _has_c, Peak)
from .composition import parse_formula, PyComposition
SimpleComposition = PyComposition
def get_include():
"""Retrieve the path to compiled C extensions' source files to make linking simple.
    This module contains two variants of the algorithm reimplemented using C and the Python-C API.
The `_speedup` module is a direct translation of the pure Python implementation using Cython,
    using static typing and `cdef class` versions of the existing classes. As this implementation still
spends a substantial amount of time in Python-space, it is slower than the option below, but is more
    straight-forward to manipulate from Python.
The `_c` module is a complete rewrite of the algorithm directly in C, using Python only to load
mass configuration information and is fully usable. It exports an entrypoint function to Python
which replaces the :func:`isotopic_variants` function when available. Because almost all of the
action happens in C here, it's not possible to run individual parts of the process directly from
Python.
"""
return os.path.join(__path__[0], "_c")
if _has_c:
from .brainpy import _IsotopicDistribution
__author__ = "Joshua Klein & Han Hu"
__all__ = [
"isotopic_variants", "IsotopicDistribution", "periodic_table",
"max_variants", "calculate_mass", "neutral_mass", "mass_charge_ratio",
"PROTON", "_has_c", "Peak",
"parse_formula", "PyComposition", "SimpleComposition",
"_IsotopicDistribution",
"get_include"
]
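# Usage sketch mirroring the project README (treat the `npeaks` keyword
# as an assumption if your installed version differs):
#
#   peptide = {'H': 53, 'C': 34, 'O': 15, 'N': 7}
#   cluster = isotopic_variants(peptide, npeaks=5, charge=1)
#   for peak in cluster:
#       print(peak.mz, peak.intensity)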
|
davidfoerster/schema-matching
|
src/schema_matching/collector/probability.py
|
Python
|
mit
| 748
| 0.032086
|
from .base import ItemCollector
class BaseProbabilityCollector(ItemCollector):
# result_dependencies = (*CountCollector, *FrequencyCollector)
def __init__(self, previous_collector_set):
super().__init__(previous_collector_set)
self.__cached_result = None
def get_result(self, collector_set):
    if self.__cached_result is None:
self.__cached_result = \
collector_set[self.result_dependencies[1]].get_result(collector_set) \
.normalize(collector_set[self.result_dependencies[0]] \
.get_result(collector_set))
    return self.__cached_result
def as_str(self, collector_set, number_fmt=''):
return format(self.get_result(collector_set), number_fmt)
@staticmethod
def result_norm(a, b):
return a.distance_to(b)
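# Hedged sketch of the intended subclassing pattern, based on the
# result_dependencies hint above (CountCollector and FrequencyCollector
# stand in for the real collector classes from this package):
#
#   class ProbabilityCollector(BaseProbabilityCollector):
#       result_dependencies = (CountCollector, FrequencyCollector)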
|
darksteelcode/authorship
|
features/base.py
|
Python
|
gpl-3.0
| 1,482
| 0.006748
|
import numpy as np
#numpy is used for later classifiers
#Note: this is just a template with all required methods
#text is the text represented as a string
#textName is optional, indicates the name of the text, used for debug
#args are additional arguments for the feature calculator
#debug indicates whether to display debug info
#f is features
class BaseFeature():
def __init__(self, text, textName="", args=[], debug=True):
self.text = text.lower()
self.args = args
self.debug = debug
self.textName = textName
        #Features, not yet calculated
self.f = np.array([])
def debugStart(self):
if self.debug:
print "--BaseFeatures--"
def beginCalc(self):
if self.debug:
print "Feature calculation begining on " + self.textName
print "------"
def endCalc(self):
if self.debug:
print "Feature calculation finished on " + self.textName
print "Features Calculated:"
print self.f
print
def calc(self):
self.debugStart()
self.beginCalc()
#Calculations go here
self.endCalc()
return self.f
def getFeatures(self):
return self.f
def setText(self, text):
if self.debug:
print self.textName + "'s text set."
self.text = text.lower()
    def setName(self, name):
        self.textName = name
        if self.debug:
            print "Name set to: " + self.textName
|
GoogleCloudPlatform/solutions-google-compute-engine-cluster-for-hadoop
|
sample/shortest-to-longest-reducer.py
|
Python
|
apache-2.0
| 2,023
| 0.007909
|
#!/usr/bin/env python
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Reducer sample.
The word is already sorted in the desirable order.
The reducer counts the occurrence of each word and outputs the word
and its occurrence.
"""
import sys
class Word(object):
"""Class to keep current word's occurrence."""
def __init__(self, word):
self.word = word
self.count = 0
def Print(self):
print '%s\t%d' % (self.word, self.count)
def Increment(self, count=1):
self.count += count
class ShortestToLongestReducer(object):
"""Class to accumulate counts from reducer input lines."""
def __init__(self):
self.current_word = None
def PrintCurrentWord(self):
"""Outputs word count of the currently processing word."""
if self.current_word:
self.current_word.Print()
def ProcessLine(self, line):
"""Process an
|
input line.
Args:
line: Input line.
"""
# Split input to key and value.
key = line.split('\t', 1)[0]
# Split key to word-length and word.
word = key.split(':', 1)[1]
if not self.current_word:
self.current_word = Word(word)
elif self.current_word.word != word:
self.current_word.Print()
self.current_word = Word(word)
self.current_word.Increment()
def main(input_lines):
  reducer = ShortestToLongestReducer()
for line in input_lines:
reducer.ProcessLine(line)
reducer.PrintCurrentWord()
if __name__ == '__main__':
main(sys.stdin)
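# Hedged illustration: keys arrive pre-sorted as "<length>:<word>" with a
# tab-separated value, and the reducer counts occurrences per distinct word:
#
#   main(["3:cat\t1", "3:cat\t1", "4:fish\t1"])
#   # prints "cat\t2" then "fish\t1"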
|
mishravikas/geonode-cas
|
geonode/contrib/groups/tests.py
|
Python
|
gpl-3.0
| 15,540
| 0.00251
|
import json
from django.contrib.auth import get_backends
from django.contrib.auth.models import User, AnonymousUser
from django.core.urlresolvers import reverse
from django.test import TestCase
from django.test.client import Client
from geonode.contrib.groups.models import Group, GroupInvitation
from geonode.documents.models import Document
from geonode.layers.models import Layer
from geonode.layers.views import LAYER_LEV_NAMES
from geonode.maps.models import Map
from geonode.base.populate_test_data import create_models
from geonode.security.enumerations import ANONYMOUS_USERS, AUTHENTICATED_USERS
from geonode.security.views import _perms_info
class SmokeTest(TestCase):
"Basic checks to make sure pages load, etc."
fixtures = ["group_test_data"]
def setUp(self):
create_models(type='layer')
create_models(type='map')
create_models(type='document')
self.norman = User.objects.get(username="norman")
self.bar = Group.objects.get(slug='bar')
def test_group_permissions_extend_to_user(self):
"""
Ensures that when a user is in a group, the group permissions
extend to the user.
"""
layer = Layer.objects.all()[0]
backend = get_backends()[0]
# Set the default permissions
layer.set_default_permissions()
# Test that LEVEL_READ is set for ANONYMOUS_USERS and AUTHENTICATED_USERS
self.assertEqual(layer.get_gen_level(ANONYMOUS_USERS), layer.LEVEL_READ)
self.assertEqual(layer.get_gen_level(AUTHENTICATED_USERS), layer.LEVEL_READ)
# Test that the default perms give Norman view permissions but not write permissions
read_perms = backend.objects_with_perm(self.norman, 'layers.view_layer', Layer)
        write_perms = backend.objects_with_perm(self.norman, 'layers.change_layer', Layer)
self.assertTrue(layer.id in read_perms)
self.assertTrue(layer.id not in write_perms)
# Make sure Norman is not in the bar group.
self.assertFalse(self.bar.user_is_member(self.norman))
# Add norman to the bar group.
self.bar.join(self.norman)
# Ensure Norman is in the bar group.
        self.assertTrue(self.bar.user_is_member(self.norman))
# Test that the bar group has default permissions on the layer
bar_read_perms = backend.objects_with_perm(self.bar, 'layers.view_layer', Layer)
bar_write_perms = backend.objects_with_perm(self.bar, 'layers.change_layer', Layer)
self.assertTrue(layer.id in bar_read_perms)
self.assertTrue(layer.id not in bar_write_perms)
# Give the bar group permissions to change the layer.
layer.set_group_level(self.bar, Layer.LEVEL_WRITE)
bar_read_perms = backend.objects_with_perm(self.bar, 'layers.view_layer', Layer)
bar_write_perms = backend.objects_with_perm(self.bar, 'layers.change_layer', Layer)
self.assertTrue(layer.id in bar_read_perms)
self.assertTrue(layer.id in bar_write_perms)
# Test that the bar group perms give Norman view and change permissions
read_perms = backend.objects_with_perm(self.norman, 'layers.view_layer', Layer)
write_perms = backend.objects_with_perm(self.norman, 'layers.change_layer', Layer)
self.assertTrue(layer.id in read_perms)
self.assertTrue(layer.id in write_perms)
def test_group_resource(self):
"""
Tests the resources method on a Group object.
"""
layer = Layer.objects.all()[0]
map = Map.objects.all()[0]
# Give the self.bar group write perms on the layer
layer.set_group_level(self.bar, Layer.LEVEL_WRITE)
map.set_group_level(self.bar, Map.LEVEL_WRITE)
# Ensure the layer is returned in the group's resources
self.assertTrue(layer in self.bar.resources())
self.assertTrue(map in self.bar.resources())
# Test the resource filter
self.assertTrue(layer in self.bar.resources(resource_type=Layer))
self.assertTrue(map not in self.bar.resources(resource_type=Layer))
# Revoke permissions on the layer from the self.bar group
layer.set_group_level(self.bar, Layer.LEVEL_NONE)
# Ensure the layer is no longer returned in the groups resources
self.assertFalse(layer in self.bar.resources())
def test_perms_info(self):
"""
Tests the perms_info function (which passes permissions to the response context).
"""
# Add test to test perms being sent to the front end.
layer = Layer.objects.all()[0]
perms_info = _perms_info(layer, LAYER_LEV_NAMES)
# Ensure there is no group info for the layer object by default
self.assertEqual(dict(), perms_info['groups'])
# Add the foo group to the layer object groups
layer.set_group_level(self.bar, Layer.LEVEL_READ)
perms_info = _perms_info(layer, LAYER_LEV_NAMES)
# Ensure foo is in the perms_info output
self.assertDictEqual(perms_info['groups'], {u'bar': u'layer_readonly'})
def test_resource_permissions(self):
"""
        Tests that the client can get and set group permissions through the resource_permissions view.
"""
c = Client()
self.assertTrue(c.login(username="admin", password="admin"))
layer = Layer.objects.all()[0]
document = Document.objects.all()[0]
map_obj = Map.objects.all()[0]
objects = layer, document, map_obj
for obj in objects:
response = c.get(reverse('resource_permissions', kwargs=dict(type=obj.polymorphic_ctype.model, resource_id=obj.id)))
self.assertEqual(response.status_code, 200)
js = json.loads(response.content)
permissions = js.get('permissions', dict())
if isinstance(permissions, unicode) or isinstance(permissions, str):
permissions = json.loads(permissions)
# Ensure the groups value is empty by default
self.assertDictEqual(permissions.get('groups'), dict())
permissions = {"anonymous": "_none", "authenticated": "_none", "users": [["admin", obj.LEVEL_WRITE]],
"groups": [[self.bar.slug, obj.LEVEL_WRITE]]}
# Give the bar group permissions
response = c.post(reverse('resource_permissions', kwargs=dict(type=obj.polymorphic_ctype.model, resource_id=obj.id)),
data=json.dumps(permissions), content_type="application/json")
self.assertEqual(response.status_code, 200)
response = c.get(reverse('resource_permissions', kwargs=dict(type=obj.polymorphic_ctype.model, resource_id=obj.id)))
js = json.loads(response.content)
permissions = js.get('permissions', dict())
if isinstance(permissions, unicode) or isinstance(permissions, str):
permissions = json.loads(permissions)
# Make sure the bar group now has write permissions
self.assertDictEqual(permissions['groups'], {'bar': obj.LEVEL_WRITE})
# Remove group permissions
permissions = {"anonymous": "_none", "authenticated": "_none", "users": [["admin", obj.LEVEL_WRITE]],
"groups": {}}
# Update the object's permissions to remove the bar group
response = c.post(reverse('resource_permissions', kwargs=dict(type=obj.polymorphic_ctype.model, resource_id=obj.id)),
data=json.dumps(permissions), content_type="application/json")
self.assertEqual(response.status_code, 200)
response = c.get(reverse('resource_permissions', kwargs=dict(type=obj.polymorphic_ctype.model, resource_id=obj.id)))
js = json.loads(response.content)
permissions = js.get('permissions', dict())
if isinstance(permissions, unicode) or isinstance(permissions, str):
permissions = json.loads(permissions)
# Assert the bar group no longer has permissions
self.assertDictEqual(permissions['groups'], {})
d
|
JazzeYoung/VeryDeepAutoEncoder
|
pylearn2/pylearn2/datasets/hdf5_deprecated.py
|
Python
|
bsd-3-clause
| 13,414
| 0
|
"""
Objects for datasets serialized in HDF5 format (.h5).
"""
__author__ = "Steven Kearnes"
__copyright__ = "Copyright 2014, Stanford University"
__license__ = "3-clause BSD"
try:
import h5py
except ImportError:
h5py = None
import numpy as np
from theano.compat.six.moves import xrange
import warnings
from pylearn2.datasets.dense_design_matrix import (DenseDesignMatrix,
DefaultViewConverter)
from pylearn2.space import CompositeSpace, VectorSpace, IndexSpace
from pylearn2.utils.iteration import FiniteDatasetIterator, safe_izip
from pylearn2.utils import contains_nan
class HDF5DatasetDeprecated(DenseDesignMatrix):
"""
Dense dataset loaded from an HDF5 file.
Parameters
----------
filename : str
HDF5 file name.
X : str, optional
Key into HDF5 file for dataset design matrix.
topo_view: str, optional
Key into HDF5 file for topological view of dataset.
y : str, optional
Key into HDF5 file for dataset targets.
load_all : bool, optional (default False)
If true, datasets are loaded into memory instead of being left
on disk.
    cache_size : int, optional
        Size in bytes for the chunk cache of the HDF5 library. Useful
        when the HDF5 file has large chunks and when using a sequential
        iterator. The chunk cache allows the disk to be accessed only
        for whole chunks, with batches then copied to the GPU from
        memory, which can result in a significant speed up. Sensible
        default values depend on the size of your data and the batch
        size you wish to use. A rule of thumb is to make a chunk contain
        100 - 1000 batches and make sure they encompass complete
        samples.
kwargs : dict, optional
Keyword arguments passed to `DenseDesignMatrix`.
"""
def __init__(self, filename, X=None, topo_view=None, y=None,
load_all=False, cache_size=None, **kwargs):
self.load_all = load_all
if h5py is None:
raise RuntimeError("Could not import h5py.")
if cache_size:
propfaid = h5py.h5p.create(h5py.h5p.FILE_ACCESS)
settings = list(propfaid.get_cache())
settings[2] = cache_size
propfaid.set_cache(*settings)
fid = h5py.h5f.open(filename, fapl=propfaid)
self._file = h5py.File(fid)
else:
self._file = h5py.File(filename)
if X is not None:
X = self.get_dataset(X, load_all)
if topo_view is not None:
topo_view = self.get_dataset(topo_view, load_all)
if y is not None:
y = self.get_dataset(y, load_all)
super(HDF5DatasetDeprecated, self).__init__(X=X, topo_view=topo_view,
y=y, **kwargs)
def _check_labels(self):
"""
Sanity checks for X_labels and y_labels.
Since the np.all test used for these labels does not work with HDF5
datasets, we issue a warning that those values are not checked.
"""
if self.X_labels is not None:
assert self.X is not None
assert self.view_converter is None
assert self.X.ndim <= 2
if self.load_all:
assert np.all(self.X < self.X_labels)
else:
warnings.warn("HDF5Dataset cannot perform test np.all(X < " +
"X_labels). Use X_labels at your own risk.")
if self.y_labels is not None:
assert self.y is not None
assert self.y.ndim <= 2
if self.load_all:
assert np.all(self.y < self.y_labels)
else:
warnings.warn("HDF5Dataset cannot perform test np.all(y < " +
"y_labels). Use y_labels at your own risk.")
def get_dataset(self, dataset, load_all=False):
"""
Get a handle for an HDF5 dataset, or load the entire dataset into
memory.
Parameters
----------
dataset : str
Name or path of HDF5 dataset.
load_all : bool, optional (default False)
If true, load dataset into memory.
"""
if load_all:
data = self._file[dataset][:]
else:
data = self._file[dataset]
data.ndim = len(data.shape) # hdf5 handle has no ndim
return data
def iterator(self, *args, **kwargs):
"""
Get an iterator for this dataset.
The FiniteDatasetIterator uses indexing that is not supported by
HDF5 datasets, so we change the class to HDF5DatasetIterator to
override the iterator.next method used in dataset iteration.
Parameters
----------
WRITEME
"""
iterator = super(HDF5DatasetDeprecated, self).iterator(*args, **kwargs)
iterator.__class__ = HDF5DatasetIterator
return iterator
def set_topological_view(self, V, axes=('b', 0, 1, 'c')):
"""
Set up dataset topological view, without building an in-memory
design matrix.
        This is mostly copied from DenseDesignMatrix, except:
* HDF5ViewConverter is used instead of DefaultViewConverter
        * Data specs are derived from topo_view, not X
* NaN checks have been moved to HDF5DatasetIterator.next
Note that y may be loaded into memory for reshaping if y.ndim != 2.
Parameters
----------
V : ndarray
Topological view.
axes : tuple, optional (default ('b', 0, 1, 'c'))
Order of axes in topological view.
"""
shape = [V.shape[axes.index('b')],
V.shape[axes.index(0)],
V.shape[axes.index(1)],
V.shape[axes.index('c')]]
self.view_converter = HDF5ViewConverter(shape[1:], axes=axes)
self.X = self.view_converter.topo_view_to_design_mat(V)
# self.X_topo_space stores a "default" topological space that
# will be used only when self.iterator is called without a
# data_specs, and with "topo=True", which is deprecated.
self.X_topo_space = self.view_converter.topo_space
# Update data specs
X_space = VectorSpace(dim=V.shape[axes.index('b')])
X_source = 'features'
if self.y is None:
space = X_space
source = X_source
else:
if self.y.ndim == 1:
dim = 1
else:
dim = self.y.shape[-1]
# check if y_labels has been specified
if getattr(self, 'y_labels', None) is not None:
y_space = IndexSpace(dim=dim, max_labels=self.y_labels)
elif getattr(self, 'max_labels', None) is not None:
y_space = IndexSpace(dim=dim, max_labels=self.max_labels)
else:
y_space = VectorSpace(dim=dim)
y_source = 'targets'
space = CompositeSpace((X_space, y_space))
source = (X_source, y_source)
self.data_specs = (space, source)
self.X_space = X_space
self._iter_data_specs = (X_space, X_source)
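# Hedged construction sketch for the chunk-cache path documented above
# ('train.h5' and its dataset keys are hypothetical):
#
#   dataset = HDF5DatasetDeprecated('train.h5', X='features', y='targets',
#                                   cache_size=64 * 1024 ** 2)  # 64 MiB cache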
class HDF5DatasetIterator(FiniteDatasetIterator):
"""
Dataset iterator for HDF5 datasets.
FiniteDatasetIterator expects a design matrix to be available, but this
will not always be the case when using HDF5 datasets with topological
views.
Parameters
----------
dataset : Dataset
Dataset over which to iterate.
subset_iterator : object
Iterator that returns slices of the dataset.
data_specs : tuple, optional
A (space, source) tuple.
return_tuple : bool, optional (default False)
Whether to return a tuple even if only one source is used.
convert : list, optional
A list of callables (in the same order as the sources in
data_specs) that will be applied to each slice of the dataset.
"""
def next(self):
"""
Get the next subset of the dataset during dataset iteration.
Converts index selections for
|
google-research/understanding-curricula
|
third_party/__init__.py
|
Python
|
apache-2.0
| 616
| 0.001623
|
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from . import models
from . import data
|
cstipkovic/spidermonkey-research
|
js/src/devtools/rootAnalysis/analyze.py
|
Python
|
mpl-2.0
| 10,455
| 0.003922
|
#!/usr/bin/python
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
"""
Runs the static rooting analysis
"""
from subprocess import Popen
import subprocess
import os
import argparse
import sys
import re
def env(config):
e = dict(os.environ)
e['PATH'] = ':'.join(p for p in (config.get('gcc_bin'), config.get('sixgill_bin'), e['PATH']) if p)
e['XDB'] = '%(sixgill_bin)s/xdb.so' % config
e['SOURCE'] = config['source']
e['ANALYZED_OBJDIR'] = config['objdir']
return e
def fill(command, config):
try:
return tuple(s % config for s in command)
except:
print("Substitution failed:")
problems = []
for fragment in command:
try:
fragment % config
except:
problems.append(fragment)
raise Exception("\n".join(["Substitution failed:"] + [ " %s" % s for s in problems ]))
def print_command(command, outfile=None, env=None):
output = ' '.join(command)
if outfile:
output += ' > ' + outfile
if env:
changed = {}
e = os.environ
for key,value in env.items():
if (key not in e) or (e[key] != value):
changed[key] = value
if changed:
outputs = []
for key, value in changed.items():
if key in e and e[key] in value:
start = value.index(e[key])
end = start + len(e[key])
outputs.append('%s="%s${%s}%s"' % (key,
value[:start],
key,
value[end:]))
else:
outputs.append("%s='%s'" % (key, value))
output = ' '.join(outputs) + " " + output
print output
def generate_hazards(config, outfilename):
jobs = []
for i in range(int(config['jobs'])):
command = fill(('%(js)s',
'%(analysis_scriptdir)s/analyzeRoots.js',
'%(gcFunctions_list)s',
'%(gcEdges)s',
'%(suppressedFunctions_list)s',
'%(gcTypes)s',
'%(typeInfo)s',
str(i+1), '%(jobs)s',
'tmp.%s' % (i+1,)),
config)
outfile = 'rootingHazards.%s' % (i+1,)
        output = open(outfile, 'w')
if config['verbose']:
print_command(command, outfile=outfile, env=env(config))
jobs.append((command, Popen(command, stdout=output, env=env(config))))
final_status = 0
while jobs:
pid, status = os.wait()
        jobs = [ job for job in jobs if job[1].pid != pid ]
final_status = final_status or status
if final_status:
raise subprocess.CalledProcessError(final_status, 'analyzeRoots.js')
with open(outfilename, 'w') as output:
command = ['cat'] + [ 'rootingHazards.%s' % (i+1,) for i in range(int(config['jobs'])) ]
if config['verbose']:
print_command(command, outfile=outfilename)
subprocess.call(command, stdout=output)
JOBS = { 'dbs':
(('%(ANALYSIS_SCRIPTDIR)s/run_complete',
'--foreground',
'--no-logs',
'--build-root=%(objdir)s',
'--wrap-dir=%(sixgill)s/scripts/wrap_gcc',
'--work-dir=work',
'-b', '%(sixgill_bin)s',
'--buildcommand=%(buildcommand)s',
'.'),
()),
'list-dbs':
(('ls', '-l'),
()),
'callgraph':
(('%(js)s', '%(analysis_scriptdir)s/computeCallgraph.js', '%(typeInfo)s'),
'callgraph.txt'),
'gcFunctions':
(('%(js)s', '%(analysis_scriptdir)s/computeGCFunctions.js', '%(callgraph)s',
'[gcFunctions]', '[gcFunctions_list]', '[gcEdges]', '[suppressedFunctions_list]'),
('gcFunctions.txt', 'gcFunctions.lst', 'gcEdges.txt', 'suppressedFunctions.lst')),
'gcTypes':
(('%(js)s', '%(analysis_scriptdir)s/computeGCTypes.js',
'[gcTypes]', '[typeInfo]'),
('gcTypes.txt', 'typeInfo.txt')),
'allFunctions':
(('%(sixgill_bin)s/xdbkeys', 'src_body.xdb',),
'allFunctions.txt'),
'hazards':
(generate_hazards, 'rootingHazards.txt'),
'explain':
((os.environ.get('PYTHON', 'python2.7'),
'%(analysis_scriptdir)s/explain.py',
'%(hazards)s', '%(gcFunctions)s',
'[explained_hazards]', '[unnecessary]', '[refs]'),
('hazards.txt', 'unnecessary.txt', 'refs.txt'))
}
def out_indexes(command):
for i in range(len(command)):
m = re.match(r'^\[(.*)\]$', command[i])
if m:
yield (i, m.group(1))
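# Quick illustration of out_indexes: bracketed entries in a JOBS command
# tuple are output-file placeholders, yielded with their positions.
assert list(out_indexes(('prog', '[gcTypes]', '[typeInfo]'))) == [(1, 'gcTypes'), (2, 'typeInfo')]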
def run_job(name, config):
cmdspec, outfiles = JOBS[name]
print("Running " + name + " to generate " + str(outfiles))
if hasattr(cmdspec, '__call__'):
cmdspec(config, outfiles)
else:
temp_map = {}
cmdspec = fill(cmdspec, config)
if isinstance(outfiles, basestring):
stdout_filename = '%s.tmp' % name
temp_map[stdout_filename] = outfiles
if config['verbose']:
print_command(cmdspec, outfile=outfiles, env=env(config))
else:
stdout_filename = None
pc = list(cmdspec)
outfile = 0
for (i, name) in out_indexes(cmdspec):
pc[i] = outfiles[outfile]
outfile += 1
if config['verbose']:
print_command(pc, env=env(config))
command = list(cmdspec)
outfile = 0
for (i, name) in out_indexes(cmdspec):
command[i] = '%s.tmp' % name
temp_map[command[i]] = outfiles[outfile]
outfile += 1
sys.stdout.flush()
if stdout_filename is None:
subprocess.check_call(command, env=env(config))
else:
with open(stdout_filename, 'w') as output:
subprocess.check_call(command, stdout=output, env=env(config))
for (temp, final) in temp_map.items():
try:
os.rename(temp, final)
except OSError:
print("Error renaming %s -> %s" % (temp, final))
raise
config = { 'ANALYSIS_SCRIPTDIR': os.path.dirname(__file__) }
defaults = [ '%s/defaults.py' % config['ANALYSIS_SCRIPTDIR'],
'%s/defaults.py' % os.getcwd() ]
parser = argparse.ArgumentParser(description='Statically analyze build tree for rooting hazards.')
parser.add_argument('step', metavar='STEP', type=str, nargs='?',
help='run starting from this step')
parser.add_argument('--source', metavar='SOURCE', type=str, nargs='?',
help='source code to analyze')
parser.add_argument('--objdir', metavar='DIR', type=str, nargs='?',
help='object directory of compiled files')
parser.add_argument('--js', metavar='JSSHELL', type=str, nargs='?',
help='full path to ctypes-capable JS shell')
parser.add_argument('--upto', metavar='UPTO', type=str, nargs='?',
help='last step to execute')
parser.add_argument('--jobs', '-j', default=None, metavar='JOBS', type=int,
help='number of simultaneous analyzeRoots.js jobs')
parser.add_argument('--list', const=True, nargs='?', type=bool,
help='display available steps')
parser.add_argument('--buildcommand', '--build', '-b', type=str, nargs='?',
help='command to build the tree being analyzed')
parser.add_argument('--tag', '-t', type=str, nargs='?',
help='name of job, also sets build command to "build.<tag>"')
parser.add_argument('--expect-file', type=str, nargs='?',
|
whiplash01/Hyperloop
|
docs/xdsm/hyperloop_xdsm.py
|
Python
|
apache-2.0
| 1,244
| 0.006431
|
from XDSM import XDSM
opt = 'Optimization'
dat = 'DataInter'
mda = 'MDA'
anl = 'Analysis'
x = XDSM()
#x.addComp('driver', mda, 'Solver')
x.addComp('assembly inputs', anl, 'assembly inputs')
x.addComp('compress', anl, 'compress')
x.addComp('mission', anl, 'mission')
x.addComp('pod', anl, 'pod')
x.addComp('flow_limit', anl, 'flow\_limit')
x.addComp('tube_wall_temp', anl, 'tube\_wall\_temp')
x.addDep('mission', 'compress', dat, '', stack=True)
x.addDep('pod', 'compress', dat, '', stack=True)
x.addDep('pod', 'mission', dat, '', stack=True)
x.addDep('flow_limit', 'pod', dat, '', stack=True)
x.addDep('tube_wall_temp', 'compress', dat, '', stack=True)
#reverse couplings
x.addDep('compress', 'flow_limit', dat, '', stack=True)
x.addDep('compress', 'tube_wall_temp', dat, '', stack=True)
x.addDep('compress', 'pod', dat, '', stack=True)
#assembly inputs
x.addDep('compress', 'assembly inputs', dat, '', stack=True)
x.addDep('mission', 'assembly inputs', dat, '', stack=True)
x.addDep('flow_limit', 'assembly inputs', dat, '', stack=True)
x.addDep('tube_wall_temp', 'assembly inputs', dat, '', stack=True)
x.addDep('pod', 'assembly inputs', dat, '', stack=True)
#x.addDep('compress','driver', dat, '$W_{in}$')
x.write('hyperloop',True)
|
shawndaniel/evernote-exporter
|
evernote_exporter.py
|
Python
|
gpl-3.0
| 10,506
| 0.001523
|
from fnmatch import fnmatch
import os
from sys import exit
import html2text
import re
import urllib
import shutil
import logging
import sqlite3
from sys import stdout
logging.basicConfig(filename='error_log.log', filemode='a')
class BackupEvernote(object):
def __init__(self, evernote_dir, db_dir='', output_dir=''):
self.forbidden = ["?", "#", "/", "\\", "*", '"', "<", ">", "|", "%", " "]
        self.fb_w_trail = list(self.forbidden)  # copy so removing '/' below leaves self.forbidden intact
del self.fb_w_trail[2]
self.evernote_dir = evernote_dir
self.db_dir = db_dir
self.output_dir = output_dir
def _counter(self, ind, msg):
stdout.write('\r' + '\t%s: %s' % (msg, ind))
stdout.flush()
def _exception(self, msg, file_path, e):
logging.error(e)
while True:
            inp = raw_input('Cannot %s: %s\n'
                            'Error: %s\n'
                            'Skip & continue? y/n: ' % (msg, file_path, e))
if inp == 'y':
break
else:
exit(0)
return
def _multi_asterisk_fix(self, matchobj):
return matchobj.group(0)[1:]
def _get_pt(self, string):
path = string.split('[')[1].split(']')[0]
title = string.split('(')[1].split(')')[0]
return path, title
def _image_url_fix(self, matchobj):
url = ''
string = matchobj.group(0)
# remove escape chars
if '\n' in string:
string = string.replace('\n', ' ')
# this is a url
if '![' not in string:
url, _ = self._get_pt(string)
return '%s' % url
# image contains url
url_pat = re.compile(r'.(\[.*\])\(.*\)\(.*\)$', re.MULTILINE)
if re.match(url_pat, string):
url = string.rpartition('(')[-1].strip(')')
# image with or without url
title, path = self._get_pt(string)
path = '%s/%s/%s' % (self.output_dir, 'uncategorized', path)
# todo: image path (remove random dash? -_) i.e t seal img
path = self._remove_chars(path, self.fb_w_trail, trail=True)
path += '?800'
if not url:
return '{{%s|%s}}' % (path, title)
else:
return '[[%s|{{%s|%s}}]]' % (url, path, title)
def _remove_asterisks(self, matchobj):
return re.sub(r'\**', '', matchobj.group(0))
def _fix_spacing(self, matchobj):
# todo: add wider bullet conversions i.e imac note
string = matchobj.group(0)
s_len = len(string) - 1
if s_len <= 1:
return string
elif s_len == 2:
return '*'
elif s_len == 6:
return ' *'
elif s_len == 7:
return ' *'
elif s_len == 11:
return ' *'
else:
return string
def to_zim_syntax(self, content):
""" Consider editing this func to fit the syntax of your chosen note taking software"""
# headers
# todo: remove heading chars / do not add heading if proceeded by image or url
# todo: only ensure 1 h1 header (first), replace other h1's with h2
        new_c = content.replace('####', '=').replace('### ', '== ').replace('## ', '==== ').replace('# ', '====== ')
# line separation?
# todo: remake regex r'[#*_-]{3,}' not proceeded by words (\W) i.e jsand
line_pat = re.compile(r'^\*[^\S\n]*\*[^\S\n]*\*\n', re.MULTILINE)
new_c = re.sub(line_pat, ('-' * 80), new_c)
# todo: regex to replace 3+ line breaks with 2
# todo: regex for bold text, 2 * followed by words then 2 *
# todo: replace \- at start of the line with bullet
# fix bullet lists
new_c = re.sub(r'\*[^\S\n]+?\*', self._multi_asterisk_fix, new_c) # multiple asterisks on same line
spaces = re.compile(r'^[^\S\n]*\*', re.MULTILINE)
new_c = re.sub(spaces, self._fix_spacing, new_c)
# fix urls and images
new_c = re.sub(r'\*{2}(\[)|\)\*{2}', self._remove_asterisks, new_c)
# new_c = re.sub(r'!*\[[^\]]*\]\([^\)]*\)', self._image_url_fix, new_c)
new_c = re.sub(r'!*[\\\[]*\[[^\]]*[\\\]]*\([^\)]*[\]\)]*(\([^\)]*\))*', self._image_url_fix, new_c)
return new_c
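    # Quick illustration of the heading mapping above: markdown '#' levels
    # become Zim '=' headings (longest marker replaced first), e.g.
    #   BackupEvernote('.').to_zim_syntax('# Title')  ->  '====== Title'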
def edit_file(self, full_path, filename, to_zim=False):
text_maker = html2text.HTML2Text()
with open(full_path, 'r') as f:
html = f.read()
content = ''
if html:
try:
content = text_maker.handle(unicode(html, errors='ignore'))
content = content.encode('ascii', 'ignore')
content = content.split('\00')[0] # remove null chars
content = content.replace('\.', '.') # remove escape chars
except Exception as e:
self._exception('convert content of note to markdown', full_path, e)
else:
content = ''
if to_zim:
content = self.to_zim_syntax(content)
fn_path = self._rename_file(full_path, filename)
with open(fn_path, 'w') as f:
try:
f.write(content.encode('ascii', 'ignore'))
except Exception as e:
self._exception('save note', fn_path, e)
return
def _remove_chars(self, stack_or_nb, folder_chars, trail=False):
try:
if not trail:
stack_or_nb = stack_or_nb.replace('/', '&')
for char in folder_chars:
if char in stack_or_nb:
stack_or_nb = stack_or_nb.replace(char, '_')
except Exception:
raise
finally:
return stack_or_nb
def _rename_file(self, full_path, filename, trail=False):
filename = self._remove_chars(filename, self.forbidden, trail)
renamed = filename.replace('.html', '.txt')
old_filename = full_path.rpartition('/')[-1]
return full_path.replace(old_filename, renamed)
def nbooks_to_dirs(self):
""" creates notebook & notebook stack folder structure containing all respective notes"""
print "\nOrganizing notes by directory (based on notebooks & stacks)..."
copied = []
        con = sqlite3.connect(self.db_dir)
notebooks = con.execute("SELECT * FROM notebook_attr;").fetchall()
        folder_chars = list(self.forbidden)  # copy; removing '/' must not mutate self.forbidden
del folder_chars[2]
for ind, i in enumerate(notebooks):
nb_id, notebook, stack = i[0], i[1], i[2]
stack = self._remove_chars(stack, folder_chars)
            notebook = self._remove_chars(notebook, folder_chars)
nb_notes = con.execute('SELECT * FROM note_attr WHERE note_attr.notebook_uid = %s;' % nb_id)
notes_set = {i[1] for i in nb_notes}
s_dir = ''
if notebook and not stack:
notebook_dir = self.output_dir + '/' + notebook
if not os.path.isdir(notebook_dir):
os.mkdir(notebook_dir)
s_dir = notebook_dir
else:
if stack:
stack_path = self.output_dir + '/' + stack
if not os.path.isdir(stack_path):
os.mkdir(stack_path)
s_dir = stack_path
if notebook:
nb_in_stack = self.output_dir + '/%s/%s' % (stack, notebook)
if not os.path.isdir(nb_in_stack):
os.mkdir(nb_in_stack)
s_dir = nb_in_stack
for p, d, files in os.walk(self.evernote_dir):
for f in files:
fl = urllib.unquote(f)
fl_name = fl.rpartition('.')[0]
f_path = os.path.join(p, f)
if fl_name in notes_set:
copied.append(fl)
out_path = os.path.join(s_dir, f)
shutil.copy(f_path, out_path)
os.rename(out_path, os.path.join(s_dir, fl))
self._counter(ind, 'notebooks/stacks exported')
self.transfer_uncategorized(copied)
return
def transfer_uncategorized(self, copied):
|
Nvizible/shotgunEvents
|
src/daemonizer.py
|
Python
|
mit
| 5,101
| 0.006077
|
#!/usr/bin/env python
# Taken and modified from:
# http://www.jejik.com/articles/2007/02/a_simple_unix_linux_daemon_in_python/
import atexit
import os
import signal
import sys
import time
if (hasattr(os, "devnull")):
DEVNULL = os.devnull
else:
DEVNULL = "/dev/null"
class Daemon(object):
"""
A generic daemon class.
Usage: subclass the Daemon class and override the _run() method
"""
def __init__(self, serviceName, pidfile, stdin=DEVNULL, stdout=DEVNULL, stderr=DEVNULL):
super(Daemon, self).__init__()
self._serviceName = serviceName
self._stdin = stdin
self._stdout = stdout
self._stderr = stderr
self._pidfile = pidfile
def _daemonize(self):
"""
Do the UNIX double-fork magic, see Stevens' "Advanced
Programming in the UNIX Environment" for details (ISBN 0201563177)
http://www.erlenstar.demon.co.uk/unix/faq_2.html#SEC16
"""
try:
pid = os.fork()
if pid > 0:
# exit first parent
sys.exit(0)
except OSError, e:
sys.stderr.write("fork #1 failed: %d (%s)\n" % (e.errno, e.strerror))
sys.exit(1)
# decouple from parent environment
os.chdir("/")
os.setsid()
os.umask(0)
# do second fork
try:
pid = os.fork()
if pid > 0:
# exit from second parent
sys.exit(0)
except OSError, e:
sys.stderr.write("fork #2 failed: %d (%s)\n" % (e.errno, e.strerror))
sys.exit(1)
# redirect standard file descriptors
sys.stdout.flush()
sys.stderr.flush()
si = file(self._stdin, 'r')
so = file(self._stdout, 'a+')
se = file(self._stderr, 'a+', 0)
os.dup2(si.fileno(), sys.stdin.fileno())
os.dup2(so.fileno(), sys.stdout.fileno())
os.dup2(se.fileno(), sys.stderr.fileno())
# write pidfile and subsys file
pid = str(os.getpid())
file(self._pidfile,'w+').write("%s\n" % pid)
if os.path.exists('/var/lock/subsys'):
fh = open(os.path.join('/var/lock/subsys', self._serviceName), 'w')
fh.close()
def _delpid(self):
if os.path.exists(self._pidfile):
os.remove(self._pidfile)
subsysPath = os.path.join('/var/lock/subsys', self._serviceName)
if os.path.exists(subsysPath):
os.remove(subsysPath)
self._cleanup()
def start(self, daemonize=True):
"""
Start the daemon
"""
# Check for a pidfile to see if the daemon already runs
try:
pf = file(self._pidfile,'r')
pid = int(pf.read().strip())
pf.close()
except IOError:
pid = None
if pid:
message = "pidfile %s already exist. Daemon already running?\n"
            sys.stderr.write(message % self._pidfile)
sys.exit(1)
# Start the daemon
if daemonize:
self._daemonize()
# Cleanup handling
def termHandler(signum, frame):
self._delpid()
        signal.signal(signal.SIGTERM, termHandler)
atexit.register(self._delpid)
# Run the daemon
self._run()
def stop(self):
"""
Stop the daemon
"""
# Get the pid from the pidfile
try:
pf = file(self._pidfile,'r')
pid = int(pf.read().strip())
pf.close()
except IOError:
pid = None
if not pid:
message = "pidfile %s does not exist. Daemon not running?\n"
sys.stderr.write(message % self._pidfile)
return # not an error in a restart
# Try killing the daemon process
try:
while 1:
os.kill(pid, signal.SIGTERM)
time.sleep(0.1)
except OSError, err:
err = str(err)
if err.find("No such process") > 0:
if os.path.exists(self._pidfile):
os.remove(self._pidfile)
else:
print str(err)
sys.exit(1)
def foreground(self):
self.start(daemonize=False)
def restart(self, daemonize=True):
"""
Restart the daemon
"""
self.stop()
self.start(daemonize)
def _run(self):
"""
You should override this method when you subclass Daemon. It will be
called after the process has been daemonized by start() or restart().
"""
raise NotImplementedError('You must implement the method in your class.')
def _cleanup(self):
"""
You should override this method when you subclass Daemon. It will be
called when the daemon exits.
"""
raise NotImplementedError('You must implement the method in your class.')
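# Hedged usage sketch: subclass Daemon and override the two hooks (the
# service name and pidfile path below are hypothetical):
#
#   class ExampleDaemon(Daemon):
#       def _run(self):
#           while True:
#               time.sleep(60)
#
#       def _cleanup(self):
#           pass
#
#   ExampleDaemon('exampleService', '/var/run/exampleService.pid').start()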
|
xuru/pyvisdk
|
pyvisdk/do/extended_element_description.py
|
Python
|
mit
| 1,021
| 0.008815
|
import logging
from pyvisdk.exceptions import InvalidArgumentError
########################################
# Automatically generated, do not edit.
########################################
log = logging.getLogger(__name__)
def ExtendedElementDescription(vim, *args, **kwargs):
''''''
obj = vim.client.factory.create('ns0:ExtendedElementDescription')
# do some validation checking...
    if (len(args) + len(kwargs)) < 4:
        raise IndexError('Expected at least 4 arguments got: %d' % len(args))
required = [ 'messageCatalogKeyPrefix', 'key', 'label', 'summary' ]
optional = [ 'messageArg', 'dynamicProperty', 'dynamicType' ]
for name, arg in zip(required+optional, args):
setattr(obj, name, arg)
for name, value in kwargs.items():
if name in required + optional:
setattr(obj, name, value)
else:
raise InvalidArgumentError("Invalid argument: %s. Expected one of %s" % (name, ", ".join(required + optional)))
return obj
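# Usage sketch: assumes `vim` is an already-connected pyvisdk service
# instance (obtaining one is outside the scope of this module). The field
# values are hypothetical and simply satisfy the four required arguments
# validated above.
def _example_usage(vim):
    return ExtendedElementDescription(
        vim,
        messageCatalogKeyPrefix='com.example.catalog',
        key='example.key',
        label='Example label',
        summary='A short example summary')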
|
aurelo/lphw
|
source/ex9.py
|
Python
|
mit
| 413
| 0
|
days = "Mon Tue Wed Thu Fri Sat Sun"
months = "Jan\nFeb\nMar\nApr\n...\nSpe\nNov\nDec"
print "Here are the days:",
|
days
print "Here are the months:", months
print "Months: %r" % months # raw printing retains special characters
print '''
There's something going on here
with the three double quotes
We'll be able to type as much as we like
Even 4 lines if we want, or 5, or 6
'''
print """
this
works
too
"""
|
nth10sd/lithium
|
src/lithium/interestingness/timed_run.py
|
Python
|
mpl-2.0
| 5,962
| 0.000335
|
# coding=utf-8
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at https://mozilla.org/MPL/2.0/.
"""Run a subprocess with timeout
"""
import argparse
import collections
import platform
import signal
import subprocess
import sys
import time
from pathlib import Path
from typing import BinaryIO, Callable, Dict, List, Optional, Union
(CRASHED, TIMED_OUT, NORMAL, ABNORMAL, NONE) = range(5)
# Define struct that contains data from a process that has already ended.
RunData = collections.namedtuple(
"RunData",
"sta, return_code, msg,
|
elapsedtime, killed, out, err, pid",
)
class ArgumentParser(argparse.ArgumentParser):
"""Argument parser with `timeout` and `cmd_with_args`"""
def __init__(self, *args, **kwds) -> None: # type: ignore
        super().__init__(*args, **kwds)
self.add_argument(
"-t",
"--timeout",
default=120,
dest="timeout",
type=int,
help="Set the timeout. Defaults to '%(default)s' seconds.",
)
self.add_argument("cmd_with_flags", nargs=argparse.REMAINDER)
def get_signal_name(signum: int, default: str = "Unknown signal") -> str:
"""Stringify a signal number. The result will be something like "SIGSEGV",
or from Python 3.8, "Segmentation fault".
Args:
signum: Signal number to lookup
default: Default to return if signal isn't recognized.
Returns:
String description of the signal.
"""
if sys.version_info[:2] >= (3, 8) and platform.system() != "Windows":
return signal.strsignal(signum) or default
for member in dir(signal):
if member.startswith("SIG") and not member.startswith("SIG_"):
if getattr(signal, member) == signum:
return member
return default
def timed_run(
cmd_with_args: List[str],
timeout: int,
log_prefix: Optional[str] = None,
env: Optional[Dict[str, str]] = None,
inp: str = "",
preexec_fn: Optional[Callable[[], None]] = None,
) -> RunData:
"""If log_prefix is None, uses pipes instead of files for all output.
Args:
cmd_with_args: List of command and parameters to be executed
timeout: Timeout for the command to be run, in seconds
log_prefix: Prefix string of the log files
env: Environment for the command to be executed in
inp: stdin to be passed to the command
preexec_fn: called in child process after fork, prior to exec
Raises:
TypeError: Raises if input parameters are not of the desired types
(e.g. cmd_with_args should be a list)
OSError: Raises if timed_run is attempted to be used with gdb
Returns:
A rundata instance containing run information
"""
if not isinstance(cmd_with_args, list):
raise TypeError("cmd_with_args should be a list (of strings).")
if not isinstance(timeout, int):
raise TypeError("timeout should be an int.")
if log_prefix is not None and not isinstance(log_prefix, str):
raise TypeError("log_prefix should be a string.")
if preexec_fn is not None and not hasattr(preexec_fn, "__call__"):
raise TypeError("preexec_fn should be callable.")
prog = Path(cmd_with_args[0]).expanduser()
cmd_with_args[0] = str(prog)
if prog.stem == "gdb":
raise OSError(
"Do not use this with gdb, because kill in timed_run will "
"kill gdb but leave the process within gdb still running"
)
sta = NONE
msg = ""
child_stderr: Union[int, BinaryIO] = subprocess.PIPE
child_stdout: Union[int, BinaryIO] = subprocess.PIPE
if log_prefix is not None:
# pylint: disable=consider-using-with
child_stdout = open(log_prefix + "-out.txt", "wb")
child_stderr = open(log_prefix + "-err.txt", "wb")
start_time = time.time()
# pylint: disable=consider-using-with,subprocess-popen-preexec-fn
child = subprocess.Popen(
cmd_with_args,
env=env,
stderr=child_stderr,
stdout=child_stdout,
preexec_fn=preexec_fn,
)
try:
stdout, stderr = child.communicate(
input=inp.encode("utf-8"),
timeout=timeout,
)
except subprocess.TimeoutExpired:
child.kill()
stdout, stderr = child.communicate()
sta = TIMED_OUT
except Exception as exc: # pylint: disable=broad-except
print("Tried to run:")
print(" %r" % cmd_with_args)
print("but got this error:")
print(" %s" % exc)
sys.exit(2)
    finally:
        # typing.BinaryIO is not reliable with isinstance(); the handles are
        # real files exactly when a log_prefix was given, so test that instead.
        if log_prefix is not None:
            child_stdout.close()
            child_stderr.close()
elapsed_time = time.time() - start_time
if sta == TIMED_OUT:
msg = "TIMED OUT"
elif child.returncode == 0:
msg = "NORMAL"
sta = NORMAL
elif 0 < child.returncode < 0x80000000:
msg = "ABNORMAL exit code " + str(child.returncode)
sta = ABNORMAL
else:
# return_code < 0 (or > 0x80000000 in Windows)
# The program was terminated by a signal, which usually indicates a crash.
# Mac/Linux only!
# XXX: this doesn't work on Windows
if child.returncode < 0:
signum = -child.returncode
else:
signum = child.returncode
msg = "CRASHED signal %d (%s)" % (
signum,
get_signal_name(signum),
)
sta = CRASHED
return RunData(
sta,
child.returncode if sta != TIMED_OUT else None,
msg,
elapsed_time,
sta == TIMED_OUT,
log_prefix + "-out.txt" if log_prefix is not None else stdout,
log_prefix + "-err.txt" if log_prefix is not None else stderr,
child.pid,
)
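# Usage sketch: the command and timeout are arbitrary; on a POSIX system this
# should finish with msg == "NORMAL" well inside the 10-second limit.
if __name__ == "__main__":
    run_data = timed_run(["echo", "hello"], timeout=10)
    print(run_data.msg, round(run_data.elapsedtime, 3))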
|
doismellburning/django
|
tests/contenttypes_tests/tests.py
|
Python
|
bsd-3-clause
| 16,513
| 0.001635
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.apps.registry import Apps, apps
from django.contrib.contenttypes.fields import (
GenericForeignKey, GenericRelation
)
from django.contrib.contenttypes import management
from django.contrib.contenttypes.models import ContentType
from django.core import checks
from django.db import connections, models
from django.test import TestCase, override_settings
from django.test.utils import captured_stdout
from django.utils.encoding import force_str, force_text
from .models import Author, Article, SchemeIncludedURL
@override_settings(ROOT_URLCONF='contenttypes_tests.urls')
class ContentTypesViewsTests(TestCase):
fixtures = ['testdata.json']
def test_shortcut_with_absolute_url(self):
"Can view a shortcut for an Author object that has a get_absolute_url method"
for obj in Author.objects.all():
short_url = '/shortcut/%s/%s/' % (ContentType.objects.get_for_model(Author).id, obj.pk)
response = self.client.get(short_url)
self.assertRedirects(response, 'http://testserver%s' % obj.get_absolute_url(),
status_code=302, target_status_code=404)
def test_shortcut_with_absolute_url_including_scheme(self):
"""
Can view a shortcut when object's get_absolute_url returns a full URL
        The tested URLs are in fixtures/testdata.json:
        "http://...", "https://..." and "//..."
"""
for obj in SchemeIncludedURL.objects.all():
short_url = '/shortcut/%s/%s/' % (ContentType.objects.get_for_model(SchemeIncludedURL).id, obj.pk)
response = self.client.get(short_url)
self.assertRedirects(response, obj.get_absolute_url(),
status_code=302,
fetch_redirect_response=False)
def test_shortcut_no_absolute_url(self):
"Shortcuts for an object that has no get_absolute_url method raises 404"
for obj in Article.objects.all():
short_url = '/shortcut/%s/%s/' % (ContentType.objects.get_for_model(Article).id, obj.pk)
response = self.client.get(short_url)
self.assertEqual(response.status_code, 404)
def test_wrong_type_pk(self):
short_url = '/shortcut/%s/%s/' % (ContentType.objects.get_for_model(Author).id, 'nobody/expects')
response = self.client.get(short_url)
self.assertEqual(response.status_code, 404)
def test_shortcut_bad_pk(self):
short_url = '/shortcut/%s/%s/' % (ContentType.objects.get_for_model(Author).id, '42424242')
response = self.client.get(short_url)
self.assertEqual(response.status_code, 404)
def test_nonint_content_type(self):
an_author = Author.objects.all()[0]
short_url = '/shortcut/%s/%s/' % ('spam', an_author.pk)
response = self.client.get(short_url)
self.assertEqual(response.status_code, 404)
def test_bad_content_type(self):
an_author = Author.objects.all()[0]
short_url = '/shortcut/%s/%s/' % (42424242, an_author.pk)
response = self.client.get(short_url)
self.assertEqual(response.status_code, 404)
def test_create_contenttype_on_the_spot(self):
"""
Make sure ContentTypeManager.get_for_model creates the corresponding
content type if it doesn't exist in the database (for some reason).
"""
class ModelCreatedOnTheFly(models.Model):
name = models.CharField()
class Meta:
verbose_name = 'a model created on the fly'
app_label = 'my_great_app'
apps = Apps()
ct = ContentType.objects.get_for_model(ModelCreatedOnTheFly)
self.assertEqual(ct.app_label, 'my_great_app')
self.assertEqual(ct.model, 'modelcreatedonthefly')
self.assertEqual(force_text(ct), 'modelcreatedonthefly')
class IsolatedModelsTestCase(TestCase):
def setUp(self):
# The unmanaged models need to be removed after the test in order to
# prevent bad interactions with the flush operation in other tests.
self._old_models = apps.app_configs['contenttypes_tests'].models.copy()
def tearDown(self):
apps.app_configs['contenttypes_tests'].models = self._old_models
apps.all_models['contenttypes_tests'] = self._old_models
apps.clear_cache()
@override_settings(SILENCED_SYSTEM_CHECKS=['fields.W342']) # ForeignKey(unique=True)
class GenericForeignKeyTests(IsolatedModelsTestCase):
def test_str(self):
class Model(models.Model):
field = GenericForeignKey()
expected = "contenttypes_tests.Model.field"
actual = force_str(Model.field)
self.assertEqual(expected, actual)
def test_missing_content_type_field(self):
class TaggedItem(models.Model):
# no content_type field
object_id = models.PositiveIntegerField()
content_object = GenericForeignKey()
errors = TaggedItem.content_object.check()
expected = [
checks.Error(
"The GenericForeignKey content type references the non-existent field 'TaggedItem.content_type'.",
hint=None,
obj=TaggedItem.content_object,
id='contenttypes.E002',
)
]
self.assertEqual(errors, expected)
def test_invalid_content_type_field(self):
class Model(models.Model):
content_type = models.IntegerField() # should be ForeignKey
object_id = models.PositiveIntegerField()
content_object = GenericForeignKey(
'content_type', 'object_id')
errors = Model.content_object.check()
expected = [
checks.Error(
"'Model.content_type' is not a ForeignKey.",
hint="GenericForeignKeys must use a ForeignKey to 'contenttypes.ContentType' as the 'content_type' field.",
obj=Model.content_object,
id='contenttypes.E003',
)
]
self.assertEqual(errors, expected)
def test_content_type_field_pointing_to_wrong_model(self):
class Model(models.Model):
content_type = models.ForeignKey('self') # should point to ContentType
object_id = models.PositiveIntegerField()
content_object = GenericForeignKey(
'content_type', 'object_id')
errors = Model.content_object.check()
expected = [
checks.Error(
"'Model.content_type' is not a ForeignKey to 'contenttypes.ContentType'.",
hint="GenericForeignKeys must use a ForeignKey to 'contenttypes.ContentType' as the 'content_type' field.",
obj=Model.content_object,
id='contenttypes.E004',
)
]
self.assertEqual(errors, expected)
def test_missing_object_id_field(self):
class TaggedItem(models.Model):
content_type = models.ForeignKey(ContentType)
# missing object_id field
content_object = GenericForeignKey()
errors = TaggedItem.content_object.check()
expected = [
checks.Error(
"The GenericForeignKey object ID references the non-existent field 'object_id'.",
hint=None,
obj=TaggedItem.content_object,
id='contenttypes.E001',
)
]
self.assertEqual(errors, expected)
def test_field_name_ending_with_underscore(self):
class Model(models.Model):
content_type = models.ForeignKey(ContentType)
object_id = models.PositiveIntegerField()
content_object_ = GenericForeignKey(
'content_type', 'object_id')
errors = Model.content_object_.check()
expected = [
checks.Error(
'Field names must not end with an underscore.',
hint=None,
obj=Model.content_object_,
id='fields.E001',
)
]
        self.assertEqual(errors, expected)
|
171121130/SWI
|
venv/Lib/site-packages/openpyxl/reader/__init__.py
|
Python
|
mit
| 35
| 0
|
# Copyright (c) 2010-2017 openpyxl
|
paulsmith/geodjango
|
tests/regressiontests/cache/tests.py
|
Python
|
bsd-3-clause
| 5,938
| 0.007943
|
# -*- coding: utf-8 -*-
# Unit tests for cache framework
# Uses whatever cache backend is set in the test settings file.
import time
import unittest
from django.core.cache import cache
from django.utils.cache import patch_vary_headers
from django.http import HttpResponse
# functions/classes for complex data type tests
def f():
return 42
class C:
def m(n):
return 24
class Cache(unittest.TestCase):
def test_simple(self):
# simple set/get
cache.set("key", "value")
self.assertEqual(cache.get("key"), "value")
def test_add(self):
# test add (only add if key isn't already in cache)
cache.add("addkey1", "value")
cache.add("addkey1", "newvalue")
self.assertEqual(cache.get("addkey1"), "value")
def test_non_existent(self):
# get with non-existent keys
self.assertEqual(cache.get("does_not_exist"), None)
self.assertEqual(cache.get("does_not_exist", "bang!"), "bang!")
def test_get_many(self):
# get_many
cache.set('a', 'a')
cache.set('b', 'b')
cache.set('c', 'c')
cache.set('d', 'd')
self.assertEqual(cache.get_many(['a', 'c', 'd']), {'a' : 'a', 'c' : 'c', 'd' : 'd'})
self.assertEqual(cache.get_many(['a', 'b', 'e']), {'a' : 'a', 'b' : 'b'})
def test_delete(self):
# delete
cache.set("key1", "spam")
cache.set("key2", "eggs")
self.assertEqual(cache.get("key1"), "spam")
cache.delete("key1")
self.assertEqual(cache.get("key1"), None)
self.assertEqual(cache.get("key2"), "eggs")
def test_has_key(self):
# has_key
cache.set("hello1", "goodbye1")
self.assertEqual(cache.has_key("hello1"), True)
        self.assertEqual(cache.has_key("goodbye1"), False)
def test_in(self):
cache.set("hello2", "goodbye2")
self.assertEqual("hello2" in cache, True)
self.assertEqual("goodbye2" in cache, False)
def test_data_types(self):
stuff = {
'string' : 'this is a string',
'int' : 42,
            'list' : [1, 2, 3, 4],
'tuple' : (1, 2, 3, 4),
'dict' : {'A': 1, 'B' : 2},
'function' : f,
'class' : C,
}
cache.set("stuff", stuff)
self.assertEqual(cache.get("stuff"), stuff)
def test_expiration(self):
cache.set('expire1', 'very quickly', 1)
cache.set('expire2', 'very quickly', 1)
cache.set('expire3', 'very quickly', 1)
time.sleep(2)
self.assertEqual(cache.get("expire1"), None)
cache.add("expire2", "newvalue")
self.assertEqual(cache.get("expire2"), "newvalue")
self.assertEqual(cache.has_key("expire3"), False)
def test_unicode(self):
stuff = {
u'ascii': u'ascii_value',
u'unicode_ascii': u'Iñtërnâtiônàlizætiøn1',
u'Iñtërnâtiônàlizætiøn': u'Iñtërnâtiônàlizætiøn2',
u'ascii': {u'x' : 1 }
}
for (key, value) in stuff.items():
cache.set(key, value)
self.assertEqual(cache.get(key), value)
import os
import md5
import shutil
import tempfile
from django.core.cache.backends.filebased import CacheClass as FileCache
class FileBasedCacheTests(unittest.TestCase):
"""
Specific test cases for the file-based cache.
"""
def setUp(self):
self.dirname = tempfile.mktemp()
os.mkdir(self.dirname)
self.cache = FileCache(self.dirname, {})
def tearDown(self):
shutil.rmtree(self.dirname)
def test_hashing(self):
"""Test that keys are hashed into subdirectories correctly"""
self.cache.set("foo", "bar")
keyhash = md5.new("foo").hexdigest()
keypath = os.path.join(self.dirname, keyhash[:2], keyhash[2:4], keyhash[4:])
self.assert_(os.path.exists(keypath))
def test_subdirectory_removal(self):
"""
Make sure that the created subdirectories are correctly removed when empty.
"""
self.cache.set("foo", "bar")
keyhash = md5.new("foo").hexdigest()
keypath = os.path.join(self.dirname, keyhash[:2], keyhash[2:4], keyhash[4:])
self.assert_(os.path.exists(keypath))
self.cache.delete("foo")
self.assert_(not os.path.exists(keypath))
self.assert_(not os.path.exists(os.path.dirname(keypath)))
self.assert_(not os.path.exists(os.path.dirname(os.path.dirname(keypath))))
class CacheUtils(unittest.TestCase):
"""TestCase for django.utils.cache functions."""
def test_patch_vary_headers(self):
headers = (
# Initial vary, new headers, resulting vary.
(None, ('Accept-Encoding',), 'Accept-Encoding'),
('Accept-Encoding', ('accept-encoding',), 'Accept-Encoding'),
('Accept-Encoding', ('ACCEPT-ENCODING',), 'Accept-Encoding'),
('Cookie', ('Accept-Encoding',), 'Cookie, Accept-Encoding'),
('Cookie, Accept-Encoding', ('Accept-Encoding',), 'Cookie, Accept-Encoding'),
('Cookie, Accept-Encoding', ('Accept-Encoding', 'cookie'), 'Cookie, Accept-Encoding'),
(None, ('Accept-Encoding', 'COOKIE'), 'Accept-Encoding, COOKIE'),
('Cookie, Accept-Encoding', ('Accept-Encoding', 'cookie'), 'Cookie, Accept-Encoding'),
('Cookie , Accept-Encoding', ('Accept-Encoding', 'cookie'), 'Cookie, Accept-Encoding'),
)
for initial_vary, newheaders, resulting_vary in headers:
response = HttpResponse()
if initial_vary is not None:
response['Vary'] = initial_vary
patch_vary_headers(response, newheaders)
self.assertEqual(response['Vary'], resulting_vary)
if __name__ == '__main__':
unittest.main()
|
golaizola/pelisalacarta-xbmc
|
core/scrapertools.py
|
Python
|
gpl-3.0
| 52,267
| 0.011898
|
#------------------------------------------------------------
# -*- coding: utf-8 -*-
#------------------------------------------------------------
# Download Tools
# Based on the code from VideoMonkey XBMC Plugin
#------------------------------------------------------------
# pelisalacarta
# http://blog.tvalacarta.info/plugin-xbmc/pelisalacarta/
#------------------------------------------------------------
# Created by:
# Jesús (tvalacarta@gmail.com)
# jurrabi (jurrabi@gmail.com)
# bandavi (xbandavix@gmail.com)
# License: GPL (http://www.gnu.org/licenses/gpl-3.0.html)
#------------------------------------------------------------
# Change history:
#------------------------------------------------------------
import urlparse,urllib2,urllib
import time
import os
import config
import logger
import re
import downloadtools
import socket
logger.info("[scrapertools.py] init")
# True - Show the HTTP headers in the log
# False - Do not show them
DEBUG_LEVEL = True
CACHE_ACTIVA = "0" # Automatica
CACHE_SIEMPRE = "1" # Cachear todo
CACHE_NUNCA = "2" # No cachear nada
CACHE_PATH = config.get_setting("cache.dir")
logger.info("[scrapertools.py] CACHE_PATH="+CACHE_PATH)
DEBUG = False
def cache_page(url,post=None,headers=[['User-Agent', 'Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10.6; es-ES; rv:1.9.2.12) Gecko/20101026 Firefox/3.6.12']],modo_cache=CACHE_ACTIVA, timeout=socket.getdefaulttimeout()):
return cachePage(url,post,headers,modo_cache,timeout=timeout)
# TODO: (3.1) Remove the modoCache parameter (it is now handled via configuration)
# TODO: (3.2) Use lowercase_with_underscores naming for functions and variables, as Python recommends http://www.python.org/dev/peps/pep-0008/
def cachePage(url,post=None,headers=[['User-Agent', 'Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10.6; es-ES; rv:1.9.2.12) Gecko/20101026 Firefox/3.6.12']],modoCache=CACHE_ACTIVA, timeout=socket.getdefaulttimeout()):
logger.info("[scrapertools.py] cachePage url="+url)
modoCache = config.get_setting("cache.mode")
'''
if config.get_platform()=="plex":
from PMS import HTTP
try:
logger.info("url="+url)
data = HTTP.Request(url)
logger.info("descargada")
except:
data = ""
logger.error("Error descargando "+url)
import sys
for line in sys.exc_info():
logger.error( "%s" % line )
return data
'''
    # CACHE_NUNCA: Always fetches the URL
    # mandatory for POST requests
    if modoCache == CACHE_NUNCA or post is not None:
        logger.info("[scrapertools.py] MODO_CACHE=2 (never cache)")
try:
data = downloadpage(url,post,headers, timeout=timeout)
except:
data=""
    # CACHE_SIEMPRE: Always read from the cache, without checking dates, except when the entry is missing
    elif modoCache == CACHE_SIEMPRE:
        logger.info("[scrapertools.py] MODO_CACHE=1 (cache everything)")
        # Get the cache file names
        cachedFile, newFile = getCacheFileNames(url)
        # If there is none, download
        if cachedFile == "":
            logger.debug("[scrapertools.py] Not in cache")
            # Download it
            data = downloadpage(url,post,headers)
            # Save it to the cache
            outfile = open(newFile,"w")
            outfile.write(data)
            outfile.flush()
            outfile.close()
            logger.info("[scrapertools.py] Saved to " + newFile)
        else:
            logger.info("[scrapertools.py] Reading from cache " + cachedFile)
infile = open( cachedFile )
data = infile.read()
infile.close()
    # CACHE_ACTIVA: Read from the cache if the page has not changed
    else:
        logger.info("[scrapertools.py] MODO_CACHE=0 (automatic)")
        # Downloaded data
        data = ""
        # Get the cache file names
        cachedFile, newFile = getCacheFileNames(url)
        # If there is none, download
        if cachedFile == "":
            logger.debug("[scrapertools.py] Not in cache")
            # Download it
            data = downloadpage(url,post,headers)
            # Save it to the cache
            outfile = open(newFile,"w")
            outfile.write(data)
            outfile.flush()
            outfile.close()
            logger.info("[scrapertools.py] Saved to " + newFile)
        # If there is exactly one, check the timestamp (issue an if-modified-since request)
        else:
            # Extract the old timestamp from the file name
            oldtimestamp = time.mktime( time.strptime(cachedFile[-20:-6], "%Y%m%d%H%M%S") )
            logger.info("[scrapertools.py] oldtimestamp="+cachedFile[-20:-6])
            logger.info("[scrapertools.py] oldtimestamp="+time.ctime(oldtimestamp))
            # Make the request
            updated,data = downloadtools.downloadIfNotModifiedSince(url,oldtimestamp)
            # If it has changed
            if updated:
                # Delete the old one
                logger.debug("[scrapertools.py] Deleting "+cachedFile)
                os.remove(cachedFile)
                # Save the new one to the cache
                outfile = open(newFile,"w")
                outfile.write(data)
                outfile.flush()
                outfile.close()
                logger.info("[scrapertools.py] Saved to " + newFile)
            # Otherwise, return the content of the cached file
            else:
                logger.info("[scrapertools.py] Reading from cache " + cachedFile)
                infile = open( cachedFile )
                data = infile.read()
                infile.close()
return data
def getCacheFileNames(url):
    # Get the cache directory for this URL
    siteCachePath = getSiteCachePath(url)
    # Get the cache ID (md5 of the URL)
    cacheId = get_md5(url)
    logger.debug("[scrapertools.py] cacheId="+cacheId)
    # Current timestamp
    nowtimestamp = time.strftime("%Y%m%d%H%M%S", time.localtime())
    logger.debug("[scrapertools.py] nowtimestamp="+nowtimestamp)
    # File name
    # The cache is stored in a CACHE + URL structure
    ruta = os.path.join( siteCachePath , cacheId[:2] , cacheId[2:] )
    newFile = os.path.join( ruta , nowtimestamp + ".cache" )
    logger.debug("[scrapertools.py] newFile="+newFile)
    if not os.path.exists(ruta):
        os.makedirs( ruta )
    # Look for that file in the cache
    cachedFile = getCachedFile(siteCachePath,cacheId)
    return cachedFile, newFile
# Looks for that file in the cache
def getCachedFile(siteCachePath,cacheId):
    mascara = os.path.join(siteCachePath,cacheId[:2],cacheId[2:],"*.cache")
    logger.debug("[scrapertools.py] mascara="+mascara)
    import glob
    ficheros = glob.glob( mascara )
    logger.debug("[scrapertools.py] %d files found with that id" % len(ficheros))
    cachedFile = ""
    # If there is more than one, delete them all (probably leftovers from development) and download again
    if len(ficheros)>1:
        logger.debug("[scrapertools.py] Invalid cache")
        for fichero in ficheros:
            logger.debug("[scrapertools.py] Deleting "+fichero)
            os.remove(fichero)
        cachedFile = ""
    # Exactly one: the cached file
    elif len(ficheros)==1:
        cachedFile = ficheros[0]
    return cachedFile
def getSiteCachePath(url):
    # Get the main domain of the URL
    dominio = urlparse.urlparse(url)[1]
    logger.debug("[scrapertools.py] dominio="+dominio)
    nombres = dominio.split(".")
    if len(nombres)>1:
        dominio = nombres[len(nombres)-2]+"."+nombres[len(nombres)-1]
    else:
        dominio = nombres[0]
    logger.debug("[scrapertools.py] dominio="+dominio)
    # Create a cache directory for addresses of that domain
    siteCachePath = os.path.join( CACHE_PATH , dominio )
if not os.path.exists(CACHE_PATH):
try:
os.mkdir( CACHE_PATH )
except:
|
conan-io/conan-package-tools
|
cpt/test/integration/base.py
|
Python
|
mit
| 1,994
| 0.001003
|
import os
import unittest
from conans.util.files import mkdir_tmp
from conans import __version__ as client_version
from conans import tools
from conans.client.conan_api import ConanAPIV1
from cpt.test.utils.tools import TestBufferConanOutput
CONAN_UPLOAD_URL = os.getenv("CONAN_UPLOAD_URL",
"https://conan.jfrog.io/conan/api/conan/conan-testsuite")
CONAN_UPLOAD_PASSWORD = os.getenv("CONAN_UPLOAD_PASSWORD", "")
CONAN_LOGIN_UPLOAD = os.getenv("CONAN_LOGIN_UPLOAD", "")
class BaseTest(unittest.TestCase):
def setUp(self):
self.old_folder = os.getcwd()
self.tmp_folder = mkdir_tmp()
os.chmod(self.tmp_folder, 0o777)
self.conan_home = self.tmp_folder
os.chdir(self.tmp_folder)
# user_home = "c:/tmp/home" # Cache
self.old_env = dict(os.environ)
os.environ.update({"CONAN_USER_HOME": self.conan_home, "CONAN_PIP_PACKAGE": "0"})
        self.output = TestBufferConanOutput()
self.api, _, _ = ConanAPIV1.factory()
self.api.create_app()
self.client_cache = self.api.app.cache
def tearDown(self):
os.chdir(self.old_folder)
os.environ.clear()
os.environ.update(self.old_env)
def save_conanfile(self, conanfile):
tools.save(os.path.join(self.tmp_folder, "conanfile.py"), conanfile)
def create_project(self):
with tools.chdir(self.tmp_folder):
if tools.Version(client_version) >= "1.32.0":
self.api.new("hello/0.1.0", pure_c=True, exports_sources=True)
else:
self.api.new("hello/0.1.0")
@property
def root_project_folder(self):
dir_path = os.path.dirname(os.path.realpath(__file__))
for i in range(10):
if "setup.py" in os.listdir(dir_path):
return dir_path
else:
dir_path = os.path.abspath(os.path.join(dir_path, os.pardir))
raise Exception("Cannot find root project folder")
|
SirRujak/SirBot
|
dev/TOPSECRET/SirBot/SirBot.py
|
Python
|
mit
| 1,743
| 0.018933
|
# -*- coding: utf-8 -*-
#main sirbot script
ON = 1
SPLASH = 1
##try:
import lib.sirbot.initialize as initialize
if(SPLASH == 1):
#display splash
splash = initialize.splashing()
root = splash.root()
#import configurations
from lib.sirbot.configloader import configloader
config = configloader()
#import main runtime classes
from lib.sirbot.application import application
if(config['GUI'] == 1):
from lib.sirbot.interfaceDEV import interface
from lib.sirbot.assetloader import assetloader
#import shutdown module
from lib.sirbot.shutdown import shutdown
#import tools
from multiprocessing import Queue
from time import sleep
if __name__ == '__main__':
#initialize primary modules and queues
if(config['GUI'] == 1):
interinput = Queue()
interoutput = Queue()
assets = assetloader()
inter = interface(config,assets,interinput,interoutput,root)
app = application(config,interinput,interoutput)
else:
app = application(config)
#destroy splash
if(SPLASH == True):
sleep(1)
splash.destroySplash()
#runtime loop - single thread
idle = 0.01
if(config['GUI'] == 1):
inter.display()
app.begin()#temporary
while(ON):
ON = ON * app.tick()
ON = ON * inter.tick()
sleep(idle)
app.shutdown()
inter.shutdown()
else:
app.begin()#temporary
while(ON):
ON = ON * app.tick()
sleep(idle)
app.shutdown()
#send current configuration options to be saved for next startup
if(config['GUI'] == 1):
shutdown(config,interinput,interoutput)
else:
shutdown(config)
##except:
## pass
|
GeoscienceAustralia/eo-datasets
|
eodatasets3/model.py
|
Python
|
apache-2.0
| 3,589
| 0.001115
|
from pathlib import Path
from typing import Tuple, Dict, Optional, List, Union
from uuid import UUID
import affine
import attr
from ruamel.yaml.comments import CommentedMap
from shapely.geometry.base import BaseGeometry
from eodatasets3.properties import Eo3Dict, Eo3Interface
DEA_URI_PREFIX = "https://collections.dea.ga.gov.au"
ODC_DATASET_SCHEMA_URL = "https://schemas.opendatacube.org/dataset"
# Either a local filesystem path or a string URI.
# (the URI can use any scheme supported by rasterio, such as tar:// or https:// or ...)
Location = Union[Path, str]
@attr.s(auto_attribs=True, slots=True)
class ProductDoc:
"""
The product that this dataset belongs to.
"name" is the local name in ODC.
href is intended as a more global unique "identifier" uri for the product.
"""
name: str = None
href: str = None
@attr.s(auto_attribs=True, slots=True, hash=True)
class GridDoc:
"""The grid describing a measurement/band's pixels"""
shape: Tuple[int, int]
transform: affine.Affine
@attr.s(auto_attribs=True, slots=True)
class MeasurementDoc:
"""
A Dataset's reference to a measurement file.
"""
path: str
band: Optional[int] = 1
layer: Optional[str] = None
grid: str = "default"
name: str = attr.ib(metadata=dict(doc_exclude=True), default=None)
alias: str = attr.ib(metadata=dict(doc_exclude=True), default=None)
@attr.s(auto_attribs=True, slots=True)
class AccessoryDoc:
"""
An accessory is an extra file included in the dataset that is not
a measurement/band.
For example: thumbnails, alternative metadata documents, or checksum files.
"""
path: str
type: str = None
name: str = attr.ib(metadata=dict(doc_exclude=True), default=None)
@attr.s(auto_attribs=True, slots=True)
class DatasetDoc(Eo3Interface):
"""
An EO3 dataset document
Includes :class:`.Eo3Interface` methods
|
for metadata access::
>>> p = DatasetDoc()
>>> p.platform = 'LANDSAT_8'
>>> p.processed = '2018-04-03'
>>> p.properties['odc:processing_datetime']
    datetime.datetime(2018, 4, 3, 0, 0, tzinfo=datetime.timezone.utc)
"""
#: Dataset UUID
id: UUID = None
#: Human-readable identifier for the dataset
label: str = None
#: The product name (local) and/or url (global)
product: ProductDoc = None
#: Location(s) where this dataset is stored.
#:
#: (ODC supports multiple locations when the same dataset is stored in multiple places)
#:
    #: They are fully qualified URIs (``file://...``, ``https://...``, ``s3://...``)
#:
#: All other paths in the document (measurements, accessories) are relative to the
#: chosen location.
locations: List[str] = None
#: CRS string. Eg. ``epsg:3577``
crs: str = None
#: Shapely geometry of the valid data coverage
#:
#: (it must contain all non-empty pixels of the image)
geometry: BaseGeometry = None
#: Grid specifications for measurements
grids: Dict[str, GridDoc] = None
#: Raw properties
properties: Eo3Dict = attr.ib(factory=Eo3Dict)
#: Loadable measurements of the dataset
measurements: Dict[str, MeasurementDoc] = None
#: References to accessory files
#:
#: Such as thumbnails, checksums, other kinds of metadata files.
#:
#: (any files included in the dataset that are not measurements)
accessories: Dict[str, AccessoryDoc] = attr.ib(factory=CommentedMap)
#: Links to source dataset uuids
lineage: Dict[str, List[UUID]] = attr.ib(factory=CommentedMap)
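# Usage sketch: building a DatasetDoc by hand with placeholder values; a real
# document would also carry geometry, grids, measurements and accessories.
def _example_dataset() -> DatasetDoc:
    from uuid import uuid4
    doc = DatasetDoc()
    doc.id = uuid4()
    doc.label = "example_dataset"
    doc.product = ProductDoc(name="example_product")
    doc.crs = "epsg:3577"
    return doc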
|
boriel/zxbasic
|
src/arch/z80/backend/runtime/datarestore.py
|
Python
|
gpl-3.0
| 274
| 0
|
# Runtime labels
from .namespace import NAMESPACE
class DataRestoreLabels:
READ = f"{NAMESPACE}.__READ"
RESTORE = f"{NAMESPACE}.__RESTORE"
REQUIRED_MODULES = {
    DataRestoreLabels.READ: "read_restore.asm",
DataRestoreLabels.RESTORE: "read_restore.asm",
}
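# Usage sketch: both labels deliberately resolve to the same assembly source,
# so requesting READ or RESTORE pulls in the same runtime module.
def _example_lookup():
    return REQUIRED_MODULES[DataRestoreLabels.READ]  # "read_restore.asm"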
|
nijel/weblate
|
weblate/utils/requests.py
|
Python
|
gpl-3.0
| 2,265
| 0
|
#
# Copyright © 2012–2022 Michal Čihař <michal@cihar.com>
#
# This file is part of Weblate <https://weblate.org/>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
import requests
from django.core.cache import cache
from weblate.logger import LOGGER
from weblate.utils.errors import report_error
from weblate.utils.version import USER_AGENT
def request(method, url, headers=None, **kwargs):
agent = {"User-Agent": USER_AGENT}
if headers:
headers.update(agent)
else:
headers = agent
response = requests.request(method, url, headers=headers, **kwargs)
response.raise_for_status()
return response
def get_uri_error(uri):
"""Return error for fetching the URL or None if it works."""
if uri.startswith("https://nonexisting.weblate.org/"):
return "Non existing test URL"
cache_key = f"uri-check-{uri}"
cached = cache.get(cache_key)
if cached is True:
LOGGER.debug("URL check for %s, cached success", uri)
return None
if cached:
        # The cache contains a string here
LOGGER.debug("URL check for %s, cached failure", uri)
return cached
try:
with request("get", uri, stream=True):
cache.set(cache_key, True, 12 * 3600)
LOGGER.debug("URL check for %s, tested success", uri)
return None
except requests.exceptions.RequestException as error:
report_error(cause="URL check failed")
if getattr(error.response, "status_code", 0) == 429:
# Silently ignore rate limiting issues
return None
result = str(error)
cache.set(cache_key, result, 3600)
return result
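# Usage sketch: the URL is hypothetical. A None result means the URL was
# reachable; any string is the (cached) failure description.
def example_check(uri="https://example.com/"):
    error = get_uri_error(uri)
    if error is not None:
        LOGGER.warning("URL check failed for %s: %s", uri, error)
    return error is None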
|
hello-base/web
|
apps/history/models.py
|
Python
|
apache-2.0
| 1,311
| 0.001526
|
from django.contrib.contenttypes.fields import GenericForeignKey
from django.contrib.contenttypes.models import ContentType
from django.db import models
from model_utils import Choices
from model_utils.models import TimeStampedModel
class History(TimeStampedModel):
RESOLUTIONS = Choices('second', 'minute', 'hour', 'day', 'week', 'month', 'year')
resolution = models.CharField(choices=RESOLUTIONS, default=RESOLUTIONS.day, max_length=6)
tag = models.SlugField()
datetime = models.DateTimeField()
source_type = models.ForeignKey(ContentType)
source_id = models.PositiveIntegerField(blank=True, null=True)
source_object = GenericForeignKey('source_type', 'source_id')
sum = models.IntegerField(default=0)
delta = models.IntegerField(default=0)
class Meta:
get_latest_by = 'datetime'
verbose_name_plural = 'histories'
def __unicode__(self):
return u'%s' % (self.tag)
def save(self, *args, **kwargs):
try:
filters = {'resolution': self.resolution, 'tag': self.tag}
previous = self._default_manager.filter(**filters).latest()
        except self._meta.model.DoesNotExist:
pass
else:
            self.delta = self.sum - previous.sum
super(History, self).save(*args, **kwargs)
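# Usage sketch: the tag, total and source object are hypothetical; save()
# fills in `delta` automatically from the latest earlier entry that matches
# the same resolution and tag.
def example_record(source_obj, total, when):
    return History.objects.create(
        resolution=History.RESOLUTIONS.day,
        tag='follower-count',
        datetime=when,
        source_object=source_obj,
        sum=total)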
|
SMALLplayer/smallplayer-image-creator
|
storage/.xbmc/addons/script.module.urlresolver/lib/urlresolver/plugins/vidhog.py
|
Python
|
gpl-2.0
| 4,269
| 0.008433
|
'''
Vidhog urlresolver plugin
Copyright (C) 2013 Vinnydude
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
from t0mm0.common.net import Net
from urlresolver.plugnplay.interfaces import UrlResolver
from urlresolver.plugnplay.interfaces import PluginSettings
from urlresolver.plugnplay import Plugin
import re, time
import xbmc, xbmcgui  # XBMC runtime modules used by the captcha dialog below
from urlresolver import common
net = Net()
class VidhogResolver(Plugin, UrlResolver, PluginSettings):
implements = [UrlResolver, PluginSettings]
name = "vidhog"
def __init__(self):
p = self.get_setting('priority') or 100
self.priority = int(p)
self.net = Net()
def get_media_url(self, host, media_id):
try:
url = self.get_url(host, media_id)
html = self.net.http_GET(url).content
check = re.compile('fname').findall(html)
if check:
data = {}
r = re.findall(r'type="(?:hidden|submit)?" name="(.+?)"\s* value="?(.+?)">', html)
for name, value in r:
data[name] = value
html = net.http_POST(url, data).content
else:
data = {}
r = re.findall(r'type="(?:hidden|submit)?" name="(.+?)"\s* value="?(.+?)">', html)
for name, value in r:
data[name] = value
captchaimg = re.search('<img src="(http://www.vidhog.com/captchas/.+?)"', html)
if captchaimg:
img = xbmcgui.ControlImage(550,15,240,100,captchaimg.group(1))
wdlg = xbmcgui.WindowDialog()
wdlg.addControl(img)
wdlg.show()
time.sleep(3)
kb = xbmc.Keyboard('', 'Type the letters in the image', False)
kb.doModal()
capcode = kb.getText()
if (kb.isConfirmed()):
userInput = kb.getText()
if userInput != '':
capcode = kb.getText()
elif userInput == '':
raise Exception ('You must enter text in the image to access video')
wdlg.close()
common.addon.show_countdown(10, title='Vidhog', text='Loading Video...')
data.update({'code':capcode})
else:
common.addon.show_countdown(20, title='Vidhog', text='Loading Video...')
html = net.http_POST(url, data).content
if re.findall('err', html):
raise Exception('Wrong Captcha')
match = re.search("product_download_url=(.+?)'", html)
if not match:
raise Exception('could not find video')
return match.group(1)
except Exception, e:
            common.addon.log('**** Vidhog error occurred: %s' % e)
common.addon.show_small_popup('Error', str(e), 5000, '')
return self.unresolvable(code=0, msg='Exception: %s' % e)
def get_url(self, host, media_id):
return 'http://www.vidhog.com/%s' % media_id
def get_host_and_id(self, url):
r = re.search('//(.+?)/([0-9a-zA-Z]+)',url)
if r:
return r.groups()
else:
return False
return('host', 'media_id')
def valid_url(self, url, host):
if self.get_setting('enabled') == 'false': return False
return (re.match('http://(www.)?vidhog.com/' +
'[0-9A-Za-z]+', url) or
'vidhog' in host)
|
vijaykumar0690/security_monkey
|
security_monkey/watchers/security_group.py
|
Python
|
apache-2.0
| 8,192
| 0.001831
|
# Copyright 2014 Netflix, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
.. module: security_monkey.watchers.security_group
:platform: Unix
.. version:: $$VERSION$$
.. moduleauthor:: Patrick Kelley <pkelley@netflix.com> @monkeysecurity
"""
from security_monkey.watcher import Watcher
from security_monkey.watcher import ChangeItem
from security_monkey.constants import TROUBLE_REGIONS
from security_monkey.exceptions import BotoConnectionIssue
from security_monkey import app
class SecurityGroup(Watcher):
index = 'securitygroup'
i_am_singular = 'Security Group'
i_am_plural = 'Security Groups'
def __init__(self, accounts=None, debug=False):
super(SecurityGroup, self).__init__(accounts=accounts, debug=debug)
# TODO: grab those from DB
self.instance_detail = app.config.get("SECURITYGROUP_INSTANCE_DETAIL", 'FULL')
self.honor_ephemerals = True
self.ephemeral_paths = ["assigned_to"]
def get_detail_level(self):
""" Return details level: 'NONE' / 'SUMMARY' / 'FULL' """
if self.instance_detail:
return self.instance_detail
else:
return 'NONE'
def slurp(self):
"""
:returns: item_list - list of Security Groups.
:returns: exception_map - A dict where the keys are a tuple containing the
location of the exception and the value is the actual exception
"""
self.prep_for_slurp()
item_list = []
exception_map = {}
from security_monkey.common.sts_connect import connect
for account in self.accounts:
try:
ec2 = connect(account, 'ec2')
regions = ec2.get_all_regions()
except Exception as e: # EC2ResponseError
# Some Accounts don't subscribe to EC2 and will throw an exception here.
exc = BotoConnectionIssue(str(e), self.index, account, None)
self.slurp_exception((self.index, account), exc, exception_map)
continue
for region in regions:
app.logger.debug("Checking {}/{}/{}".format(self.index, account, region.name))
try:
rec2 = connect(account, 'ec2', region=region)
# Retrieve security groups here
sgs = self.wrap_aws_rate_limited_call(
rec2.get_all_security_groups
)
if self.get_detail_level() != 'NONE':
# We fetch tags here to later correlate instances
tags = self.wrap_aws_rate_limited_call(
rec2.get_all_tags
)
# Retrieve all instances
instances = self.wrap_aws_rate_limited_call(
rec2.get_only_instances
)
app.logger.info("Number of instances found in region {}: {}".format(region.name, len(instances)))
except Exception as e:
if region.name not in TROUBLE_REGIONS:
exc = BotoConnectionIssue(str(e), self.index, account, region.name)
self.slurp_exception((self.index, account, region.name), exc, exception_map)
continue
app.logger.debug("Found {} {}".format(len(sgs), self.i_am_plural))
if self.get_detail_level() != 'NONE':
app.logger.info("Creating mapping of sg_id's to instances")
# map sgid => instance
sg_instances = {}
for instance in instances:
for group in instance.groups:
if group.id not in sg_instances:
sg_instances[group.id] = [instance]
else:
sg_instances[group.id].append(instance)
app.logger.info("Creating mapping of instance_id's to tags")
# map instanceid => tags
instance_tags = {}
for tag in tags:
if tag.res_id not in instance_tags:
instance_tags[tag.res_id] = [tag]
else:
instance_tags[tag.res_id].append(tag)
app.logger.info("Done creating mappings")
for sg in sgs:
if self.check_ignore_list(sg.name):
continue
item_config = {
"id": sg.id,
"name": sg.name,
"description": sg.description,
"vpc_id": sg.vpc_id,
"owner_id": sg.owner_id,
"region": sg.region.name,
"rules": [],
"a
|
ssigned_to": None
}
for rule in sg.rules:
for grant in rule.grants:
rule_config = {
"ip_protocol": rule.ip_protocol,
"from_port": rule.from_port,
"to_port": rule.to_port,
"cidr_ip": grant.cidr_ip,
"group_id": grant.group_id,
"name": grant.name,
"owner_id": grant.owner_id
}
item_config['rules'].append(rule_config)
item_config['rules'] = sorted(item_config['rules'])
if self.get_detail_level() == 'SUMMARY':
if sg.id in sg_instances:
item_config["assigned_to"] = "{} instances".format(len(sg_instances[sg.id]))
else:
item_config["assigned_to"] = "0 instances"
elif self.get_detail_level() == 'FULL':
assigned_to = []
if sg.id in sg_instances:
for instance in sg_instances[sg.id]:
if instance.id in instance_tags:
tagdict = {tag.name: tag.value for tag in instance_tags[instance.id]}
tagdict["instance_id"] = instance.id
else:
tagdict = {"instance_id": instance.id}
assigned_to.append(tagdict)
item_config["assigned_to"] = assigned_to
# Issue 40: Security Groups can have a name collision between EC2 and
# VPC or between different VPCs within a given region.
if sg.vpc_id:
sg_name = "{0} ({1} in {2})".format(sg.name, sg.id, sg.vpc_id)
else:
sg_name = "{0} ({1})".format(sg.name, sg.id)
item = SecurityGroupItem(region=region.name, account=account, name=sg_name, config=item_config)
item_list.append(item)
return item_list, exception_map
class SecurityGroupItem(ChangeItem):
def __init__(self, region=None, account=None, name=None, config={}):
super(SecurityGroupItem, self).__init__(
index=SecurityGroup.index,
region=region,
account=account,
name=name,
            new_config=config)
|
feng-zhe/ZheQuant-brain-python
|
zq_calc/inc_pct.py
|
Python
|
apache-2.0
| 2,959
| 0.005069
|
'''
This module contains a calculator for the percentage increase
'''
import json
import datetime
import pytz
import zq_gen.str as zq_str
import zq_db.mongodb as zq_mgdb
key_err_msg = 'Missing parameter in command string'
val_err_msg = 'Error in parsing the command string'
no_rcrd_msg = 'No records in specified date'
begin_zero_msg = 'Begin value is zero, cannot calculate'
def inc_pct(cmd_str):
'''
    Calculate the percentage increase
Args:
cmd_str: A string of parameters
-c: The json of stock composition
e.g.
{
"test_code1": 200,
"test_code2": 300,
...
}
-b: A string shows the begin date in format YYYYMMDD
-e: A string shows the end date in format YYYYMMDD
Returns:
        A number with four decimals indicating the percentage increase on success.
        e.g. a return value of 0.0111 means 1.11%
        Otherwise returns a string describing the problem.
Raises:
N/A
'''
try:
compo, begin, end = _parse_cmd(cmd_str)
except KeyError:
return key_err_msg
except ValueError:
return val_err_msg
    begin_value = 0
end_value = 0
for code, num in compo.items():
begin_doc = zq_mgdb.get_single_stock_data(code, begin)
end_doc = zq_mgdb.get_single_stock_data(code, end)
if not begin_doc or not end_doc:
return no_rcrd_msg
begin_value += begin_doc['close'] * num
end_value += end_doc['close'] * num
if not begin_value:
return begin_zero_msg
return round((end_value - begin_value) / begin_value, 4)
def _parse_cmd(cmd_str):
'''
Parse the command string to get arguments
Args:
cmd_str: A string of parameters
Returns:
A tuple (compo, begin, end) in which:
The compo is the dictionary containing composition info.
The begin is the datetime object for begin date.
            The end is the datetime object for the end date.
Raises:
KeyError: Thrown when there is missing parameters in command string
ValueError: Thrown when the input date string is not correct
'''
cmd_dict = zq_str.cmd_str2dic(cmd_str)
compo_str = cmd_dict['-c']
begin_str = cmd_dict['-b']
end_str = cmd_dict['-e']
begin_year = int(begin_str[0:4])
begin_month = int(begin_str[4:6])
begin_day = int(begin_str[6:])
end_year = int(end_str[0:4])
end_month = int(end_str[4:6])
end_day = int(end_str[6:])
tzinfo = pytz.timezone('Asia/Shanghai')
begin = datetime.datetime(begin_year, begin_month, begin_day, tzinfo=tzinfo)
end = datetime.datetime(end_year, end_month, end_day, tzinfo=tzinfo)
compo = json.loads(compo_str)
return compo, begin, end
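# Usage sketch: the codes, share counts and dates are hypothetical, the exact
# quoting rules depend on zq_str.cmd_str2dic (not shown here), and a real run
# needs the stock database to be populated. A successful call returns a ratio
# such as 0.0111, i.e. +1.11%.
if __name__ == '__main__':
    cmd = '-c {"test_code1":200,"test_code2":300} -b 20200101 -e 20200131'
    print(inc_pct(cmd))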
|
3liz/QgisQuickOSMPlugin
|
QuickOSM/definitions/format.py
|
Python
|
gpl-2.0
| 657
| 0.001522
|
"""Definitions for ou
|
tput formats."""
import collections
from enum import Enum, unique
__copyright__ = 'Copyright 2021, 3Liz'
__license__ = 'GPL version 3'
__email__ = 'info@3liz.org'
format_output = collections.namedtuple('format', ['label', 'driver_name', 'extension'])
@unique
class Format(Enum):
""" Name of output formats."""
GeoJSON = format_output('GeoJSON', 'GeoJSON', 'geojson')
"""GeoJSON"""
GeoPackage = format_output('GeoPackage', 'GPKG', 'gpkg')
"""GeoPackage"""
Shapefile = format_output('ESRI Shapefile', 'ESRI Shapefile', 'shp')
"""Shapefile"""
Kml = format_output('Kml', 'KML', 'kml')
"""Kml"""
|
paralab/Dendro4
|
slurm/slurm_pbs.py
|
Python
|
gpl-2.0
| 2,491
| 0.028904
|
# @author: Milinda Fernando
# School of Computing, University of Utah.
# Generates all the SLURM jobs for the SC16 poster energy measurements.
import argparse
if __name__ == "__main__":
parser = argparse.ArgumentParser(prog='slurm_pbs')
parser.add_argument('-n','--npes', help=' number of mpi tasks')
parser.add_argument('-N','--N',help='number of cpu cores per single mpi task')
parser.add_argument('-g','--grainSz',help='grain size of the problem')
parser.add_argument('-i','--Iterations',help='number of iterations for the matvec operation')
#parser.add_argument('--tolBegin',help='tol begin value')
#parser.add_argument('--tolEnd', help='tol end value')
#parser.add_argument('--tolStep',help='tol step')
parser.add_argument('--SFC',help='SFC method H or M')
args=parser.parse_args()
TolList=[1e-5,1e-4,1e-3,1e-2,1e-1,2e-1,3e-1]
for tol in TolList:
        fileName='sc16_fpart_'+args.SFC+'_'+str(tol)+'.pbs'
pbs_file=open(fileName,'w')
pbs_file.write('#!/bin/bash\n')
pbs_file.write('#SBATCH --ntasks='+args.npes+'\n')
pbs_file.write('#SBATCH --cpus-per-task='+args.N+'\n')
pbs_file.write('#SBATCH -o /exp-share/dendro/build/sc16-poster-final-jobs/%J.out\n')
pbs_file.write('#SBATCH -e /exp-share/dendro/build/sc16-poster-final-jobs/%J.err\n')
pbs_file.write('#SBATCH --time=24:00:00\n')
pbs_file.write('#SBATCH --account=perf\n')
        pbs_file.write('n='+args.N+'\n')
pbs_file.write('inputFile=ip\n')
pbs_file.write('numPts='+args.grainSz+'\n')
pbs_file.write('dim=3\n')
pbs_file.write('maxDepth=30\n')
pbs_file.write('solvU=0\n')
pbs_file.write('writeB=0\n')
pbs_file.write('k=1\n')
pbs_file.write('inCorner=1\n')
pbs_file.write('numLoops='+args.Iterations+'\n')
pbs_file.write('compress=0\n')
pbs_file.write('export OMP_NUM_THREADS=1\n')
pbs_file.write('cd /exp-share/dendro/build\n')
pbs_file.write('tol=0.000001\n')
if args.SFC=='H':
pbs_file.write('echo \'Executing Hilbert\'\n')
pbs_file.write('mpirun --allow-run-as-root -np $n ./tstTreeSortMatVec_h $inputFile $numPts $dim $maxDepth $tol 1 $solvU $writeB $k $inCorner $numLoops $compress\n')
elif args.SFC=='M':
pbs_file.write('echo \'Executing Morton\'\n')
pbs_file.write('mpirun --allow-run-as-root -np $n ./tstTreeSortMatVec_m $inputFile $numPts $dim $maxDepth $tol 1 $solvU $writeB $k $inCorner $numLoops $compress\n')
pbs_file.close()
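# Usage sketch (all flag values are illustrative):
#   python slurm_pbs.py -n 64 -N 4 -g 100000 -i 10 --SFC H
# This writes one sc16_fpart_H_<tol>.pbs job script per tolerance in TolList.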
|
CodeNameGhost/shiva
|
thirdparty/scapy/layers/tls/automaton_srv.py
|
Python
|
mit
| 30,139
| 0.001161
|
## This file is part of Scapy
## Copyright (C) 2007, 2008, 2009 Arnaud Ebalard
## 2015, 2016, 2017 Maxence Tury
## This program is published under a GPLv2 license
"""
TLS server automaton. This makes for a primitive TLS stack.
Obviously you need rights for network access.
We support versions SSLv2 to TLS 1.2, along with many features.
There is no session resumption mechanism for now.
In order to run a server listening on tcp/4433:
> from scapy.all import *
> t = TLSServerAutomaton(mycert='<cert.pem>', mykey='<key.pem>')
> t.run()
"""
from __future__ import print_function
import socket
from scapy.utils import randstring, repr_hex
from scapy.automaton import ATMT
from scapy.layers.tls.automaton import _TLSAutomaton
from scapy.layers.tls.cert import PrivKeyRSA, PrivKeyECDSA
from scapy.layers.tls.basefields import _tls_version
from scapy.layers.tls.session import tlsSession
from scapy.layers.tls.handshake import *
from scapy.layers.tls.handshake_sslv2 import *
from scapy.layers.tls.record import (TLS, TLSAlert, TLSChangeCipherSpec,
TLSApplicationData)
from scapy.layers.tls.crypto.suites import (_tls_cipher_suites_cls,
get_usable_ciphersuites)
class TLSServerAutomaton(_TLSAutomaton):
"""
A simple TLS test server automaton. Try to overload some states or
conditions and see what happens on the other side.
Because of socket and automaton limitations, for now, the best way to
    interrupt the server is by sending it 'stop_server'. Interruptions with
    Ctrl-Z should work, but this might leave a loose listening socket behind.
    In case the server receives a TLSAlert (whatever its type), or a 'goodbye'
    message in an SSLv2 session, it will close the client session with a
similar message, and start waiting for new client connections.
_'mycert' and 'mykey' may be provided as filenames. They are needed for any
server authenticated handshake.
_'preferred_ciphersuite' allows the automaton to choose a cipher suite when
offered in the ClientHello. If absent, another one will be chosen.
_'client_auth' means the client has to provide a certificate.
_'is_echo_server' means that everything received will be sent back.
_'max_client_idle_time' is the maximum silence duration from the client.
Once this limit has been reached, the client (if still here) is dropped,
and we wait for a new connection.
"""
def parse_args(self, server="127.0.0.1", sport=4433,
mycert=None, mykey=None,
preferred_ciphersuite=None,
client_auth=False,
is_echo_server=True,
max_client_idle_time=60,
**kargs):
super(TLSServerAutomaton, self).parse_args(mycert=mycert,
mykey=mykey,
**kargs)
try:
if ':' in server:
socket.inet_pton(socket.AF_INET6, server)
else:
socket.inet_pton(socket.AF_INET, server)
tmp = socket.getaddrinfo(server, sport)
except:
tmp = socket.getaddrinfo(socket.getfqdn(server), sport)
self.serversocket = None
self.ip_family = tmp[0][0]
self.local_ip = tmp[0][4][0]
self.local_port = sport
self.remote_ip = None
self.remote_port = None
self.preferred_ciphersuite = preferred_ciphersuite
self.client_auth = client_auth
self.is_echo_server = is_echo_server
self.max_client_idle_time = max_client_idle_time
def vprint_sessioninfo(self):
if self.verbose:
s = self.cur_session
v = _tls_version[s.tls_version]
self.vprint("Version : %s" % v)
cs = s.wcs.ciphersuite.name
self.vprint("Cipher suite : %s" % cs)
ms = s.master_secret
self.vprint("Master secret : %s" % repr_hex(ms))
if s.client_certs:
self.vprint("Client certificate chain: %r" % s.client_certs)
self.vprint()
def http_sessioninfo(self):
header = "HTTP/1.1 200 OK\r\n"
header += "Server: Scapy TLS Extension\r\n"
header += "Content-type: text/html\r\n"
header += "Content-length: %d\r\n\r\n"
s = "----- Scapy TLS Server Automaton -----\n\n"
s += "Information on current TLS session:\n\n"
s += "Local end : %s:%d\n"
|
% (self.local_ip, self.local_port)
s += "Remote end : %s:%d\n" % (self.remote_ip, self.remote_port)
        v = _tls_version[self.cur_session.tls_version]
s += "Version : %s\n" % v
cs = self.cur_session.wcs.ciphersuite.name
s += "Cipher suite : %s\n" % cs
ms = self.cur_session.master_secret
s += "Master secret : %s\n" % repr_hex(ms)
body = "<html><body><pre>%s</pre></body></html>\r\n\r\n" % s
answer = (header+body) % len(body)
return answer
@ATMT.state(initial=True)
def INITIAL(self):
self.vprint("Starting TLS server automaton.")
self.vprint("Receiving 'stop_server' will cause a graceful exit.")
self.vprint("Interrupting with Ctrl-Z might leave a loose socket hanging.")
raise self.BIND()
@ATMT.state()
def BIND(self):
s = socket.socket(self.ip_family, socket.SOCK_STREAM)
self.serversocket = s
s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
try:
s.bind((self.local_ip, self.local_port))
s.listen(1)
except:
m = "Unable to bind on %s:%d!" % (self.local_ip, self.local_port)
self.vprint()
self.vprint(m)
self.vprint("Maybe some server is already listening there?")
self.vprint()
raise self.FINAL()
raise self.WAITING_CLIENT()
@ATMT.state()
def WAITING_CLIENT(self):
self.vprint()
self.vprint("Waiting for a new client on %s:%d" % (self.local_ip,
self.local_port))
self.socket, addr = self.serversocket.accept()
if not isinstance(addr, tuple):
addr = self.socket.getpeername()
if len(addr) > 2:
addr = (addr[0], addr[1])
self.remote_ip, self.remote_port = addr
self.vprint("Accepted connection from %s:%d" % (self.remote_ip,
self.remote_port))
self.vprint()
raise self.INIT_TLS_SESSION()
@ATMT.state()
def INIT_TLS_SESSION(self):
"""
XXX We should offer the right key according to the client's suites. For
now server_rsa_key is only used for RSAkx, but we should try to replace
every server_key with both server_rsa_key and server_ecdsa_key.
"""
self.cur_session = tlsSession(connection_end="server")
self.cur_session.server_certs = [self.mycert]
self.cur_session.server_key = self.mykey
if isinstance(self.mykey, PrivKeyRSA):
self.cur_session.server_rsa_key = self.mykey
#elif isinstance(self.mykey, PrivKeyECDSA):
# self.cur_session.server_ecdsa_key = self.mykey
raise self.WAITING_CLIENTFLIGHT1()
@ATMT.state()
def WAITING_CLIENTFLIGHT1(self):
self.get_next_msg()
raise self.RECEIVED_CLIENTFLIGHT1()
@ATMT.state()
def RECEIVED_CLIENTFLIGHT1(self):
pass
########################### TLS handshake #################################
@ATMT.condition(RECEIVED_CLIENTFLIGHT1, prio=1)
def should_handle_ClientHello(self):
self.raise_on_packet(TLSClientHello,
self.HANDLED_CLIENTHELLO)
@ATMT.state()
def HANDLED_CLIENTHELLO(self):
raise self.PREPARE_SERVERFLIGHT1()
@ATMT.condition(HANDLED_CLIENTHELLO)
def should_check_ciphersuites(self):
"""
We extract cipher suites candidates from the client's proposition.
|
lazlolazlolazlo/onionshare
|
onionshare/settings.py
|
Python
|
gpl-3.0
| 4,545
| 0.00176
|
# -*- coding: utf-8 -*-
"""
OnionShare | https://onionshare.org/
Copyright (C) 2017 Micah Lee <micah@micahflee.com>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import json
import os
import platform
from . import strings, common
class Settings(object):
"""
This class stores all of the settings for OnionShare, specifically for how
    to connect to Tor. If it can't find the settings file, it uses the default,
which is to attempt to connect automatically using default Tor Browser
settings.
"""
def __init__(self, config=False):
common.log('Settings', '__init__')
# Default config
self.filename = self.build_filename()
# If a readable config file was provided, use that instead
if config:
if os.path.isfile(config):
self.filename = config
else:
common.log('Settings', '__init__', 'Supplied config does not exist or is unreadable. Falling back to default location')
# These are the default settings. They will get overwritten when loading from disk
self.default_settings = {
'version': common.get_version(),
'connection_type': 'bundled',
'control_port_address': '127.0.0.1',
'control_port_port': 9051,
'socks_address': '127.0.0.1',
'socks_port': 9050,
'socket_file_path': '/var/run/tor/control',
'auth_type': 'no_auth',
'auth_password': '',
'close_after_first_download': True,
'systray_notifications': True,
'use_stealth': False,
'use_autoupdate': True,
'autoupdate_timestamp': None
}
self._settings = {}
self.fill_in_defaults()
def fill_in_defaults(self):
"""
If there are any missing settings from self._settings, replace them with
their default values.
"""
for key in self.default_settings:
if key not in self._settings:
self._settings[key] = self.default_settings[key]
def build_filename(self):
"""
Returns the path of the settings file.
"""
p = platform.system()
if p == 'Windows':
appdata = os.environ['APPDATA']
return '{}\\OnionShare\\onionshare.json'.format(appdata)
elif p == 'Darwin':
return os.path.expanduser('~/Library/Application Support/OnionShare/onionshare.json')
else:
return os.path.expanduser('~/.config/onionshare/onionshare.json')
def load(self):
"""
Load the settings from file.
"""
common.log('Settings', 'load')
# If the settings file exists, load it
if os.path.exists(self.filename):
try:
common.log('Settings', 'load', 'Trying to load {}'.format(self.filename))
with open(self.filename, 'r') as f:
self._settings = json.load(f)
self.fill_in_defaults()
except:
pass
def save(self):
"""
Save settings to file.
"""
common.log('Settings', 'save')
try:
os.makedirs(os.path.dirname(self.filename))
except:
pass
open(self.filename, 'w').write(json.dumps(self._settings))
print(strings._('settings_saved').format(self.filename))
def get(self, key):
return self._settings[key]
def set(self, key, val):
# If typecasting int values fails, fallback to default values
if key == 'control_port_port' or key == 'socks_port':
try:
val = int(val)
except:
if key == 'control_port_port':
val = self.default_settings['control_port_port']
elif key == 'socks_port':
val = self.default_settings['socks_port']
self._settings[key] = val
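
# Hedged usage sketch (not part of the original module): driving the Settings
# class above end to end. The keys come from default_settings in __init__;
# save() additionally assumes the locale strings have been loaded elsewhere.
if __name__ == '__main__':
    settings = Settings()                  # resolves the platform-specific JSON path
    settings.load()                        # merges on-disk values over the defaults
    settings.set('socks_port', '9150')     # set() coerces port values to int
    settings.save()                        # writes JSON back to settings.filename
    print(settings.get('socks_port'))      # -> 9150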

willthames/ansible-lint | test/TestMetaChangeFromDefault.py | Python | mit | 1,169 | 0.000855

# pylint: disable=preferred-module # FIXME: remove once migrated per GH-725
import unittest
from ansiblelint.rules import RulesCollection
from ansiblelint.rules.MetaChangeFromDefaultRule import MetaChangeFromDefaultRule
from ansiblelint.testing import RunFromText
DEFAULT_GALAXY_INFO = '''
galaxy_info:
author: your name
description: your description
company: your company (optional)
license: license (GPLv2, CC-BY, etc)
'''
class TestMetaChangeFromDefault(unittest.TestCase):
collection = RulesCollection()
    collection.register(MetaChangeFromDefaultRule())
def setUp(self):
self.runner = RunFromText(self.collection)
def test_default_galaxy_info(self):
results = self.runner.run_role_meta_main(DEFAULT_GALAXY_INFO)
self.assertIn("Should change default metadata: author",
str(results))
self.assertIn("Should change default metadata: description",
str(results))
self.assertIn("Should change default metadata: company",
str(results))
self.assertIn("Should change default metadata: license",
str(results))
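
# Hedged counter-example (not in the original file): metadata edited away from
# the scaffold defaults should produce no matches, e.g. a companion test could
# assert the negative:
#
#   CUSTOM_GALAXY_INFO = DEFAULT_GALAXY_INFO.replace('your name', 'Jane Doe')
#   results = runner.run_role_meta_main(CUSTOM_GALAXY_INFO)
#   assert "Should change default metadata: author" not in str(results)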

nlgcoin/guldencoin-official | test/functional/feature_maxuploadtarget.py | Python | mit | 6,606 | 0.001968

#!/usr/bin/env python3
# Copyright (c) 2015-2018 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test behavior of -maxuploadtarget.
* Verify that getdata requests for old blocks (>1week) are dropped
if uploadtarget has been reached.
* Verify that getdata requests for recent blocks are respected even
if uploadtarget has been reached.
* Verify that the upload counters are reset after 24 hours.
"""
from collections import defaultdict
import time
from test_framework.messages import CInv, msg_getdata
from test_framework.mininode import P2PInterface
from test_framework.test_framework import GuldenTestFramework
from test_framework.util import assert_equal, mine_large_block
class TestP2PConn(P2PInterface):
def __init__(self):
super().__init__()
self.block_receive_map = defaultdict(int)
def on_inv(self, message):
pass
def on_block(self, message):
message.block.calc_sha256()
self.block_receive_map[message.block.sha256] += 1
class MaxUploadTest(GuldenTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 1
self.extra_args = [["-maxuploadtarget=800"]]
# Cache for utxos, as the listunspent may take a long time later in the test
self.utxo_cache = []
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
def run_test(self):
# Before we connect anything, we first set the time on the node
# to be in the past, otherwise things break because the CNode
# time counters can't be reset backward after initialization
old_time = int(time.time() - 2*60*60*24*7)
self.nodes[0].setmocktime(old_time)
# Generate some old blocks
self.nodes[0].generate(130)
# p2p_conns[0] will only request old blocks
# p2p_conns[1] will only request new blocks
# p2p_conns[2] will test resetting the counters
p2p_conns = []
for _ in range(3):
p2p_conns.append(self.nodes[0].add_p2p_connection(TestP2PConn()))
# Now mine a big block
mine_large_block(self.nodes[0], self.utxo_cache)
# Store the hash; we'll request this later
big_old_block = self.nodes[0].getbestblockhash()
old_block_size = self.nodes[0].getblock(big_old_block, True)['size']
        big_old_block = int(big_old_block, 16)
# Advance to two days ago
self.nodes[0].setmocktime(int(time.time()) - 2*60*60*24)
        # Mine one more block, so that the prior block looks old
mine_large_block(self.nodes[0], self.utxo_cache)
# We'll be requesting this new block too
big_new_block = self.nodes[0].getbestblockhash()
big_new_block = int(big_new_block, 16)
# p2p_conns[0] will test what happens if we just keep requesting the
# the same big old block too many times (expect: disconnect)
getdata_request = msg_getdata()
getdata_request.inv.append(CInv(2, big_old_block))
max_bytes_per_day = 800*1024*1024
daily_buffer = 144 * 4000000
max_bytes_available = max_bytes_per_day - daily_buffer
success_count = max_bytes_available // old_block_size
# 576MB will be reserved for relaying new blocks, so expect this to
# succeed for ~235 tries.
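        # Worked numbers (illustrative): 800*1024*1024 = 838,860,800 bytes/day;
        # the buffer reserves 144 * 4,000,000 = 576,000,000 bytes for new-block
        # relay (144 blocks/day at 4MB each), leaving ~262.9MB -- hence the
        # ~235 successful requests for a ~1.1MB large block mentioned above.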
for i in range(success_count):
p2p_conns[0].send_message(getdata_request)
p2p_conns[0].sync_with_ping()
assert_equal(p2p_conns[0].block_receive_map[big_old_block], i+1)
assert_equal(len(self.nodes[0].getpeerinfo()), 3)
# At most a couple more tries should succeed (depending on how long
# the test has been running so far).
for i in range(3):
p2p_conns[0].send_message(getdata_request)
p2p_conns[0].wait_for_disconnect()
assert_equal(len(self.nodes[0].getpeerinfo()), 2)
self.log.info("Peer 0 disconnected after downloading old block too many times")
# Requesting the current block on p2p_conns[1] should succeed indefinitely,
# even when over the max upload target.
# We'll try 800 times
getdata_request.inv = [CInv(2, big_new_block)]
for i in range(800):
p2p_conns[1].send_message(getdata_request)
p2p_conns[1].sync_with_ping()
assert_equal(p2p_conns[1].block_receive_map[big_new_block], i+1)
self.log.info("Peer 1 able to repeatedly download new block")
# But if p2p_conns[1] tries for an old block, it gets disconnected too.
getdata_request.inv = [CInv(2, big_old_block)]
p2p_conns[1].send_message(getdata_request)
p2p_conns[1].wait_for_disconnect()
assert_equal(len(self.nodes[0].getpeerinfo()), 1)
self.log.info("Peer 1 disconnected after trying to download old block")
self.log.info("Advancing system time on node to clear counters...")
# If we advance the time by 24 hours, then the counters should reset,
# and p2p_conns[2] should be able to retrieve the old block.
self.nodes[0].setmocktime(int(time.time()))
p2p_conns[2].sync_with_ping()
p2p_conns[2].send_message(getdata_request)
p2p_conns[2].sync_with_ping()
assert_equal(p2p_conns[2].block_receive_map[big_old_block], 1)
self.log.info("Peer 2 able to download old block")
self.nodes[0].disconnect_p2ps()
#stop and start node 0 with 1MB maxuploadtarget, whitelist 127.0.0.1
self.log.info("Restarting nodes with -whitelist=127.0.0.1")
self.stop_node(0)
self.start_node(0, ["-whitelist=127.0.0.1", "-maxuploadtarget=1"])
# Reconnect to self.nodes[0]
self.nodes[0].add_p2p_connection(TestP2PConn())
#retrieve 20 blocks which should be enough to break the 1MB limit
getdata_request.inv = [CInv(2, big_new_block)]
for i in range(20):
self.nodes[0].p2p.send_message(getdata_request)
self.nodes[0].p2p.sync_with_ping()
assert_equal(self.nodes[0].p2p.block_receive_map[big_new_block], i+1)
getdata_request.inv = [CInv(2, big_old_block)]
self.nodes[0].p2p.send_and_ping(getdata_request)
assert_equal(len(self.nodes[0].getpeerinfo()), 1) #node is still connected because of the whitelist
self.log.info("Peer still connected after trying to download old block (whitelisted)")
if __name__ == '__main__':
MaxUploadTest().main()

zalando/turnstile | tests/test_discovery.py | Python | apache-2.0 | 636 | 0.003145

#!/usr/bin/env python
# -*- coding: utf-8 -*-
from turnstile.checks import get_checks
from turnstile.manager import get_commands
CORE_COMMIT_MSG_CHECKS = ['branch_pattern', 'branch_release', 'branch_type', 'protect_master', 'specification']
CORE_SUBCOMMANDS = ['config', 'install', 'remove', 'specification', 'upgrade', 'version']
def test_checks():
checks = dict(get_checks('commit_msg'))
for check_name in CORE_COMMIT_MSG_CHECKS:
        assert check_name in checks
def test_subcommands():
subcommands = dict(get_commands())
for subcommand_name in CORE_SUBCOMMANDS:
assert subcommand_name in subcommands
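
# Hedged sketch (not in the original file): the discovery helpers yield
# (name, object) pairs, so callers can index checks and subcommands by name
# exactly as the tests above do.
checks = dict(get_checks('commit_msg'))
protect_master = checks['protect_master']
subcommands = dict(get_commands())
install = subcommands['install']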

chromium/chromium | third_party/blink/tools/blinkpy/web_tests/controllers/manager_unittest.py | Python | bsd-3-clause | 10,253 | 0.001073

# Copyright (C) 2010 Google Inc. All rights reserved.
# Copyright (C) 2010 Gabor Rapcsanyi (rgabor@inf.u-szeged.hu), University of Szeged
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Unit tests for manager.py."""
import optparse
import time
import unittest
from blinkpy.common.host_mock import MockHost
from blinkpy.web_tests.controllers.manager import Manager
from blinkpy.web_tests.models import test_expectations
from blinkpy.web_tests.models.test_run_results import TestRunResults
class FakePrinter(object):
def write_update(self, s):
pass
class ManagerTest(unittest.TestCase):
def test_needs_servers(self):
def get_manager():
host = MockHost()
port = host.port_factory.get('test-mac-mac10.10')
manager = Manager(
port,
options=optparse.Values({
'http': True,
'max_locked_shards': 1
}),
printer=FakePrinter())
return manager
manager = get_manager()
self.assertFalse(manager._needs_servers(['fast/html']))
manager = get_manager()
self.assertTrue(manager._needs_servers(['http/tests/misc']))
def test_servers_started(self):
def get_manager(port):
manager = Manager(
port,
options=optparse.Values({
'http': True,
'max_locked_shards': 1
}),
printer=FakePrinter())
return manager
def start_http_server(additional_dirs, number_of_drivers):
self.http_started = True
def start_websocket_server():
self.websocket_started = True
def stop_http_server():
self.http_stopped = True
def stop_websocket_server():
self.websocket_stopped = True
host = MockHost()
port = host.port_factory.get('test-mac-mac10.10')
port.start_http_server = start_http_server
port.start_websocket_server = start_websocket_server
port.stop_http_server = stop_http_server
port.stop_websocket_server = stop_websocket_server
self.http_started = self.http_stopped = self.websocket_started = self.websocket_stopped = False
manager = get_manager(port)
manager._start_servers(['http/tests/foo.html'])
self.assertEqual(self.http_started, True)
self.assertEqual(self.websocket_started, False)
manager._stop_servers()
self.assertEqual(self.http_stopped, True)
self.assertEqual(self.websocket_stopped, False)
self.http_started = self.http_stopped = self.websocket_started = self.websocket_stopped = False
manager._start_servers(['http/tests/websocket/foo.html'])
self.assertEqual(self.http_started, True)
self.assertEqual(self.websocket_started, True)
        manager._stop_servers()
self.assertEqual(self.http_stopped, True)
self.assertEqual(self.websocket_stopped, True)
self.http_started = self.http_stopped = self.websocket_started = self.websocket_stopped = False
manager._start_servers(['fast/html/foo.html'])
self.assertEqual(self.http_started, False)
self.assertEqual(self.websocket_started, False)
manager._stop_servers()
self.assertEqual(self.http_stopped, False)
self.assertEqual(self.websocket_stopped, False)
def test_look_for_new_crash_logs(self):
def get_manager():
host = MockHost()
port = host.port_factory.get('test-mac-mac10.10')
manager = Manager(
port,
options=optparse.Values({
'test_list': None,
'http': True,
'max_locked_shards': 1
}),
printer=FakePrinter())
return manager
host = MockHost()
port = host.port_factory.get('test-mac-mac10.10')
tests = ['failures/expected/crash.html']
expectations = test_expectations.TestExpectations(port)
run_results = TestRunResults(expectations, len(tests), None)
manager = get_manager()
manager._look_for_new_crash_logs(run_results, time.time())
def _make_fake_test_result(self, host, results_directory):
host.filesystem.maybe_make_directory(results_directory)
host.filesystem.write_binary_file(results_directory + '/results.html',
'This is a test results file')
def test_rename_results_folder(self):
host = MockHost()
port = host.port_factory.get('test-mac-mac10.10')
def get_manager():
manager = Manager(
port,
options=optparse.Values({
'max_locked_shards': 1
}),
printer=FakePrinter())
return manager
self._make_fake_test_result(port.host, '/tmp/layout-test-results')
self.assertTrue(
port.host.filesystem.exists('/tmp/layout-test-results'))
timestamp = time.strftime(
'%Y-%m-%d-%H-%M-%S',
time.localtime(
port.host.filesystem.mtime(
'/tmp/layout-test-results/results.html')))
archived_file_name = '/tmp/layout-test-results' + '_' + timestamp
manager = get_manager()
manager._rename_results_folder()
self.assertFalse(
port.host.filesystem.exists('/tmp/layout-test-results'))
self.assertTrue(port.host.filesystem.exists(archived_file_name))
def test_clobber_old_results(self):
host = MockHost()
port = host.port_factory.get('test-mac-mac10.10')
def get_manager():
manager = Manager(
port,
options=optparse.Values({
'max_locked_shards': 1
}),
printer=FakePrinter())
return manager
self._make_fake_test_result(port.host, '/tmp/layout-test-results')
self.assertTrue(
port.host.filesystem.exists('/tmp/layout-test-results'))
manager = get_manager()
manager._clobber_old_results()
self.assertFalse(
port.host.filesystem.exists('/tmp/layout-test-results'))
def test_limit_archived_results_count(self):
host = MockHost()
port = host.port_factory.get('test-mac-mac10.10')
def get_manager():
manager = Manager(
port,
options=optparse.Values({
'max_locked_shards': 1
}),
printer=FakePrinter())
return manager
for x in range(1, 31):

rastermanden/metadata_example | meta/metadata/admin.py | Python | mit | 1,394 | 0.017279

# -*- coding: utf-8 -*-
from django.contrib import admin
from metadata.models import Metadata
from django.http import HttpResponseRedirect
class MetadataAdmin(admin.ModelAdmin):
def response_change(self, request, obj):
return HttpResponseRedirect("/")
## redirect til frontpage efter add
def response_add(self, request, obj):
return HttpResponseRedirect("/")
fieldsets = [
('Navnet metadatsættet', {'fields': ['title']}),
('Kaldenavn ', {'fields': ['slug']}),
('Resumé af metadatasættet ', {'fields': ['abstract']}),
('Formål ', {'fields': ['purpose']}),
('Kontaktinformation ', {'classes':
('collapse',),
'fields': ['responsibleparty_organisationname','electronicmailaddress',('responsibleparty_individualname', 'responsibleparty_positionname','contactinfo_telephone','address_deliverypoint')]}),
        ('Tid ', {'classes': ('collapse',),'fields': [('created','updated', 'beginposition','endposition')]}),
('søgestreng ', {'classes': ('collapse',),'fields': ['search_string']}),
]
list_display = ('title', 'purpose')
prepopulated_fields = {'search_string': ('title','abstract',), 'slug':('title',),}
search_fields = ['title', 'abstract','purpose']
admin.site.register(Metadata, MetadataAdmin)

frank-cq/Toys | searchBook.py | Python | apache-2.0 | 1,673 | 0.039624

# sudo apt install python-lxml,python-requests
from lxml import html
import requests
urlPrefix = 'https://book.douban.com/subject/'
candidateBookNums = []
candidateBookNums.append('3633461')
selectedBooks = {}
# i = 1
while candidateBookNums:
bookNum = candidateBookNums.pop(0)
bookUrl = urlPrefix + str(bookNum)
    # Fetch the page
    page = requests.get(bookUrl)
    # Parse the page into an element tree
    tree = html.fromstring(page.text)
    # Book title
    bookName = tree.xpath('//title/text()')
    # Average rating
    rating_num = tree.xpath('//strong[@property="v:average"]/text()')[0]
    # Number of raters
    rating_people = tree.xpath('//a/span[@property="v:votes"]/text()')[0]
    # The xpath results are strings; compare numerically
    if float(rating_num) < 8 or int(rating_people) < 800:
continue
stars = tree.xpath('//span[@class="rating_per"]/text()')
    # Proportion of 5-star ratings
    stars5 = stars[0]
    # Proportion of 4-star ratings
    stars4 = stars[1]
    # Proportion of 3-star ratings
    stars3 = stars[2]
    # Proportion of 2-star ratings
    stars2 = stars[3]
    # Proportion of 1-star ratings
    stars1 = stars[4]
    # Links pointing to other books on Douban Books
    links = tree.xpath('//div[@class="content clearfix"]/dl/dd/a/@href')
    # Strip whitespace such as newlines, spaces and indentation
    bookName = bookName[0].strip()
    # Assemble the book's rating information from Douban
book = {
'name':bookName,
'score':rating_num,
'rating_people':rating_people,
'stars5':stars5,
'stars4':stars4,
'stars3':stars3,
'stars2':stars2,
'stars1':stars1,
}
selectedBooks[bookNum] = book
print bookName,book
for j in links:
bookNum = j.split('/')[-2]
if bookNum not in selectedBooks.keys() and bookNum not in candidateBookNums:
candidateBookNums.append(bookNum)
# i += 1
# if i > 100:
# break
print selectedBooks
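
# Hedged note (not in the original script): the crawl above is an unbounded
# breadth-first walk over Douban's link graph; re-enabling the commented-out
# counter (or capping len(selectedBooks)) bounds it, e.g.:
#
#   if len(selectedBooks) >= 100:
#       break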

andrebellafronte/stoq | stoqlib/domain/transfer.py | Python | gpl-2.0 | 15,361 | 0.001172

# -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4
##
## Copyright (C) 2007 Async Open Source <http://www.async.com.br>
## All rights reserved
##
## This program is free software; you can redistribute it and/or modify
## it under the terms of the GNU Lesser General Public License as published by
## the Free Software Foundation; either version 2 of the License, or
## (at your option) any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU Lesser General Public License for more details.
##
## You should have received a copy of the GNU Lesser General Public License
## along with this program; if not, write to the Free Software
## Foundation, Inc., or visit: http://www.gnu.org/.
##
## Author(s): Stoq Team <stoq-devel@async.com.br>
##
""" Product transfer management """
# pylint: enable=E1101
from decimal import Decimal
from kiwi.currency import currency
from storm.expr import Join, LeftJoin, Sum, Cast, Coalesce, And
from storm.info import ClassAlias
from storm.references import Reference
from zope.interface import implementer
from stoqlib.database.expr import NullIf
from stoqlib.database.properties import (DateTimeCol, IdCol, IdentifierCol,
IntCol, PriceCol, QuantityCol,
UnicodeCol, EnumCol)
from stoqlib.database.viewable import Viewable
from stoqlib.domain.base import Domain
from stoqlib.domain.fiscal import Invoice
from stoqlib.domain.product import ProductHistory, StockTransactionHistory
from stoqlib.domain.person import Person, Branch, Company
from stoqlib.domain.interfaces import IContainer, IInvoice, IInvoiceItem
from stoqlib.domain.sellable import Sellable
from stoqlib.domain.taxes import InvoiceItemIcms, InvoiceItemIpi
from stoqlib.lib.dateutils import localnow
from stoqlib.lib.translation import stoqlib_gettext
_ = stoqlib_gettext
@implementer(IInvoiceItem)
class TransferOrderItem(Domain):
"""Transfer order item
"""
__storm_table__ = 'transfer_order_item'
sellable_id = IdCol()
# FIXME: This should be a product, since it does not make sense to transfer
    # services
#: The |sellable| to transfer
sellable = Reference(sellable_id, 'Sellable.id')
batch_id = IdCol()
#: If the sellable is a storable, the |batch| that was transfered
batch = Reference(batch_id, 'StorableBatch.id')
transfer_order_id = IdCol()
#: The |transfer| this item belongs to
transfer_order = Reference(transfer_order_id, 'TransferOrder.id')
#: The quantity to transfer
quantity = QuantityCol()
#: Average cost of the item in the source branch at the time of transfer.
stock_cost = PriceCol(default=0)
icms_info_id = IdCol()
#: the :class:`stoqlib.domain.taxes.InvoiceItemIcms` tax for *self*
icms_info = Reference(icms_info_id, 'InvoiceItemIcms.id')
ipi_info_id = IdCol()
#: the :class:`stoqlib.domain.taxes.InvoiceItemIpi` tax for *self*
ipi_info = Reference(ipi_info_id, 'InvoiceItemIpi.id')
item_discount = Decimal('0')
def __init__(self, store=None, **kwargs):
if not 'sellable' in kwargs:
raise TypeError('You must provide a sellable argument')
kwargs['ipi_info'] = InvoiceItemIpi(store=store)
kwargs['icms_info'] = InvoiceItemIcms(store=store)
super(TransferOrderItem, self).__init__(store=store, **kwargs)
product = self.sellable.product
if product:
self.ipi_info.set_item_tax(self)
self.icms_info.set_item_tax(self)
#
# IInvoiceItem implementation
#
@property
def parent(self):
return self.transfer_order
@property
def base_price(self):
return self.stock_cost
@property
def price(self):
return self.stock_cost
@property
def nfe_cfop_code(self):
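        # CFOP is the Brazilian fiscal operation code: 5152 denotes an
        # intra-state merchandise transfer, 6152 an interstate one (hedged
        # reading of the two constants returned below).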
source_branch = self.transfer_order.source_branch
source_address = source_branch.person.get_main_address()
destination_branch = self.transfer_order.destination_branch
destination_address = destination_branch.person.get_main_address()
same_state = True
if (source_address.city_location.state != destination_address.city_location.state):
same_state = False
if same_state:
return u'5152'
else:
return u'6152'
#
# Public API
#
def get_total(self):
"""Returns the total cost of a transfer item eg quantity * cost"""
return self.quantity * self.sellable.cost
def send(self):
"""Sends this item to it's destination |branch|.
This method should never be used directly, and to send a transfer you
should use TransferOrder.send().
"""
product = self.sellable.product
if product.manage_stock:
storable = product.storable
storable.decrease_stock(self.quantity,
self.transfer_order.source_branch,
                                    StockTransactionHistory.TYPE_TRANSFER_TO,
self.id, batch=self.batch)
ProductHistory.add_transfered_item(self.store,
self.transfer_order.source_branch,
self)
def receive(self):
"""Receives this item, increasing the quantity in the stock.
This method should never be used directly, and to receive a transfer
you should use TransferOrder.receive().
"""
        product = self.sellable.product
if product.manage_stock:
storable = product.storable
storable.increase_stock(self.quantity,
self.transfer_order.destination_branch,
StockTransactionHistory.TYPE_TRANSFER_FROM,
self.id, unit_cost=self.stock_cost,
batch=self.batch)
@implementer(IContainer)
@implementer(IInvoice)
class TransferOrder(Domain):
""" Transfer Order class
"""
__storm_table__ = 'transfer_order'
STATUS_PENDING = u'pending'
STATUS_SENT = u'sent'
STATUS_RECEIVED = u'received'
statuses = {STATUS_PENDING: _(u'Pending'),
STATUS_SENT: _(u'Sent'),
STATUS_RECEIVED: _(u'Received')}
status = EnumCol(default=STATUS_PENDING)
#: A numeric identifier for this object. This value should be used instead
#: of :obj:`Domain.id` when displaying a numerical representation of this
#: object to the user, in dialogs, lists, reports and such.
identifier = IdentifierCol()
#: The date the order was created
open_date = DateTimeCol(default_factory=localnow)
#: The date the order was received
receival_date = DateTimeCol()
#: The invoice number of the transfer
invoice_number = IntCol()
#: Comments of a transfer
comments = UnicodeCol()
source_branch_id = IdCol()
#: The |branch| sending the stock
source_branch = Reference(source_branch_id, 'Branch.id')
destination_branch_id = IdCol()
#: The |branch| receiving the stock
destination_branch = Reference(destination_branch_id, 'Branch.id')
source_responsible_id = IdCol()
#: The |employee| responsible for the |transfer| at source |branch|
source_responsible = Reference(source_responsible_id, 'Employee.id')
destination_responsible_id = IdCol()
#: The |employee| responsible for the |transfer| at destination |branch|
destination_responsible = Reference(destination_responsible_id,
'Employee.id')
#: |payments| generated by this transfer
payments = None
#: |transporter| used in transfer
transporter = None
invoice_id = IdCol()
#: The |invoice| generated by the transfer
invoice = Reference(invoice_id, 'Invoice.id')
def __init__(self, store=None, **kwargs):
kwargs['invoice'] = Invoice(store=store, invoic

samsu/neutron | plugins/mlnx/common/constants.py | Python | apache-2.0 | 784 | 0

# Copyright 2013 Mellanox Technologies, Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
LOCAL_VLAN_ID = -2
FLAT_VLAN_ID = -1
# Values for physical network_type
TYPE_IB = 'ib'
TYPE_ETH = 'eth'
VIF_TYPE_DIRECT = 'mlnx_direct'
VIF_TYPE_HOSTDEV = 'hostdev'
VNIC_TYPE = 'vnic_type'

kostyakudinov/Prog | usr/share/vim/vim74/tools/demoserver.py | Python | gpl-2.0 | 3,102 | 0.000967

#!/usr/bin/python
#
# Server that will accept connections from a Vim channel.
# Run this server and then in Vim you can open the channel:
# :let handle = ch_open('localhost:8765')
#
# Then Vim can send requests to the server:
# :let response = ch_sendexpr(handle, 'hello!')
#
# And you can control Vim by typing a JSON message here, e.g.:
# ["ex","echo 'hi there'"]
#
# There is no prompt, just type a line and press Enter.
# To exit cleanly type "quit<Enter>".
#
# See ":help channel-demo" in Vim.
#
# This requires Python 2.6 or later.
from __future__ import print_function
import json
import socket
import sys
import threading
try:
# Python 3
import socketserver
except ImportError:
# Python 2
import SocketServer as socketserver
thesocket = None
class ThreadedTCPRequestHandler(socketserver.BaseRequestHandler):
def handle(self):
print("=== socket opened ===")
global thesocket
thesocket = self.request
while True:
try:
data = self.request.recv(4096).decode('utf-8')
except socket.error:
print("=== socket error ===")
break
except IOError:
                print("=== socket closed ===")
break
if data == '':
print("=== socket closed ===")
break
print("received: {}".format(data))
try:
decoded = json.loads(data)
except ValueError:
print("json decoding failed")
decoded = [-1, '']
# Send a response if the sequence number is positive.
            # Negative numbers are used for "eval" responses.
if decoded[0] >= 0:
if decoded[1] == 'hello!':
response = "got it"
else:
response = "what?"
encoded = json.dumps([decoded[0], response])
print("sending {}".format(encoded))
self.request.sendall(encoded.encode('utf-8'))
thesocket = None
class ThreadedTCPServer(socketserver.ThreadingMixIn, socketserver.TCPServer):
pass
if __name__ == "__main__":
HOST, PORT = "localhost", 8765
server = ThreadedTCPServer((HOST, PORT), ThreadedTCPRequestHandler)
ip, port = server.server_address
# Start a thread with the server -- that thread will then start one
# more thread for each request
server_thread = threading.Thread(target=server.serve_forever)
# Exit the server thread when the main thread terminates
server_thread.daemon = True
server_thread.start()
print("Server loop running in thread: ", server_thread.name)
print("Listening on port {}".format(PORT))
while True:
typed = sys.stdin.readline()
if "quit" in typed:
print("Goodbye!")
break
if thesocket is None:
print("No socket yet")
else:
print("sending {}".format(typed))
thesocket.sendall(typed.encode('utf-8'))
server.shutdown()
server.server_close()

Glottotopia/aagd | moin/local/moin/build/lib.linux-x86_64-2.6/MoinMoin/support/passlib/handlers/sun_md5_crypt.py | Python | mit | 14,328 | 0.006421

"""passlib.handlers.sun_md5_crypt - Sun's Md5 Crypt, used on Solaris
.. warning::
This implementation may not reproduce
the original Solaris behavior in some border cases.
See documentation for details.
"""
#=============================================================================
# imports
#=============================================================================
# core
from hashlib import md5
import re
import logging; log = logging.getLogger(__name__)
from warnings import warn
# site
# pkg
from passlib.utils import h64, to_unicode
from passlib.utils.compat import b, bytes, byte_elem_value, irange, u, \
uascii_to_str, unicode, str_to_bascii
import passlib.utils.handlers as uh
# local
__all__ = [
"sun_md5_crypt",
]
#=============================================================================
# backend
#=============================================================================
# constant data used by alg - Hamlet act 3 scene 1 + null char
# exact bytes as in http://www.ibiblio.org/pub/docs/books/gutenberg/etext98/2ws2610.txt
# from Project Gutenberg.
MAGIC_HAMLET = b(
"To be, or not to be,--that is the question:--\n"
"Whether 'tis nobler in the mind to suffer\n"
"The slings and arrows of outrageous fortune\n"
"Or to take arms against a sea of troubles,\n"
"And by opposing end them?--To die,--to sleep,--\n"
"No more; and by a sleep to say we end\n"
"The heartache, and the thousand natural shocks\n"
"That flesh is heir to,--'tis a consummation\n"
"Devoutly to be wish'd. To die,--to sleep;--\n"
"To sleep! perchance to dream:--ay, there's the rub;\n"
"For in that sleep of death what dreams may come,\n"
"When we have shuffled off this mortal coil,\n"
"Must give us pause: there's the respect\n"
"That makes calamity of so long life;\n"
"For who would bear the whips and scorns of time,\n"
"The oppressor's wrong, the proud man's contumely,\n"
"The pangs of despis'd love, the law's delay,\n"
"The insolence of office, and the spurns\n"
"That patient merit of the unworthy takes,\n"
"When he himself might his quietus make\n"
"With a bare bodkin? who would these fardels bear,\n"
"To grunt and sweat under a weary life,\n"
"But that the dread of something after death,--\n"
"The undiscover'd country, from whose bourn\n"
"No traveller returns,--puzzles the will,\n"
"And makes us rather bear those ills we have\n"
"Than fly to others that we know not of?\n"
"Thus conscience does make cowards of us all;\n"
"And thus the native hue of resolution\n"
"Is sicklied o'er with the pale cast of thought;\n"
"And enterprises of great pith and moment,\n"
"With this regard, their currents turn awry,\n"
"And lose the name of action.--Soft you now!\n"
"The fair Ophelia!--Nymph, in thy orisons\n"
"Be all my sins remember'd.\n\x00" #<- apparently null at end of C string is included (test vector won't pass otherwise)
)
# NOTE: these sequences are pre-calculated iteration ranges used by X & Y loops w/in rounds function below
xr = irange(7)
_XY_ROUNDS = [
tuple((i,i,i+3) for i in xr), # xrounds 0
tuple((i,i+1,i+4) for i in xr), # xrounds 1
tuple((i,i+8,(i+11)&15) for i in xr), # yrounds 0
tuple((i,(i+9)&15, (i+12)&15) for i in xr), # yrounds 1
]
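# Illustrative expansion (not in the original source): _XY_ROUNDS[0] is
# ((0,0,3), (1,1,4), (2,2,5), (3,3,6), (4,4,7), (5,5,8), (6,6,9)), i.e. the
# (i, i, i+3) index triples consumed by the X loop for rounds variant 0.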
del xr
def raw_sun_md5_crypt(secret, rounds, salt):
"given secret & salt, return encoded sun-md5-crypt checksum"
global MAGIC_HAMLET
assert isinstance(secret, bytes)
assert isinstance(salt, bytes)
# validate rounds
if rounds <= 0:
rounds = 0
real_rounds = 4096 + rounds
# NOTE: spec seems to imply max 'rounds' is 2**32-1
# generate initial digest to start off round 0.
# NOTE: algorithm 'salt' includes full config string w/ trailing "$"
result = md5(secret + salt).digest()
assert len(result) == 16
# NOTE: many things in this function have been inlined (to speed up the loop
# as much as possible), to the point that this code barely resembles
# the algorithm as described in the docs. in particular:
#
# * all accesses to a given bit have been inlined using the formula
# rbitval(bit) = (rval((bit>>3) & 15) >> (bit & 7)) & 1
#
# * the calculation of coinflip value R has been inlined
#
# * the conditional division of coinflip value V has been inlined as
# a shift right of 0 or 1.
#
# * the i, i+3, etc iterations are precalculated in lists.
#
# * the round-based conditional division of x & y is now performed
# by choosing an appropriate precalculated list, so that it only
# calculates the 7 bits which will actually be used.
#
X_ROUNDS_0, X_ROUNDS_1, Y_ROUNDS_0, Y_ROUNDS_1 = _XY_ROUNDS
# NOTE: % appears to be *slightly* slower than &, so we prefer & if possible
round = 0
while round < real_rounds:
# convert last result byte string to list of byte-ints for easy access
rval = [ byte_elem_value(c) for c in result ].__getitem__
# build up X bit by bit
x = 0
xrounds = X_ROUNDS_1 if (rval((round>>3) & 15)>>(round & 7)) & 1 else X_ROUNDS_0
for i, ia, ib in xrounds:
a = rval(ia)
b = rval(ib)
v = rval((a >> (b % 5)) & 15) >> ((b>>(a&7)) & 1)
x |= ((rval((v>>3)&15)>>(v&7))&1) << i
# build up Y bit by bit
y = 0
yrounds = Y_ROUNDS_1 if (rval(((round+64)>>3) & 15)>>(round & 7)) & 1 else Y_ROUNDS_0
for i, ia, ib in yrounds:
a = rval(ia)
b = rval(ib)
v = rval((a >> (b % 5)) & 15) >> ((b>>(a&7)) & 1)
y |= ((rval((v>>3)&15)>>(v&7))&1) << i
        # extract x'th and y'th bit, xoring them together to yield "coin flip"
coin = ((rval(x>>3) >> (x&7)) ^ (rval(y>>3) >> (y&7))) & 1
# construct hash for this round
h = md5(result)
if coin:
h.update(MAGIC_HAMLET)
h.update(unicode(round).encode("ascii"))
result = h.digest()
round += 1
# encode output
return h64.encode_transposed_bytes(result, _chk_offsets)
# NOTE: same offsets as md5_crypt
_chk_offsets = (
12,6,0,
13,7,1,
14,8,2,
15,9,3,
5,10,4,
11,
)
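
# Hedged usage sketch (not in the original module): exercising the raw
# primitive directly. Per the NOTE in raw_sun_md5_crypt, the salt argument is
# the full config string including the trailing "$"; the values below are
# purely illustrative.
#
#   chk = raw_sun_md5_crypt(b"password", 5000, b"$md5,rounds=5000$GUBv0xjJ$")
#   # -> h64-encoded 22-character checksum string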
#=============================================================================
# handler
#=============================================================================
class sun_md5_crypt(uh.HasRounds, uh.HasSalt, uh.GenericHandler):
"""This class implements the Sun-MD5-Crypt password hash, and follows the :ref:`password-hash-api`.
It supports a variable-length salt, and a variable number of rounds.
The :meth:`~passlib.ifc.PasswordHash.encrypt` and :meth:`~passlib.ifc.PasswordHash.genconfig` methods accept the following optional keywords:
:type salt: str
:param salt:
Optional salt string.
If not specified, a salt will be autogenerated (this is recommended).
If specified, it must be drawn from the regexp range ``[./0-9A-Za-z]``.
:type salt_size: int
:param salt_size:
If no salt is specified, this parameter can be used to specify
the size (in characters) of the autogenerated salt.
It currently defaults to 8.
:type rounds: int
:param rounds:
Optional number of rounds to use.
Defaults to 5000, must be between 0 and 4294963199, inclusive.
:type bare_salt: bool
:param bare_salt:
Optional flag used to enable an alternate salt digest behavior
used by some hash strings in this scheme.
This flag can be ignored by most users.
Defaults to ``False``.
(see :ref:`smc-bare-salt` for details).
:type relaxed: bool
:param relaxed:
By default, providing an invalid value for one of the other

plumgrid/plumgrid-nova | nova/tests/api/openstack/compute/plugins/v3/test_instance_actions.py | Python | apache-2.0 | 12,710 | 0.000393

# Copyright 2013 Rackspace Hosting
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import uuid
from lxml import etree
from webob import exc
from nova.api.openstack.compute.plugins.v3 import instance_actions
from nova.compute import api as compute_api
from nova import db
from nova.db.sqlalchemy import models
from nova import exception
from nova.openstack.common import policy
from nova import test
from nova.tests.api.openstack import fakes
from nova.tests import fake_instance
from nova.tests import fake_instance_actions
FAKE_UUID = fake_instance_actions.FAKE_UUID
FAKE_REQUEST_ID = fake_instance_actions.FAKE_REQUEST_ID1
def format_action(action):
'''Remove keys that aren't serialized.'''
to_delete = ('id', 'finish_time', 'created_at', 'updated_at', 'deleted_at',
'deleted')
for key in to_delete:
if key in action:
del(action[key])
if 'start_time' in action:
# NOTE(danms): Without WSGI above us, these will be just stringified,
# and objects will have added a timezone, so strip that for comparison
action['start_time'] = str(action['start_time'].replace(tzinfo=None))
for event in action.get('events', []):
format_event(event)
return action
def format_event(event):
'''Remove keys that aren't serialized.'''
to_delete = ('id', 'created_at', 'updated_at', 'deleted_at', 'deleted',
'action_id')
for key in to_delete:
if key in event:
del(event[key])
if 'start_time' in event:
# NOTE(danms): Without WSGI above us, these will be just stringified,
# and objects will have added a timezone, so strip that for comparison
event['start_time'] = str(event['start_time'].replace(tzinfo=None))
if 'finish_time' in event:
# NOTE(danms): Without WSGI above us, these will be just stringified,
# and objects will have added a timezone, so strip that for comparison
event['finish_time'] = str(event['finish_time'].replace(tzinfo=None))
return event
class InstanceActionsPolicyTest(test.TestCase):
def setUp(self):
super(InstanceActionsPolicyTest, self).setUp()
self.controller = instance_actions.InstanceActionsController()
def test_list_actions_restricted_by_project(self):
rules = policy.Rules({'compute:get': policy.parse_rule(''),
'compute_extension:v3:os-instance-actions':
policy.parse_rule('project_id:%(project_id)s')})
policy.set_rules(rules)
def fake_instance_get_by_uuid(context, instance_id,
columns_to_join=None):
return fake_instance.fake_db_instance(
**{'name': 'fake', 'project_id': '%s_unequal' %
context.project_id})
self.stubs.Set(db, 'instance_get_by_uuid', fake_instance_get_by_uuid)
req = fakes.HTTPRequestV3.blank('/servers/12/os-instance-actions')
self.assertRaises(exception.NotAuthorized, self.controller.index, req,
str(uuid.uuid4()))
def test_get_action_restricted_by_project(self):
rules = policy.Rules({'compute:get': policy.parse_rule(''),
'compute_extension:v3:os-instance-actions':
policy.parse_rule('project_id:%(project_id)s')})
policy.set_rules(rules)
def fake_instance_get_by_uuid(context, instance_id,
columns_to_join=None):
return fake_instance.fake_db_instance(
**{'name': 'fake', 'project_id': '%s_unequal' %
                   context.project_id})
self.stubs.Set(db, 'instance_get_by_uuid', fake_instance_get_by_uuid)
        req = fakes.HTTPRequestV3.blank('/servers/12/os-instance-actions/1')
self.assertRaises(exception.NotAuthorized, self.controller.show, req,
str(uuid.uuid4()), '1')
class InstanceActionsTest(test.TestCase):
def setUp(self):
super(InstanceActionsTest, self).setUp()
self.controller = instance_actions.InstanceActionsController()
self.fake_actions = copy.deepcopy(fake_instance_actions.FAKE_ACTIONS)
self.fake_events = copy.deepcopy(fake_instance_actions.FAKE_EVENTS)
def fake_get(self, context, instance_uuid):
return {'uuid': instance_uuid}
def fake_instance_get_by_uuid(context, instance_id,
columns_to_join=None):
return fake_instance.fake_db_instance(
**{'name': 'fake', 'project_id': '%s_unequal' %
context.project_id})
self.stubs.Set(compute_api.API, 'get', fake_get)
self.stubs.Set(db, 'instance_get_by_uuid', fake_instance_get_by_uuid)
def test_list_actions(self):
def fake_get_actions(context, uuid):
actions = []
for act in self.fake_actions[uuid].itervalues():
action = models.InstanceAction()
action.update(act)
actions.append(action)
return actions
self.stubs.Set(db, 'actions_get', fake_get_actions)
req = fakes.HTTPRequestV3.blank('/servers/12/os-instance-actions')
res_dict = self.controller.index(req, FAKE_UUID)
for res in res_dict['instance_actions']:
fake_action = self.fake_actions[FAKE_UUID][res['request_id']]
self.assertEqual(format_action(fake_action),
format_action(res))
def test_get_action_with_events_allowed(self):
def fake_get_action(context, uuid, request_id):
action = models.InstanceAction()
action.update(self.fake_actions[uuid][request_id])
return action
def fake_get_events(context, action_id):
events = []
for evt in self.fake_events[action_id]:
event = models.InstanceActionEvent()
event.update(evt)
events.append(event)
return events
self.stubs.Set(db, 'action_get_by_request_id', fake_get_action)
self.stubs.Set(db, 'action_events_get', fake_get_events)
req = fakes.HTTPRequestV3.blank(
'/servers/12/os-instance-actions/1',
use_admin_context=True)
res_dict = self.controller.show(req, FAKE_UUID, FAKE_REQUEST_ID)
fake_action = self.fake_actions[FAKE_UUID][FAKE_REQUEST_ID]
fake_events = self.fake_events[fake_action['id']]
fake_action['events'] = fake_events
self.assertEqual(format_action(fake_action),
format_action(res_dict['instance_action']))
def test_get_action_with_events_not_allowed(self):
def fake_get_action(context, uuid, request_id):
return self.fake_actions[uuid][request_id]
def fake_get_events(context, action_id):
return self.fake_events[action_id]
self.stubs.Set(db, 'action_get_by_request_id', fake_get_action)
self.stubs.Set(db, 'action_events_get', fake_get_events)
rules = policy.Rules({
'compute:get': policy.parse_rule(''),
'compute_extension:v3:os-instance-actions':
policy.parse_rule(''),
'compute_extension:v3:os-instance-actions:events':
policy.parse_rule('is_admin:True')})
policy.set_rules(rules)
req = fakes.HTTPRequestV3.blank(
'/servers/12/os-instance

jswope00/GAI | lms/djangoapps/dashboard/sysadmin.py | Python | agpl-3.0 | 27,630 | 0.00076

"""
This module creates a sysadmin dashboard for managing and viewing
courses.
"""
import csv
import json
import logging
import os
import subprocess
import time
import StringIO
from django.conf import settings
from django.contrib.auth import authenticate
from django.contrib.auth.decorators import login_required
from django.contrib.auth.models import User
from django.core.exceptions import PermissionDenied
from django.db import IntegrityError
from django.http import HttpResponse, Http404
from django.utils.decorators import method_decorator
from django.utils.html import escape
from django.utils import timezone
from django.utils.translation import ugettext as _
from django.views.decorators.cache import cache_control
from django.views.generic.base import TemplateView
from django.views.decorators.http import condition
from django_future.csrf import ensure_csrf_cookie
from edxmako.shortcuts import render_to_response
import mongoengine
from courseware.courses import get_course_by_id
import dashboard.git_import as git_import
from dashboard.git_import import GitImportError
from student.roles import CourseStaffRole, CourseInstructorRole
from dashboard.models import CourseImportLog
from external_auth.models import ExternalAuthMap
from external_auth.views import generate_password
from student.models import CourseEnrollment, UserProfile, Registration
import track.views
from xmodule.contentstore.django import contentstore
from xmodule.modulestore import XML_MODULESTORE_TYPE
from xmodule.modulestore.django import modulestore
from xmodule.modulestore.store_utilities import delete_course
from xmodule.modulestore.xml import XMLModuleStore
log = logging.getLogger(__name__)
class SysadminDashboardView(TemplateView):
"""Base class for sysadmin dashboard views with common methods"""
template_name = 'sysadmin_dashboard.html'
def __init__(self, **kwargs):
"""
Initialize base sysadmin dashboard class with modulestore,
modulestore_type and return msg
"""
self.def_ms = modulestore()
self.is_using_mongo = True
if isinstance(self.def_ms, XMLModuleStore):
self.is_using_mongo = False
self.msg = u''
self.datatable = []
super(SysadminDashboardView, self).__init__(**kwargs)
@method_decorator(ensure_csrf_cookie)
@method_decorator(login_required)
@method_decorator(cache_control(no_cache=True, no_store=True,
must_revalidate=True))
@method_decorator(condition(etag_func=None))
def dispatch(self, *args, **kwargs):
return super(SysadminDashboardView, self).dispatch(*args, **kwargs)
def get_courses(self):
""" Get an iterable list of courses."""
courses = self.def_ms.get_courses()
courses = dict([c.id, c] for c in courses) # no course directory
return courses
def return_csv(self, filename, header, data):
"""
Convenient function for handling the http response of a csv.
data should be iterable and is used to stream object over http
"""
csv_file = StringIO.StringIO()
writer = csv.writer(csv_file, dialect='excel', quotechar='"',
quoting=csv.QUOTE_ALL)
writer.writerow(header)
# Setup streaming of the data
def read_and_flush():
"""Read and clear buffer for optimization"""
csv_file.seek(0)
csv_data = csv_file.read()
csv_file.seek(0)
csv_file.truncate()
return csv_data
def csv_data():
"""Generator for handling potentially large CSVs"""
for row in data:
                writer.writerow(row)
csv_data = read_and_flush()
yield csv_data
response = HttpResponse(csv_data(), mimetype='text/csv')
response['Content-Disposition'] = 'attachment; filename={0}'.format(
filename)
return response
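
    # Hedged usage sketch (not in the original file): a subclass handler can
    # stream a large query through return_csv without materializing it first.
    #
    #   rows = ((u.username, u.email) for u in User.objects.iterator())
    #   return self.return_csv('users.csv', ['username', 'email'], rows)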
class Users(SysadminDashboardView):
"""
The status view provides Web based user management, a listing of
courses loaded, and user statistics
"""
def fix_external_auth_map_passwords(self):
"""
This corrects any passwords that have drifted from eamap to
internal django auth. Needs to be removed when fixed in external_auth
"""
msg = ''
for eamap in ExternalAuthMap.objects.all():
euser = eamap.user
epass = eamap.internal_password
if euser is None:
continue
try:
testuser = authenticate(username=euser.username, password=epass)
except (TypeError, PermissionDenied, AttributeError), err:
msg += _('Failed in authenticating {0}, error {1}\n'
).format(euser, err)
continue
if testuser is None:
msg += _('Failed in authenticating {0}\n').format(euser)
msg += _('fixed password')
euser.set_password(epass)
euser.save()
continue
if not msg:
msg = _('All ok!')
return msg
def create_user(self, uname, name, password=None):
""" Creates a user (both SSL and regular)"""
if not uname:
return _('Must provide username')
if not name:
return _('Must provide full name')
email_domain = getattr(settings, 'SSL_AUTH_EMAIL_DOMAIN', 'MIT.EDU')
msg = u''
if settings.FEATURES['AUTH_USE_CERTIFICATES']:
if not '@' in uname:
email = '{0}@{1}'.format(uname, email_domain)
else:
email = uname
if not email.endswith('@{0}'.format(email_domain)):
msg += u'{0} @{1}'.format(_('email must end in'), email_domain)
return msg
mit_domain = 'ssl:MIT'
if ExternalAuthMap.objects.filter(external_id=email,
external_domain=mit_domain):
msg += _('Failed - email {0} already exists as '
'external_id').format(email)
return msg
new_password = generate_password()
else:
if not password:
return _('Password must be supplied if not using certificates')
email = uname
if not '@' in email:
msg += _('email address required (not username)')
return msg
new_password = password
user = User(username=uname, email=email, is_active=True)
user.set_password(new_password)
try:
user.save()
except IntegrityError:
msg += _('Oops, failed to create user {0}, '
'IntegrityError').format(user)
return msg
reg = Registration()
reg.register(user)
profile = UserProfile(user=user)
profile.name = name
profile.save()
if settings.FEATURES['AUTH_USE_CERTIFICATES']:
credential_string = getattr(settings, 'SSL_AUTH_DN_FORMAT_STRING',
'/C=US/ST=Massachusetts/O=Massachusetts Institute of Technology/OU=Client CA v1/CN={0}/emailAddress={1}')
credentials = credential_string.format(name, email)
eamap = ExternalAuthMap(
external_id=email,
external_email=email,
external_domain=mit_domain,
external_name=name,
internal_password=new_password,
external_credentials=json.dumps(credentials),
)
eamap.user = user
eamap.dtsignup = timezone.now()
eamap.save()
msg += _('User {0} created successfully!').format(user)
return msg
def delete_user(self, uname):
"""Deletes a user from django auth"""
if not uname:
return _('Must provide username')
if '@' in uname:
try:
user = User.objects.get(email=uname)
except User.DoesNotExist, err:

run2fail/locker | locker/network.py | Python | gpl-3.0 | 16,640 | 0.001563

'''
Network related functionality like bridge and netfilter setup
'''
import itertools
import logging
import re
import time
import iptc
import locker
import netaddr
import pyroute2
from locker.util import regex_ip
class BridgeUnavailable(Exception):
''' Bridge device does not exist
Raised if the project specific bridge is unavailable, i.e., it cannot be
access by the current user (due to privileges or because it has not yet
been created).
'''
pass
class Network(object):
'''
The network class handles the bridge and container-unspecific netfilter rules
'''
    def __init__(self, project):
        ''' Calls start() to setup netfilter rules and bridge
        '''
self.project = project
self._bridge = self._get_existing_bridge()
@property
def project(self):
''' Get locker project instance '''
return self._project
@project.setter
def project(self, value):
''' Set locker project instance '''
if not isinstance(value, locker.Project):
raise TypeError('Invalid type for property project: %s, required type = %s' % (type(value), type(locker.Project)))
self._project = value
@property
def bridge(self):
''' Get bridge assigned to the project '''
if not self._bridge:
raise BridgeUnavailable()
return self._bridge
@bridge.setter
def bridge(self, value):
''' Set bridge assigned to the project '''
if not isinstance(value, pyroute2.ipdb.interface.Interface):
raise TypeError('Invalid type for property bridge: %s, required type = %s' % (type(value), type(pyroute2.ipdb.interface.Interface)))
self._bridge = value
@property
def bridge_ifname(self):
''' Get the name of the bridge assigned to the project '''
return self.bridge.ifname
@property
def gateway(self):
''' Get gateway / IP address of the bridge '''
bridge_ip, bridge_cidr = Network._if_to_ip(self.bridge)
return bridge_ip
@staticmethod
def find_comment_in_chain(comment, chain):
''' Search rule with a matching comment
:param comment: The comment to match
:param chain: The chain to search
:returns: True if any matching rule found, else False
'''
for rule in chain.rules:
for match in rule.matches:
if match.name == 'comment' and match.comment == comment:
logging.debug('Found rule with comment \"%s\" in \"%s\" chain', comment, chain.name)
return True
return False
@staticmethod
def _delete_if_comment(comment, table, chain):
''' Search rule with a matching comment and delete it
:param comment: The comment to match
:param table: The table containing the chain
:param chain: The chain to search
'''
# work-around (iptc.ip4tc.IPTCError: can't commit: b'Resource temporarily unavailable')
table.autocommit = False
for rule in chain.rules:
for match in rule.matches:
if match.name == 'comment' and match.comment == comment:
logging.debug('Cleaning up rule from chain: %s', chain.name)
try:
chain.delete_rule(rule)
except iptc.IPTCError as exception:
logging.warn('Could not cleanup rule from chain \"%s\": %s', chain, exception)
table.commit()
table.refresh()
table.autocommit = True
def _setup_locker_chains(self):
''' Add container unspecific netfilter rules
This method does the following
- Adds LOCKER_PREROUTING chain to the NAT table
- Creates a rule from the PREROUTING chain in the NAT table to the
LOCKER_PREROUTING chain
- Adds LOCKER_FORWARD chain to the FILTER table
- Creates a rule from the FORWARD chain in the FILTER table to the
LOCKER_FORWARD chain
- Ensures that the jump rules are only added once (the rules' comments
are checked for a match)
:raises: iptc.IPTCError if a chain cannot be retrieved or created
'''
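        # Resulting layout (illustrative, in iptables -L terms):
        #   nat    PREROUTING -> LOCKER_PREROUTING  (addrtype dst-type LOCAL, comment "LOCKER")
        #   filter FORWARD    -> LOCKER_FORWARD     (comment "LOCKER")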
nat_table = iptc.Table(iptc.Table.NAT)
if 'LOCKER_PREROUTING' not in [c.name for c in nat_table.chains]:
try:
logging.debug('Adding LOCKER_PREROUTING chain to NAT table')
nat_table.create_chain('LOCKER_PREROUTING')
except iptc.IPTCError as exception:
logging.error('Was not able to create LOCKER_PREROUTING chain in NAT table, cannot add rules: %s', exception)
raise
nat_prerouting_chain = iptc.Chain(nat_table, 'PREROUTING')
if not Network.find_comment_in_chain('LOCKER', nat_prerouting_chain):
jump_to_locker_rule = iptc.Rule()
jump_to_locker_rule.create_target("LOCKER_PREROUTING")
addr_type_match = jump_to_locker_rule.create_match("addrtype")
addr_type_match.dst_type = "LOCAL"
comment_match = jump_to_locker_rule.create_match("comment")
comment_match.comment = 'LOCKER'
nat_prerouting_chain.insert_rule(jump_to_locker_rule)
filter_table = iptc.Table(iptc.Table.FILTER)
if 'LOCKER_FORWARD' not in [c.name for c in filter_table.chains]:
try:
logging.debug('Adding LOCKER_FORWARD chain to NAT table')
filter_table.create_chain('LOCKER_FORWARD')
except iptc.IPTCError as exception:
logging.error('Was not able to create LOCKER_FORWARD chain in NAT table, cannot add rules: %s', exception)
raise
forward_chain = iptc.Chain(filter_table, 'FORWARD')
if not Network.find_comment_in_chain('LOCKER', forward_chain):
jump_to_locker_rule = iptc.Rule()
jump_to_locker_rule.create_target("LOCKER_FORWARD")
#addr_type_match = jump_to_locker_rule.create_match("addrtype")
#addr_type_match.dst_type = "LOCAL"
comment_match = jump_to_locker_rule.create_match("comment")
comment_match.comment = 'LOCKER'
forward_chain.insert_rule(jump_to_locker_rule)
def start(self):
''' Sets bridge and netfilter rules up
'''
logging.info('Starting Locker network')
self._setup_locker_chains()
self._create_bridge()
self._enable_nat()
def _enable_nat(self):
''' Add netfilter rules that enable direct communication from the containers
'''
filter_forward = iptc.Chain(iptc.Table(iptc.Table.FILTER), 'LOCKER_FORWARD')
if not Network.find_comment_in_chain(self.bridge_ifname, filter_forward):
logging.info('Adding NAT rules for external access')
            # enable access from the containers to external destinations
enable_outbound = iptc.Rule()
enable_outbound.in_interface = self.bridge_ifname
enable_outbound.out_interface = '!%s' % self.bridge_ifname
enable_outbound.create_target('ACCEPT')
comment_match = enable_outbound.create_match("comment")
comment_match.comment = self.bridge_ifname
filter_forward.insert_rule(enable_outbound)
            # enable access from external sources to the containers
enable_inbound = iptc.Rule()
enable_inbound.in_interface = '!%s' % self.bridge_ifname
enable_inbound.out_interface = self.bridge_ifname
enable_inbound.create_target('ACCEPT')
comment_match = enable_inbound.create_match("comment")
comment_match.comment = self.bridge_ifname
filter_forward.insert_rule(enable_inbound)
        nat_postrouting = iptc.Chain(iptc.Table(iptc.Table.NAT), 'POSTROUTING')
        if not Network.find_comment_in_chain(self.bridge_ifname, nat_postrouting):
logging.info('Adding masquerade rules for external access')
# masquerade outbound connections
            bridge_ip, bridge_cidr = Network._if_to_ip(self.bridge_ifname)
|
Passtechsoft/TPEAlpGen
|
blender/doc/blender_file_format/BlendFileDnaExporter_25.py
|
Python
|
gpl-3.0
| 15,484
| 0.01001
|
#!/usr/bin/env python3
# ***** BEGIN GPL LICENSE BLOCK *****
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ***** END GPL LICENCE BLOCK *****
######################################################
#
# Name:
# dna.py
#
# Description:
# Creates a browsable DNA output to HTML.
#
# Author:
# Jeroen Bakker
#
# Version:
# v0.1 (12-05-2009) - migration of original source code to python.
# Added code to support blender 2.5 branch
# v0.2 (25-05-2009) - integrated with BlendFileReader.py
#
# Input:
# blender build executable
#
# Output:
# dna.html
# dna.css (will only be created when not existing)
#
# Startup:
# ./blender -P BlendFileDnaExporter.py
#
# Process:
# 1: write blend file with SDNA info
# 2: read blend header from blend file
# 3: seek DNA1 file-block
# 4: read dna record from blend file
# 5: close and eventually delete temp blend file
# 6: export dna to html and css
# 7: quit blender
#
######################################################
import struct
import sys
import getopt # command line arguments handling
from string import Template # strings completion
# logs
import logging
log = logging.getLogger("BlendFileDnaExporter")
if '--dna-debug' in sys.argv:
logging.basicConfig(level=logging.DEBUG)
else:
logging.basicConfig(level=logging.INFO)
class DNACatalogHTML:
'''
DNACatalog is a catalog of all information in the DNA1 file-block
'''
def __init__(self, catalog, bpy_module = None):
self.Catalog = catalog
self.bpy = bpy_module
def WriteToHTML(self, handle):
dna_html_template = """
        <!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<html>
<head>
<link rel="stylesheet" type="text/css" href="dna.css" media="screen, print" />
            <meta http-equiv="Content-Type" content="text/html; charset=ISO-8859-1" />
<title>The mystery of the blend</title>
</head>
<body>
<div class=title>
Blender ${version}<br/>
Internal SDNA structures
</div>
Architecture: ${bitness} ${endianness}<br/>
Build revision: <a href="https://svn.blender.org/svnroot/bf-blender/!svn/bc/${revision}/trunk/">${revision}</a><br/>
File format reference: <a href="mystery_of_the_blend.html">The mystery of the blend</a> by Jeroen Bakker<br/>
<h1>Index of blender structures</h1>
<ul class=multicolumn>
${structs_list}
</ul>
${structs_content}
</body>
</html>"""
header = self.Catalog.Header
bpy = self.bpy
# ${version} and ${revision}
if bpy:
version = '.'.join(map(str, bpy.app.version))
revision = bpy.app.build_hash
else:
version = str(header.Version)
revision = 'Unknown'
# ${bitness}
if header.PointerSize == 8:
bitness = '64 bit'
else:
bitness = '32 bit'
# ${endianness}
if header.LittleEndianness:
            endianness = 'Little endianness'
        else:
            endianness = 'Big endianness'
# ${structs_list}
log.debug("Creating structs index")
structs_list = ''
list_item = '<li class="multicolumn">({0}) <a href="#{1}">{1}</a></li>\n'
        structureIndex = 0
        for structure in self.Catalog.Structs:
structs_list += list_item.format(structureIndex, structure.Type.Name)
structureIndex+=1
# ${structs_content}
log.debug("Creating structs content")
structs_content = ''
for structure in self.Catalog.Structs:
log.debug(structure.Type.Name)
structs_content += self.Structure(structure)
d = dict(
version = version,
revision = revision,
bitness = bitness,
            endianness = endianness,
structs_list = structs_list,
structs_content = structs_content
)
dna_html = Template(dna_html_template).substitute(d)
dna_html = self.format(dna_html)
handle.write(dna_html)
def Structure(self, structure):
struct_table_template = """
<table><a name="${struct_name}"></a>
<caption><a href="#${struct_name}">${struct_name}</a></caption>
<thead>
<tr>
<th>reference</th>
<th>structure</th>
<th>type</th>
<th>name</th>
<th>offset</th>
<th>size</th>
</tr>
</thead>
<tbody>
${fields}
</tbody>
</table>
<label>Total size: ${size} bytes</label><br/>
<label>(<a href="#top">top</a>)</label><br/>"""
d = dict(
struct_name = structure.Type.Name,
fields = self.StructureFields(structure, None, 0),
size = str(structure.Type.Size)
)
struct_table = Template(struct_table_template).substitute(d)
return struct_table
def StructureFields(self, structure, parentReference, offset):
fields = ''
for field in structure.Fields:
fields += self.StructureField(field, structure, parentReference, offset)
offset += field.Size(self.Catalog.Header)
return fields
def StructureField(self, field, structure, parentReference, offset):
structure_field_template = """
<tr>
<td>${reference}</td>
<td>${struct}</td>
<td>${type}</td>
<td>${name}</td>
<td>${offset}</td>
<td>${size}</td>
</tr>"""
if field.Type.Structure is None or field.Name.IsPointer():
# ${reference}
reference = field.Name.AsReference(parentReference)
# ${struct}
if parentReference is not None:
struct = '<a href="#{0}">{0}</a>'.format(structure.Type.Name)
else:
struct = structure.Type.Name
# ${type}
type = field.Type.Name
# ${name}
name = field.Name.Name
# ${offset}
# offset already set
# ${size}
size = field.Size(self.Catalog.Header)
d = dict(
reference = reference,
struct = struct,
type = type,
name = name,
offset = offset,
size = size
)
structure_field = Template(structure_field_template).substitute(d)
elif field.Type.Structure is not None:
reference = field.Name.AsReference(parentReference)
structure_field = self.StructureFields(field.Type.Structure, reference, offset)
        return structure_field
|
mediawiki-utilities/python-mwreverts
|
mwreverts/tests/test_detector.py
|
Python
|
mit
| 849
| 0
|
from nose.tools import eq_
from ..detector import Detector
def test_detector():
detector = Detector(2)
eq_(detector.process("a", {'id': 1}), None)
# Check noop
eq_(detector.process("a", {'id': 2}), None)
# Short revert
eq_(detector.process("b", {'id': 3}), None)
eq_(
detector.process("a", {'id': 4}),
({'id': 4}, [{'id': 3}], {'id': 2})
)
# Medium revert
eq_(detector.process("c", {'id': 5}), None)
eq_(detector.process("d", {'id': 6}), None)
eq_(
detector.process("a", {'id': 7}),
({'id': 7}, [{'id': 6}, {'id': 5}], {'id': 4})
)
# Long (undetected) revert
eq_(detector.process("e", {'id': 8}), None)
eq_(detector.process("f", {'id': 9}), None)
eq_(detector.process("g", {'id': 10}), None)
eq_(detector.process("a", {'id': 11}), None)
|
electric-cloud/metakit
|
python/metakit.py
|
Python
|
mit
| 2,471
| 0.017402
|
"""
metakit.py -- Utility code for the Python interface to Metakit
$Id: metakit.py 1669 2007-06-16 00:23:25Z jcw $
This is part of Metakit, see http://www.equi4.com/metakit.html
This wraps the raw Mk4py compiled extension interface.
To use Metakit through this interface, simply do:
import metakit
After that, things like metakit.storage(...) are available,
as well as utilities defined below. This assumes that both
metakit.py and Mk4py.{dll,so} can be found by Python.
"""
_oldname = __name__
__name__ = "metakit"
__version__ = "2.4.9.7"
__description__ = "Python bindings to the Metakit database library"
__author__ = "Gordon McMillan / Jean-Claude Wippler"
__email__ = "jcw@equi4.com"
__url__ = "http://www.equi4.com/metakit/python.html"
__license__ = "X/MIT style, see: http://www.equi4.com/mklicense.html"
from Mk4py import *
import string
def dump(view, title=None):
"""pretty printer for MK views"""
widths = []
cols = []
justs = []
props = view.structure()
for prop in props:
widths.append(len(prop.name))
cols.append([None])
if prop.type in ('I','F','D','V'):
justs.append(string.rjust)
else:
justs.append(string.ljust)
for row in view:
for c in range(len(props)):
attr = getattr(row, props[c].name, None)
if type(attr) is type(view):
text = '%d rows' % len(attr)
else:
text = str(attr)
if len(text) > 20:
text = text[0:17] + '...'
widths[c] = max(widths[c],len(text))
cols[c].append(text)
if title: print title
for c in range(len(props)):
cols[c][0] = widths[c] * '-'
cols[c].append(cols[c][0])
print '', string.ljust(props[c].name, widths[c]),
print
for r in xrange(len(view)+2):
        for c in range(len(props)):
print '', justs[c](cols[c][r], widths[c]),
print
print " Total: %d rows" % len(view)
if _oldname == '__main__':
db = storage()
f = db.getas('frequents[drinker,bar,perweek:I]')
s = db.getas('serves[bar,beer,quantity:I]')
f.append(drinker='adam', bar='lolas', perweek=1)
    f.append(drinker='woody', bar='cheers', perweek=5)
f.append(drinker='sam', bar='cheers', perweek=5)
f.append(drinker='lola', bar='lolas', perweek=6)
s.append(bar='cheers', beer='bud', quantity=500)
s.append(bar='cheers', beer='samaddams', quantity=255)
s.append(bar='lolas', beer='mickies', quantity=1515)
dump(f, 'frequents:')
dump(s, 'serves:')
dump(f.join(s, s.bar), 'join on "bar":')
|
jwren/intellij-community
|
python/testData/inspections/PyUnresolvedReferencesInspection/UnusedImportsInPackage/p1/__init__.py
|
Python
|
apache-2.0
| 178
| 0.02809
|
from .m1 import f
from p1.m1 import f
from m1 import f
from a import g
<warning descr="Unused import statement 'from a import h'">from a import h</warning>
__all__ = ['f', 'g']
|
rorytrent/the-duke
|
duke/game.py
|
Python
|
gpl-3.0
| 499
| 0
|
import random
from .tiles import base
INITIAL_TILES = [
base.ASSASSIN, base.BOWMAN, base.CHAMPION, base.DRAGOON, base.FOOTMAN,
    base.GENERAL, base.KNIGHT, base.LONGBOWMAN, base.MARSHALL, base.PIKEMAN,
base.PIKEMAN, base.PRIEST, base.RANGER, base.SEER, base.WIZARD,
]
class Game(object):
def __init__(self, initial_tiles=INITIAL_TILES):
self.board = {}
        self.bags = (initial_tiles[:], initial_tiles[:])
for bag in self.bags:
random.shuffle(bag)
|
fpagyu/glory
|
pi/client.py
|
Python
|
mit
| 500
| 0.006
|
# coding: utf-8
from websocket import create_connection
def ws_handler():
ws = create_connection("ws://localhost:8000/echo")
try:
# ws.send("Hello, world")
while 1:
result = ws.recv()
print(result)
except:
pass
finally:
ws.close()
# with create_connection("ws://localhost:8000/echo") as ws:
# ws.send("Hello world")
# result = ws.recv()
# print(result)
if __name__ == "__mai
|
n__":
ws_handler()
|
UPCnet/genweb.serveistic
|
genweb/serveistic/tests/test_ws_client_problems.py
|
Python
|
gpl-3.0
| 10,661
| 0.000188
|
# -*- coding: utf-8 -*-
"""Unit tests for the Web Service client."""
import base64
import datetime
import json
import unittest
from mock import patch, MagicMock
from requests.exceptions import ConnectionError
from genweb.serveistic.ws_client.problems import (
Client, ClientException, Problem)
class TestWSClient(unittest.TestCase):
def setUp(self):
self.client = Client(
endpoint='http://endpoint',
login_username='test-username',
login_password='test-password')
def test_get_headers(self):
# All header params are given
client = Client(
endpoint='#',
login_username='username',
login_password='password',
content_type='application/xml')
headers = client._get_headers()
self.assertEqual(headers, {
'Content-type': 'application/xml',
'login.username': 'username',
'login.password': 'password',
'Authorization': "Basic {0}".format(
base64.b64encode("username:password"))})
# Only mandatory params are given
client = Client(
endpoint='#',
login_username='username',
login_password='password')
headers = client._get_headers()
self.assertEqual(headers, {
'Content-type': 'application/json',
'login.username': 'username',
'login.password': 'password',
'Authorization': "Basic {0}".format(
base64.b64encode("username:password"))})
# login.username and login.password are None
client = Client(
endpoint='#',
login_username=None,
login_password=None)
headers = client._get_headers()
self.assertEqual(headers, {
'Content-type': 'application/json',
'login.username': '',
'login.password': '',
'Authorization': "Basic {0}".format(base64.b64encode(':'))})
def test_parse_response_result_empty(self):
response = json.loads('{}')
try:
self.client._parse_response_result(response)
self.fail("ClientException should have been raised")
except ClientException as cexception:
self.assertEqual(
"'resultat' is not present in the response",
cexception.message)
def test_parse_response_result_with_undefined_exception(self):
response = json.loads('''
{
"resultat": "ERROR"
}''')
try:
self.client._parse_response_result(response)
self.fail("ClientException should have been raised")
except ClientException as cexception:
self.assertEqual(
"Error code UNDEFINED: Undefined",
cexception.message)
def test_parse_response_result_with_defined_exception(self):
        response = json.loads('''
{
"resultat": "ERROR",
"resultatMissatge": "This is the message"
}''')
try:
self.client._parse_response_result(response)
self.fail("ClientException should have been raised")
        except ClientException as cexception:
self.assertEqual(
"Error code UNDEFINED: This is the message",
cexception.message)
response = json.loads('''
{
"resultat": "ERROR",
"codiError": "5"
}''')
try:
self.client._parse_response_result(response)
self.fail("ClientException should have been raised")
except ClientException as cexception:
self.assertEqual(
"Error code 5: Undefined",
cexception.message)
response = json.loads('''
{
"resultat": "ERROR",
"codiError": "5",
"resultatMissatge": "This is the message"
}''')
try:
self.client._parse_response_result(response)
self.fail("ClientException should have been raised")
except ClientException as cexception:
self.assertEqual(
"Error code 5: This is the message",
cexception.message)
def test_parse_response_list_problems_empty(self):
response = json.loads('''
{
"resultat": "SUCCESS",
"resultatMissatge": "This is the message"
}''')
try:
self.client._parse_response_list_problems(response)
self.fail("ClientException should have been raised")
except ClientException as cexception:
self.assertEqual(
"'llistaProblemes' is not present in the response",
cexception.message)
def test_parse_response_list_problems_not_empty(self):
response = json.loads('''
{
"llistaProblemes":
[
{
"assumpte": "Gestió por VPN de gateway para servei atenció",
"productNom": "e-Connect",
"requirementId": "481897",
"creatPerId": "11235",
"productId": "33283",
"statusId": "PROBLEMA_OBERT",
"visiblePortalServeisTIC": "Y",
"descripcioProblema": "No es posible acceder a través de la vpn",
"creatPerNom": "Jose Antonio",
"creatPerCognom": "Tebar Garcia",
"dataCreacio": "2014-01-22 14:33:47.362",
"dataLimitResolucioString": "2014-02-12 11:13:07.152",
"idEmpresa": "1123",
"urlProblema": "/problemes/control/problemaDetallDadesGenerals"
},
{}
],
"resultat": "SUCCESS",
"resultatMissatge": "Llista problemes retornada"
}
''')
results = self.client._parse_response_list_problems(response)
self.assertEqual(len(results), 2)
self.assertEqual(
results[0],
Problem(
topic=u"Gestió por VPN de gateway para servei atenció",
description=u"No es posible acceder a través de la vpn",
url=u"/problemes/control/problemaDetallDadesGenerals",
date_creation=datetime.datetime(
2014, 01, 22, 14, 33, 47, 362000),
date_fix=datetime.datetime(
2014, 02, 12, 11, 13, 07, 152000)))
self.assertEqual(
results[1],
Problem(
topic=u'',
description=u'',
url=u'',
date_creation=u'',
date_fix=u''))
def test_parse_response_list_problems_wrong_format(self):
response = json.loads('''
{
"llistaProblemes":
[
{
"assumpte": "Gestió por VPN de gateway para servei atenció",
"descripcioProblema": "No es posible acceder a través de la vpn",
"dataCreacio": "2014/01/22 14:33:47.362",
"urlProblema": "/problemes/control/problemaDetallDadesGenerals"
}
],
"resultat": "SUCCESS",
"resultatMissatge": "Llista problemes retornada"
}
''')
results = self.client._parse_response_list_problems(response)
self.assertEqual(len(results), 1)
self.assertEqual(
results[0],
Problem(
topic=u"Gestió por VPN de gateway para servei atenció",
description=u"No es posible acceder a través de la vpn",
url=u"/problemes/control/problemaDetallDadesGenerals",
date_creation=u'',
date_fix=u''))
def test_list_problems(self):
# Parameter product_id empty
try:
self.client.list_problems(" \n \t ")
self.fail("ClientException should have been raised")
except ClientException as exception:
self.assertEqual("Parameter 'product_id' cannot be empty",
exception.message)
try:
self.client.list_problems(None)
self.fail("ClientException should have been raised")
except ClientException as exception:
self.assertEqual("Parameter 'product_id' cannot be empty
|
tectronics/evennia
|
src/utils/idmapper/base.py
|
Python
|
bsd-3-clause
| 19,327
| 0.00564
|
"""
Django ID mapper
Modified for Evennia by making sure that no model references
leave caching unexpectedly (no use of WeakRefs).
Also adds cache_size() for monitoring the size of the cache.
"""
import os, threading
#from twisted.internet import reactor
#from twisted.internet.threads import blockingCallFromThread
from twisted.internet.reactor import callFromThread
from django.core.exceptions import ObjectDoesNotExist, FieldError
from django.db.models.base import Model, ModelBase
from django.db.models.signals import post_save, pre_delete, post_syncdb
from src.utils.utils import dbref, get_evennia_pids, to_str
from manager import SharedMemoryManager
_FIELD_CACHE_GET = None
_FIELD_CACHE_SET = None
_GA = object.__getattribute__
_SA = object.__setattr__
_DA = object.__delattr__
# determine if our current pid is different from the server PID (i.e.
# if we are in a subprocess or not)
from src import PROC_MODIFIED_OBJS
# get info about the current process and thread
_SELF_PID = os.getpid()
_SERVER_PID, _PORTAL_PID = get_evennia_pids()
_IS_SUBPROCESS = (_SERVER_PID and _PORTAL_PID) and not _SELF_PID in (_SERVER_PID, _PORTAL_PID)
_IS_MAIN_THREAD = threading.currentThread().getName() == "MainThread"
#_SERVER_PID = None
#_PORTAL_PID = None
# #global _SERVER_PID, _PORTAL_PID, _IS_SUBPROCESS, _SELF_PID
# if not _SERVER_PID and not _PORTAL_PID:
# _IS_SUBPROCESS = (_SERVER_PID and _PORTAL_PID) and not _SELF_PID in (_SERVER_PID, _PORTAL_PID)
class SharedMemoryModelBase(ModelBase):
# CL: upstream had a __new__ method that skipped ModelBase's __new__ if
# SharedMemoryModelBase was not in the model class's ancestors. It's not
# clear what was the intended purpose, but skipping ModelBase.__new__
# broke things; in particular, default manager inheritance.
def __call__(cls, *args, **kwargs):
"""
this method will either create an instance (by calling the default implementation)
or try to retrieve one from the class-wide cache by infering the pk value from
args and kwargs. If instance caching is enabled for this class, the cache is
populated whenever possible (ie when it is possible to infer the pk value).
"""
def new_instance():
return super(SharedMemoryModelBase, cls).__call__(*args, **kwargs)
instance_key = cls._get_cache_key(args, kwargs)
# depending on the arguments, we might not be able to infer the PK, so in that case we create a new instance
if instance_key is None:
return new_instance()
cached_instance = cls.get_cached_instance(instance_key)
if cached_instance is None:
cached_instance = new_instance()
cls.cache_instance(cached_instance)
return cached_instance
def _prepare(cls):
cls.__instance_cache__ = {} #WeakValueDictionary()
super(SharedMemoryModelBase, cls)._prepare()
def __new__(cls, classname, bases, classdict, *args, **kwargs):
"""
Field shortcut creation:
Takes field names db_* and creates property wrappers named without the db_ prefix. So db_key -> key
This wrapper happens on the class level, so there is no overhead when creating objects. If a class
        already has a wrapper of the given name, the automatic creation is skipped. Note: Remember to
        document this auto-wrapping in the class header, since this could otherwise seem like magic to the user.
"""
def create_wrapper(cls, fieldname, wrappername, editable=True, foreignkey=False):
"Helper method to create property wrappers with unique names (must be in separate call)"
def _get(cls, fname):
"Wrapper for getting database field"
#print "_get:", fieldname, wrapper
|
name,_GA(cls,fieldname)
return _GA(cls, fieldname)
def _get_foreign(cls, fname):
"Wrapper for returing foreignkey fields"
value = _GA(cls, fieldname)
#print "_get_foreign:value:", value
try:
                return _GA(value, "typeclass")
except:
return value
def _set_nonedit(cls, fname, value):
"Wrapper for blocking editing of field"
raise FieldError("Field %s cannot be edited." % fname)
def _set(cls, fname, value):
"Wrapper for setting database field"
_SA(cls, fname, value)
# only use explicit update_fields in save if we actually have a
# primary key assigned already (won't be set when first creating object)
update_fields = [fname] if _GA(cls, "_get_pk_val")(_GA(cls, "_meta")) is not None else None
_GA(cls, "save")(update_fields=update_fields)
def _set_foreign(cls, fname, value):
"Setter only used on foreign key relations, allows setting with #dbref"
try:
value = _GA(value, "dbobj")
except AttributeError:
pass
if isinstance(value, (basestring, int)):
value = to_str(value, force_string=True)
if (value.isdigit() or value.startswith("#")):
# we also allow setting using dbrefs, if so we try to load the matching object.
# (we assume the object is of the same type as the class holding the field, if
# not a custom handler must be used for that field)
dbid = dbref(value, reqhash=False)
if dbid:
model = _GA(cls, "_meta").get_field(fname).model
try:
value = model._default_manager.get(id=dbid)
except ObjectDoesNotExist:
# maybe it is just a name that happens to look like a dbid
pass
_SA(cls, fname, value)
# only use explicit update_fields in save if we actually have a
# primary key assigned already (won't be set when first creating object)
update_fields = [fname] if _GA(cls, "_get_pk_val")(_GA(cls, "_meta")) is not None else None
_GA(cls, "save")(update_fields=update_fields)
def _del_nonedit(cls, fname):
"wrapper for not allowing deletion"
raise FieldError("Field %s cannot be edited." % fname)
def _del(cls, fname):
"Wrapper for clearing database field - sets it to None"
_SA(cls, fname, None)
update_fields = [fname] if _GA(cls, "_get_pk_val")(_GA(cls, "_meta")) is not None else None
_GA(cls, "save")(update_fields=update_fields)
# wrapper factories
fget = lambda cls: _get(cls, fieldname)
if not editable:
fset = lambda cls, val: _set_nonedit(cls, fieldname, val)
elif foreignkey:
fget = lambda cls: _get_foreign(cls, fieldname)
fset = lambda cls, val: _set_foreign(cls, fieldname, val)
else:
fset = lambda cls, val: _set(cls, fieldname, val)
fdel = lambda cls: _del(cls, fieldname) if editable else _del_nonedit(cls,fieldname)
# assigning
classdict[wrappername] = property(fget, fset, fdel)
#type(cls).__setattr__(cls, wrappername, property(fget, fset, fdel))#, doc))
# exclude some models that should not auto-create wrapper fields
if cls.__name__ in ("ServerConfig", "TypeNick"):
return
# dynamically create the wrapper properties for all fields not already handled (manytomanyfields are always handlers)
for fieldname, field in ((fname, field) for fname, field in classdict.items()
if fname.startswith("db_") and type(field).__name__ != "ManyToManyField"):
foreignkey = type(field).__name__ == "ForeignKey"
|
futurice/vdsm
|
vdsm/kaxmlrpclib.py
|
Python
|
gpl-2.0
| 5,941
| 0.000842
|
#
# Copyright 2008-2011 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# Refer to the README and COPYING files for full details of the license
#
"""xmlrpclib with a keep-alive transport.
Throws a timeout exception to the client when the underlying
TCP transport is broken.
Inspired by Volodymyr Orlenko,
http://blog.bjola.ca/2007/08/using-timeout-with-xmlrpclib.html
Sets up an xmlrpc Server with a modified Transport
(TcpkeepTransport) which uses a (slightly modified) HTTP
protocol (TcpkeepHTTP) that uses TcpkeepHTTPConnection when it
needs to set up a connection.
"""
import xmlrpclib
import httplib
import socket
# It would have been nicer to make these server-specific and not module-wide
# constants. But it is not really important, so it can wait.
KEEPIDLE = 60
KEEPINTVL = 10
KEEPCNT = 6
CONNECTTIMEOUT = 160
def Server(url, *args, **kwargs):
kwargs['transport'] = TcpkeepTransport()
server = xmlrpclib.Server(url, *args, **kwargs)
return server
ServerProxy = Server
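# Minimal usage sketch (the URL and the ping method are hypothetical; any
# XML-RPC method exposed by the peer could be called instead):
#
#   server = Server('http://localhost:54321')
#   server.ping()  # raises a socket timeout if the TCP peer silently dies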
class TcpkeepTransport(xmlrpclib.Transport):
def make_connection(self, host):
if hasattr(xmlrpclib.Transport, "single_request"): # Python 2.7
return TcpkeepHTTPConnection(host)
else:
return TcpkeepHTTP(host)
class TcpkeepHTTPConnection(httplib.HTTPConnection):
def connect(self):
"""Connect to the host and port specified in __init__.
        taken from httplib.HTTPConnection.connect(), with a few additions for
        connection timeout and keep-alive.
        After TCP_KEEPIDLE seconds of silence, TCP_KEEPCNT probes would be
        sent, TCP_KEEPINTVL seconds apart. If all of them
        fail, the socket is closed."""
msg = "getaddrinfo returns an empty list"
for res in socket.getaddrinfo(self.host, self.port, 0,
socket.SOCK_STREAM):
af, socktype, proto, canonname, sa = res
try:
self.sock = socket.socket(af, socktype, proto)
if self.debuglevel > 0:
print "connect: (%s, %s)" % (self.host, self.port)
oldtimeout = self.sock.gettimeout() # added
self.sock.settimeout(CONNECTTIMEOUT) # added
self.sock.connect(sa)
self.sock.settimeout(oldtimeout) # added
except socket.error as msg:
if self.debuglevel > 0:
print 'connect fail:', (self.host, self.port)
if self.sock:
self.sock.close()
self.sock = None
continue
break
if not self.sock:
raise socket.error(msg)
# beginning of added code
self.sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
self.sock.setsockopt(socket.SOL_TCP, socket.TCP_KEEPIDLE, KEEPIDLE)
self.sock.setsockopt(socket.SOL_TCP, socket.TCP_KEEPINTVL, KEEPINTVL)
self.sock.setsockopt(socket.SOL_TCP, socket.TCP_KEEPCNT, KEEPCNT)
# end of added code
class TcpkeepHTTP(httplib.HTTP):
_connection_class = TcpkeepHTTPConnection
###################
# the same, for ssl
from vdsm import sslutils
import ssl
def SslServer(url, ctx, *args, **kwargs):
kwargs['transport'] = TcpkeepSafeTransport(ctx)
server = xmlrpclib.Server(url, *args, **kwargs)
return server
SslServerProxy = SslServer
class TcpkeepSafeTransport(sslutils.VerifyingSafeTransport):
def make_connection(self, host):
chost, self._extra_headers, x509 = self.get_host_info(host)
if hasattr(xmlrpclib.SafeTransport, "single_request"): # Python 2.7
return TcpkeepHTTPSConnection(
chost, None, key_file=self.key_file, strict=None,
timeout=CONNECTTIMEOUT,
cert_file=self.cert_file, ca_certs=self.ca_certs,
cert_reqs=self.cert_reqs)
else:
return TcpkeepHTTPS(
chost, None, key_file=self.key_file,
cert_file=self.cert_file, ca_certs=self.ca_certs,
cert_reqs=self.cert_reqs)
class TcpkeepHTTPSConnection(sslutils.VerifyingHTTPSConnection):
def __init__(self, host, port=None, key_file=None, cert_file=None,
strict=None, timeout=CONNECTTIMEOUT,
ca_certs=None, cert_reqs=ssl.CERT_REQUIRED):
sslutils.VerifyingHTTPSConnection.__init__(
self, host, port=port, key_file=key_file, cert_file=cert_file,
strict=strict, timeout=timeout,
ca_certs=ca_certs, cert_reqs=cert_reqs)
def connect(self):
sslutils.VerifyingHTTPSConnection.connect(self)
        # after TCP_KEEPIDLE seconds of silence, TCP_KEEPCNT probes would be
        # sent, TCP_KEEPINTVL seconds apart. If all of them fail,
        # the socket is closed.
self.sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
self.sock.setsockopt(socket.SOL_TCP, socket.TCP_KEEPIDLE, KEEPIDLE)
self.sock.setsockopt(socket.SOL_TCP, socket.TCP_KEEPINTVL, KEEPINTVL)
self.sock.setsockopt(socket.SOL_TCP, socket.TCP_KEEPCNT, KEEPCNT)
class TcpkeepHTTPS(sslutils.VerifyingHTTPS):
_connection_class = TcpkeepHTTPSConnection
|
Xarxa6/hackathon
|
src/config.py
|
Python
|
mit
| 1,937
| 0.009809
|
import json
import os, sys, traceback
script_dir = os.path.dirname(__file__)
config_file = os.path.join(script_dir, './config/application.json')
defaults = os.path.join(script_dir, './config/reference.json')
def loadConfig(f):
print "[INFO] Loading configuration from " + f + "..."
with open(f, 'r') as opened:
configuration = json.load(opened)
dbConfig = configuration['analytics-db']
logConfig = configuration['log']
bootConfig = configuration['server']
    return dbConfig, logConfig, bootConfig
# If dev is passed to init.py, this script loads the db linked via docker/fig in /etc/hosts
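# A docker/fig-generated /etc/hosts entry is assumed to look roughly like
# "172.17.0.2<TAB>db": an IP address and the hostname 'db' separated by a
# tab, which matches the line.strip('\n').split('\t') parsing used below.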
if len(sys.argv) > 1 and sys.argv[1].upper() == 'DEV':
print "Booting API using /etc/hosts file..."
logConfig = {
"level" : "DEBUG",
"file" : ""
}
bootConfig = {
"host" : "0.0.0.0",
"port" : 5000
}
with open('/etc/hosts','r') as hosts:
found = False
for line in hosts:
if 'db' in line:
found = True
ip, hostname = line.strip('\n').split('\t')
dbConfig = {
"host" : ip,
"name" : "xarxa6",
"user" : "api",
"pass" : "1234" }
if found is False:
print "Hosts file:"
for line in hosts:
print line
raise EnvironmentError('Could not load linked db via /etc/hosts')
else:
try:
dbConfig, logConfig, bootConfig = loadConfig(config_file)
except:
print "[WARNING] Could not load application.json. Importing defaults..."
try:
dbConfig, logConfig, bootConfig = loadConfig(defaults)
except:
print "[ERROR] COULD NOT LOAD ANY CONFIG!"
print traceback.print_exc()
|
Zarthus/CloudBotRefresh
|
plugins/test/test_fishbans.py
|
Python
|
gpl-3.0
| 3,502
| 0.001428
|
import json
import pytest
from plugins.fishbans import fishbans, bancount
from cloudbot import http
test_user = "notch"
test_api = """
{"success":true,"stats":{"username":"notch","uuid":"069a79f444e94726a5befca90e38aaf5","totalbans":11,"service":{"mcbans":0,"mcbouncer":11,"mcblockit":0,"minebans":0,"glizer":0}}}
"""
test_api_single = """
{"success":true,"stats":{"username":"notch","uuid":"069a79f444e94726a5befca90e38aaf5","totalbans":1,"service":{"mcbans":0,"mcbouncer":1,"mcblockit":0,"minebans":0,"glizer":0}}}
"""
test_api_none = """
{"success":true,"stats":{"username":"notch","uuid":"069a79f444e94726a5befca90e38aaf5","totalbans":0,"service":{"mcbans":0,"mcbouncer":0,"mcblockit":0,"minebans":0,"glizer":0}}}
"""
test_api_failed = """
{"success":false}
"""
bans_reply = "The user \x02notch\x02 has \x0211\x02 bans - http://fishbans.com/u/notch/"
count_reply = "Bans for \x02notch\x02: mcbouncer: \x0211\x02 - http://fishbans.com/u/notch/"
bans_reply_single = "The user \x02notch\x02 has \x021\x02 ban - http://fishbans.com/u/notch/"
bans_reply_failed = "Could not fetch ban data for notch."
count_reply_failed = "Could not fetch ban data for notch."
bans_reply_none = "The user \x02notch\x02 has no bans - http://fishbans.com/u/notch/"
count_reply_none = "The user \x02notch\x02 has no bans - http://fishbans.com/u/notch/"
def test_bans(monkeypatch):
""" tests fishbans with a successful API response having multiple bans
"""
def fake_http(url):
assert url == "http://api.fishbans.com/stats/notch/"
return json.loads(test_api)
monkeypatch.setattr(http, "get_json", fake_http)
    assert fishbans(test_user) == bans_reply
def test_bans_single(monkeypatch):
""" tests fishbans with a successful API response having a single ban
"""
def fake_http(url):
assert url == "http://api.fishbans.com/stats/notch/"
return json.loads(test_api_single)
monkeypatch.setattr(http, "get_json", fake_http)
assert fishbans(test_user) == bans_reply_single
def test_bans_failed(monkeypatch):
""" tests fishbans with a failed API response
"""
def fake_http(url):
assert url == "http://api.fishbans.com/stats/notch/"
return json.loads(test_api_failed)
monkeypatch.setattr(http, "get_json", fake_http)
assert fishbans(test_user) == bans_reply_failed
def test_bans_none(monkeypatch):
""" tests fishbans with a successful API response having no bans
"""
def fake_http(url):
assert url == "http://api.fishbans.com/stats/notch/"
return json.loads(test_api_none)
monkeypatch.setattr(http, "get_json", fake_http)
assert fishbans(test_user) == bans_reply_none
def test_count(monkeypatch):
def fake_http(url):
assert url == "http://api.fishbans.com/stats/notch/"
return json.loads(test_api)
monkeypatch.setattr(http, "get_json", fake_http)
assert bancount(test_user) == count_reply
def test_count_failed(monkeypatch):
def fake_http(url):
assert url == "http://api.fishbans.com/stats/notch/"
return json.loads(test_api_failed)
monkeypatch.setattr(http, "get_json", fake_http)
assert bancount(test_user) == count_reply_failed
def test_count_no_bans(monkeypatch):
def fake_http(url):
assert url == "http://api.fishbans.com/stats/notch/"
return json.loads(test_api_none)
monkeypatch.setattr(http, "get_json", fake_http)
assert bancount(test_user) == count_reply_none
|
rivalrockets/rivalrockets-api
|
app/api_1_0/app.py
|
Python
|
mit
| 3,161
| 0.000633
|
from flask_restful import Api
from . import api_blueprint
from ..api_1_0.resources.users import UserAPI, UserListAPI
from ..api_1_0.resources.authentication import (TokenRefresh, UserLogin,
UserLogoutAccess, UserLogoutRefresh)
from ..api_1_0.resources.machines import MachineAPI, MachineListAPI, \
UserMachineListAPI
from ..api_1_0.resources.revisions import RevisionAPI, RevisionListAPI, \
MachineRevisionListAPI
from ..api_1_0.resources.cinebenchr15results import \
CinebenchR15ResultAPI, CinebenchR15ResultListAPI, \
RevisionCinebenchR15ResultListAPI
from ..api_1_0.resources.futuremark3dmark06results import \
Futuremark3DMark06ResultAPI, \
Futuremark3DMark06ResultListAPI, \
RevisionFuturemark3DMark06ResultListAPI
from ..api_1_0.resources.futuremark3dmarkresults import \
Futuremark3DMarkResultAPI, Futuremark3DMarkResultListAPI, \
RevisionFuturemark3DMarkResultListAPI
api = Api(api_blueprint, catch_all_404s=True)
api.add_resource(UserListAPI, '/users', endpoint='users')
api.add_resource(UserAPI, '/users/<int:id>', endpoint='user')
api.add_resource(UserMachineListAPI, '/users/<int:id>/machines',
endpoint='user_machines')
api.add_resource(UserLogin, '/login')
api.add_resource(UserLogoutAccess, '/logout/access')
api.add_resource(UserLogoutRefresh, '/logout/refresh')
api.add_resource(TokenRefresh, '/tokenrefresh', endpoint='tokenrefresh')
api.add_resource(MachineListAPI, '/machines', endpoint='machines')
api.add_resource(MachineAPI, '/machines/<int:id>', endpoint='machine')
api.add_resource(RevisionListAPI, '/revisions', endpoint='revisions')
api.add_resource(RevisionAPI, '/revisions/<int:id>', endpoint='revision')
api.add_resource(MachineRevisionListAPI, '/machines/<int:id>/revisions',
endpoint='machine_revisions')
api.add_resource(CinebenchR15ResultListAPI, '/cinebenchr15results',
endpoint='cinebenchr15results')
api.add_resource(CinebenchR15ResultAPI, '/cinebenchr15results/<int:id>',
endpoint='cinebenchr15result')
api.add_resource(RevisionCinebenchR15ResultListAPI,
'/revisions/<int:id>/cinebenchr15results',
                 endpoint='revision_cinebenchr15results')
api.add_resource(Futuremark3DMark06ResultListAPI, '/futuremark3dmark06results',
endpoint='futuremark3dmark06results')
api.add_resource(Futuremark3DMark06ResultAPI,
'/futuremark3dmark06results/<int:id>',
endpoint='futuremark3dmark06result')
api.add_resource(RevisionFuturemark3DMark06ResultListAPI,
'/revisions/<int:id>/futuremark3dmark06results',
endpoint='revision_futuremark3dmark06results')
api.add_resource(Futuremark3DMarkResultListAPI, '/futuremark3dmarkresults',
endpoint='futuremark3dmarkresults')
api.add_resource(Futuremark3DMarkResultAPI,
'/futuremark3dmarkresults/<int:id>',
endpoint='futuremark3dmarkresult')
api.add_resource(RevisionFuturemark3DMarkResultListAPI,
'/revisions/<int:id>/futuremark3dmarkresults',
endpoint='revision_futuremark3dmarkresults')
|
hoaibang07/Webscrap
|
transcripture/sources/crawler_chuongthieu.py
|
Python
|
gpl-2.0
| 7,017
| 0.007981
|
# -*- encoding: utf-8 -*-
import io
from bs4 import BeautifulSoup
from bs4 import SoupStrainer
import urllib2
import urlparse
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
import time
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.common.exceptions import TimeoutException
import os.path
zenmatePath = "/home/hbc/.mozilla/firefox/yeyuaq0s.default/extensions/firefox@zenmate.com.xpi"
ffprofile = webdriver.FirefoxProfile()
# ffprofile.set_preference("javascript.enabled", False)
# ffprofile.set_preference('permissions.default.image', 2)
# ffprofile.set_preference('permissions.default.stylesheet', 2)
# ffprofile.set_preference('dom.ipc.plugins.enabled.libflashplayer.so', 'false')
ffprofile.add_extension(zenmatePath)
ffprofile.add_extension('/home/hbc/Downloads/quickjava-2.0.6-fx.xpi')
ffprofile.set_preference("thatoneguydotnet.QuickJava.curVersion", "2.0.6.1") ## Prevents loading the 'thank you for installing screen'
ffprofile.set_preference("thatoneguydotnet.QuickJava.startupStatus.Images", 2) ## Turns images off
ffprofile.set_preference("thatoneguydotnet.QuickJava.startupStatus.AnimatedImage", 2) ## Turns animated images off
ffprofile.set_preference("thatoneguydotnet.QuickJava.startupStatus.CSS", 2) ## CSS
# ffprofile.set_preference("thatoneguydotnet.QuickJava.startupStatus.Cookies", 2) ## Cookies
ffprofile.set_preference("thatoneguydotnet.QuickJava.startupStatus.Flash", 2) ## Flash
ffprofile.set_preference("thatoneguydotnet.QuickJava.startupStatus.Java", 2) ## Java
# ffprofile.set_preference("thatoneguydotnet.QuickJava.startupStatus.JavaScript", 2) ## JavaScript
ffprofile.set_preference("thatoneguydotnet.QuickJava.startupStatus.Silverlight", 2) ## Silverlight
driver = webdriver.Firefox(ffprofile)
def _remove_div_vdx(soup):
for div in soup.find_all('div', class_='vidx'):
div.extract()
return soup
def get_data(urlchuong_list, i):
filename = 'urlsach/data/bosung/sach' + str(i) + '.txt'
ftmp = io.open(filename, 'w', encoding='utf-8')
try:
# hdrs = {'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
# 'Connection': 'keep-alive',
# 'Cookie': 'ipq_lip=20376774; ipq_set=1453874029; __atuvc=2%7C4; __utma=126044488.676620502.1453787537.1453787537.1453787537.1; __utmz=126044488.1453787537.1.1.utmcsr=(direct)|utmccn=(direct)|utmcmd=(none); PHPSESSID=ed3f4874b92a29b6ed036adfa5ad6fb3; ipcountry=us',
# 'Host': 'www.transcripture.com',
# 'Referer': 'http://www.transcripture.com/vietnamese-spanish-genesis-1.html',
# 'User-Agent': 'Mozilla/5.0 (X11; Ubuntu; Linux i686; rv:43.0) Gecko/20100101 Firefox/43.0'
# }
count = 1
for urlchuong in urlchuong_list:
            print('Fetching chapter %d, book %d' % (count, i))
            # urlchuong = 'http://www.transcripture.com/vietnamese-chinese-revelation-3.html'
# print urlchuong
# # create request
# req = urllib2.Request(urlchuong, headers=hdrs)
# # get response
# response = urllib2.urlopen(req)
# soup = BeautifulSoup(response.read())
# Load a page
driver.get(urlchuong)
# delay = 40 # seconds
# try:
# wait = WebDriverWait(driver, delay)
# path = '/html/body/center/div[1]/div[2]/div[4]/table/tbody/tr[2]/td[1]/div/div[1]/form[1]/select/option[66]'
# elem = driver.find_element_by_xpath(path)
# wait.until(EC.visibility_of(elem))
# print "Page is ready!"
# except TimeoutException:
# print "Loading took too much time!"
# #reload page
# body = driver.find_element_by_tag_name("body")
# body.send_keys(Keys.ESCAPE)
# body.send_keys(Keys.F5)
content = driver.page_source
soup = BeautifulSoup(content)
soup = _remove_div_vdx(soup)
# print soup
table_tag = soup.find_all('table', attrs={'width':'100%', 'cellspacing':'0'})[0]
tr_tags = table_tag.find_all('tr')
_len = len(tr_tags)
# in first tr tag:
h2_class = tr_tags[0].find_all('h2', class_='cphd')
ftmp.write(u'' + h2_class[0].get_text() + '|')
ftmp.write(u'' + h2_class[1].get_text() + '\n')
# print table_tag
for x in xrange(1,_len):
data = tr_tags[x].get_text('|')
# print data
# url_ec = url.encode('unicode','utf-8')
ftmp.write(u'' + data + '\n')
count = count + 1
# close file
ftmp.close()
except Exception, e:
print e
# close file
ftmp.close()
def check_numline(filename):
urlsach_list = []
urlsach_file = open(filename, 'r')
for line in urlsach_file:
urlsach_list.append(line.strip())
_len = len(urlsach_list)
return _len
def getsttchuongthieu(sachi):
list_stt = []
urlsach = 'urlsach/sach' + str(sachi) + '.txt'
    # check the number of lines in the book's URL file, which corresponds to its number of chapters
numline = check_numline(urlsach)
fname = 'urlsach/data/partcomplete/sach' + str(sachi) + '.txt'
    # read data from the book's data file
data = open(fname).read()
    # check whether each chapter number is already present in the book's data file
for i in xrange(1,numline + 1):
key = str(i)
        # print('reached this point')
if key not in data:
list_stt.append(i)
return list_stt
def getlisturlchuongthieu(sachi):
list_chuongthieu = []
list_stt = getsttchuongthieu(sachi)
fname = 'urlsach/sach' + str(sachi) + '.txt'
fp = open(fname)
lines=fp.readlines()
for stt in list_stt:
list_chuongthieu.append(lines[stt-1])
return list_chuongthieu
def main():
for x in xrange(1,67):
        # check whether the file exists in the partcomplete directory
f2name = 'urlsach/data/partcomplete/sach' + str(x) + '.txt'
if os.path.isfile(f2name):
list_urlchuongthieu = getlisturlchuongthieu(x)
get_data(list_urlchuongthieu, x)
if __name__ == '__main__':
# driver = webdriver.Firefox()
driver.get("about:blank")
# open new tab
# body = driver.find_element_by_tag_name("body")
# body.send_keys(Keys.CONTROL + 't')
# time.sleep(15)
    print('Enter any character to continue the program')
key = raw_input()
main()
# close the tab
driver.find_element_by_tag_name('body').send_keys(Keys.COMMAND + 'w')
driver.close()
# urlchuong_list = ['http://www.transcripture.com/vietnamese-chinese-exodus-1.html']
# get_data(urlchuong_list, 2)
|
python-zk/kazoo
|
kazoo/tests/test_queue.py
|
Python
|
apache-2.0
| 6,221
| 0
|
import uuid
import pytest
from kazoo.testing import KazooTestCase
from kazoo.tests.util import CI_ZK_VERSION
class KazooQueueTests(KazooTestCase):
def _makeOne(self):
path = "/" + uuid.uuid4().hex
return self.client.Queue(path)
def test_queue_validation(self):
queue = self._makeOne()
with pytest.raises(TypeError):
queue.put({})
with pytest.raises(TypeError):
queue.put(b"one", b"100")
with pytest.raises(TypeError):
queue.put(b"one", 10.0)
with pytest.raises(ValueError):
queue.put(b"one", -100)
with pytest.raises(ValueError):
queue.put(b"one", 100000)
def test_empty_queue(self):
queue = self._makeOne()
assert len(queue) == 0
assert queue.get() is None
assert len(queue) == 0
def test_queue(self):
queue = self._makeOne()
queue.put(b"one")
queue.put(b"two")
queue.put(b"three")
assert len(queue) == 3
assert queue.get() == b"one"
assert queue.get() == b"two"
assert queue.get() == b"three"
assert len(queue) == 0
def test_priority(self):
queue = self._makeOne()
queue.put(b"four", priority=101)
queue.put(b"one", priority=0)
queue.put(b"two", priority=0)
queue.put(b"three", priority=10)
assert queue.get() == b"one"
        assert queue.get() == b"two"
assert queue.get() == b"three"
assert queue.get() == b"four"
class KazooLockingQueueTests(KazooTestCase):
def setUp(self):
KazooTestCase.setUp(self)
skip = False
if CI_ZK_VERSION and CI_ZK_VERSION < (3, 4):
skip = True
elif CI_ZK_VERSION and CI_ZK_VERSION >= (3, 4):
skip = False
else:
ver = self.client.server_version()
if ver[1] < 4:
skip = True
if skip:
pytest.skip("Must use Zookeeper 3.4 or above")
def _makeOne(self):
path = "/" + uuid.uuid4().hex
return self.client.LockingQueue(path)
def test_queue_validation(self):
queue = self._makeOne()
with pytest.raises(TypeError):
queue.put({})
with pytest.raises(TypeError):
queue.put(b"one", b"100")
with pytest.raises(TypeError):
queue.put(b"one", 10.0)
with pytest.raises(ValueError):
queue.put(b"one", -100)
with pytest.raises(ValueError):
queue.put(b"one", 100000)
with pytest.raises(TypeError):
queue.put_all({})
with pytest.raises(TypeError):
queue.put_all([{}])
with pytest.raises(TypeError):
queue.put_all([b"one"], b"100")
with pytest.raises(TypeError):
queue.put_all([b"one"], 10.0)
with pytest.raises(ValueError):
queue.put_all([b"one"], -100)
with pytest.raises(ValueError):
queue.put_all([b"one"], 100000)
def test_empty_queue(self):
queue = self._makeOne()
assert len(queue) == 0
assert queue.get(0) is None
assert len(queue) == 0
def test_queue(self):
queue = self._makeOne()
queue.put(b"one")
queue.put_all([b"two", b"three"])
assert len(queue) == 3
assert not queue.consume()
assert not queue.holds_lock()
assert queue.get(1) == b"one"
assert queue.holds_lock()
# Without consuming, should return the same element
assert queue.get(1) == b"one"
assert queue.consume()
assert not queue.holds_lock()
assert queue.get(1) == b"two"
assert queue.holds_lock()
assert queue.consume()
assert not queue.holds_lock()
assert queue.get(1) == b"three"
assert queue.holds_lock()
assert queue.consume()
assert not queue.holds_lock()
assert not queue.consume()
assert len(queue) == 0
def test_consume(self):
queue = self._makeOne()
queue.put(b"one")
assert not queue.consume()
queue.get(0.1)
assert queue.consume()
assert not queue.consume()
def test_release(self):
queue = self._makeOne()
queue.put(b"one")
assert queue.get(1) == b"one"
assert queue.holds_lock()
assert queue.release()
assert not queue.holds_lock()
assert queue.get(1) == b"one"
assert queue.consume()
assert not queue.release()
assert len(queue) == 0
def test_holds_lock(self):
queue = self._makeOne()
assert not queue.holds_lock()
queue.put(b"one")
queue.get(0.1)
assert queue.holds_lock()
queue.consume()
assert not queue.holds_lock()
def test_priority(self):
queue = self._makeOne()
queue.put(b"four", priority=101)
queue.put(b"one", priority=0)
queue.put(b"two", priority=0)
queue.put(b"three", priority=10)
assert queue.get(1) == b"one"
assert queue.consume()
assert queue.get(1) == b"two"
assert queue.consume()
assert queue.get(1) == b"three"
assert queue.consume()
assert queue.get(1) == b"four"
assert queue.consume()
def test_concurrent_execution(self):
queue = self._makeOne()
value1 = []
value2 = []
value3 = []
event1 = self.client.handler.event_object()
event2 = self.client.handler.event_object()
event3 = self.client.handler.event_object()
def get_concurrently(value, event):
q = self.client.LockingQueue(queue.path)
value.append(q.get(0.1))
event.set()
self.client.handler.spawn(get_concurrently, value1, event1)
self.client.handler.spawn(get_concurrently, value2, event2)
self.client.handler.spawn(get_concurrently, value3, event3)
queue.put(b"one")
event1.wait(0.2)
event2.wait(0.2)
event3.wait(0.2)
result = value1 + value2 + value3
assert result.count(b"one") == 1
assert result.count(None) == 2
|
jameswatt2008/jameswatt2008.github.io
|
python/Python基础/截图和代码/面对对象-2/10-多继承.py
|
Python
|
gpl-2.0
| 259
| 0.015444
|
class Base(object):
def test(self):
print("----Base")
class A(Base):
def test1(self):
print("---
|
--test1")
class B(Base):
    def test2(self):
print("-----test2")
class C(A,B):
pass
c = C()
c.test1()
c.test2()
c.test()
|
kubevirt/client-python
|
kubevirt/models/k8s_io_apimachinery_pkg_apis_meta_v1_root_paths.py
|
Python
|
apache-2.0
| 3,286
| 0.000609
|
# coding: utf-8
"""
KubeVirt API
    This is the KubeVirt API, an add-on for Kubernetes.
OpenAPI spec version: 1.0.0
Contact: kubevirt-dev@googlegroups.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class K8sIoApimachineryPkgApisMetaV1RootPaths(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
      swagger_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'paths': 'list[str]'
}
attribute_map = {
'paths': 'paths'
}
def __init__(self, paths=None):
"""
K8sIoApimachineryPkgApisMetaV1RootPaths - a model defined in Swagger
"""
self._paths = None
self.paths = paths
@property
def paths(self):
"""
Gets the paths of this K8sIoApimachineryPkgApisMetaV1RootPaths.
paths are the paths available at root.
:return: The paths of this K8sIoApimachineryPkgApisMetaV1RootPaths.
:rtype: list[str]
"""
return self._paths
@paths.setter
def paths(self, paths):
"""
Sets the paths of this K8sIoApimachineryPkgApisMetaV1RootPaths.
paths are the paths available at root.
:param paths: The paths of this K8sIoApimachineryPkgApisMetaV1RootPaths.
:type: list[str]
"""
if paths is None:
raise ValueError("Invalid value for `paths`, must not be `None`")
self._paths = paths
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
if not isinstance(other, K8sIoApimachineryPkgApisMetaV1RootPaths):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
|
sutartmelson/girder
|
tests/cases/routetable_test.py
|
Python
|
apache-2.0
| 6,390
| 0.002034
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# Copyright 2016 Kitware Inc.
#
# Licensed under the Apache License, Version 2.0 ( the "License" );
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
import json
import os
from .. import base
from girder.constants import GIRDER_ROUTE_ID, GIRDER_STATIC_ROUTE_ID, SettingKey
def setUpModule():
base.mockPluginDir(os.path.join(os.path.dirname(os.path.dirname(__file__)), 'test_plugins'))
base.enabledPlugins.append('has_webroot')
base.startServer()
def tearDownModule():
base.stopServer()
class RouteTableTestCase(base.TestCase):
def setUp(self):
super(RouteTableTestCase, self).setUp()
self.admin = self.model('user').createUser(
email='admin@email.com', login='admin', firstName='Admin',
lastName='Admin', password='password', admin=True)
def testRouteTableSettings(self):
# Test Girder not having a route
resp = self.request('/system/setting', params={
'key': SettingKey.ROUTE_TABLE,
'value': json.dumps({})
}, method='PUT', user=self.admin)
self.assertStatus(resp, 400)
self.assertEqual(resp.json['message'], 'Girder and static root must be routeable.')
# Test static not having a route
resp = self.request('/system/setting', params={
'key': SettingKey.ROUTE_TABLE,
'value': json.dumps({GIRDER_ROUTE_ID: '/'})
}, method='PUT', user=self.admin)
self.assertStatus(resp, 400)
self.assertEqual(resp.json['message'], 'Girder and static root must be routeable.')
# Test duplicate routes
resp = self.request('/system/setting', params={
'key': SettingKey.ROUTE_TABLE,
'value': json.dumps({GIRDER_ROUTE_ID: '/some_route',
GIRDER_STATIC_ROUTE_ID: '/static',
'other': '/some_route'})
}, method='PUT', user=self.admin)
self.assertStatus(resp, 400)
self.assertEqual(resp.json['message'], 'Routes must be unique.')
# Test invalid routes
resp = self.request('/system/setting', params={
'key': SettingKey.ROUTE_TABLE,
'value': json.dumps({GIRDER_ROUTE_ID: '/',
GIRDER_STATIC_ROUTE_ID: '/static',
'other': 'route_without_a_leading_slash'})
}, method='PUT', user=self.admin)
self.assertStatus(resp, 400)
self.assertEqual(resp.json['message'], 'Routes must begin with a forward slash.')
# This is already set by default, this just verifies the endpoint is working
resp = self.request('/system/setting', params={
'key': SettingKey.ROUTE_TABLE,
'value': json.dumps({GIRDER_ROUTE_ID: '/',
GIRDER_STATIC_ROUTE_ID: '/static',
'has_webroot': '/has_webroot'})
}, method='PUT', user=self.admin)
self.assertStatusOk(resp)
resp = self.request('/has_webroot', prefix='', isJson=False, appPrefix='/has_webroot')
self.assertStatusOk(resp)
self.assertTrue('some webroot' in self.getBody(resp))
# girder should be at /
resp = self.request('/', prefix='', isJson=False)
self.assertStatusOk(resp)
self.assertTrue('g-global-info-apiroot' in self.getBody(resp))
        # has_webroot is mounted on /has_webroot
resp = self.request('/system/setting', params={
'key': SettingKey.ROUTE_TABLE
}, user=self.admin)
self.assertStatusOk(resp)
self.assertTrue('has_webroot' in resp.json)
self.assertEqual(resp.json['has_webroot'], '/has_webroot')
# has_webroot is set to be mounted on /has_webroot even after removing it from the list of
# enabled plugins.
base.enabledPlugins.remove('has_webroot')
resp = self.request('/system/setting', params={
'key': SettingKey.ROUTE_TABLE
}, user=self.admin)
self.assertStatusOk(resp)
self.assertTrue('has_webroot' in resp.json)
self.assertEqual(resp.json['has_webroot'], '/has_webroot')
base.enabledPlugins.append('has_webroot')
# Only when has_webroot has been explicitly removed by the user is its route table entry
# cleared.
resp = self.request('/system/plugins', params={
'plugins': json.dumps([
plugin for plugin in base.enabledPlugins if plugin != 'has_webroot'])
}, method='PUT', user=self.admin)
self.assertStatusOk(resp)
# now, confirm that the plugin's route table entry has actually been removed
resp = self.request('/system/setting', params={
'key': SettingKey.ROUTE_TABLE
}, user=self.admin)
self.assertStatusOk(resp)
self.assertTrue('has_webroot' not in resp.json)
# Setting the static route to http should be allowed
resp = self.request('/system/setting', params={
'key': SettingKey.ROUTE_TABLE,
'value': json.dumps({GIRDER_ROUTE_ID: '/',
GIRDER_STATIC_ROUTE_ID: 'http://127.0.0.1/static'})
}, method='PUT', user=self.admin)
self.assertStatusOk(resp)
# but not to a relative path
resp = self.request('/system/setting', params={
'key': SettingKey.ROUTE_TABLE,
'value': json.dumps({GIRDER_ROUTE_ID: '/',
GIRDER_STATIC_ROUTE_ID: 'relative/static'})
}, method='PUT', user=self.admin)
self.assertStatus(resp, 400)
self.assertEqual(
resp.json['message'],
'Static root must begin with a forward slash or contain a URL scheme.')
|
petricm/DIRAC
|
WorkloadManagementSystem/Agent/OptimizerModule.py
|
Python
|
gpl-3.0
| 11,859
| 0.007589
|
########################################################################
# File : Optimizer.py
# Author : Stuart Paterson
########################################################################
"""
  The Optimizer base class is an agent that polls for jobs with a specific
status and minor status pair. The checkJob method is overridden for all
optimizer instances and associated actions are performed there.
"""
__RCSID__ = "$Id$"
from DIRAC import S_OK, S_ERROR, exit as dExit
from DIRAC.Core.Base.AgentModule import AgentModule
from DIRAC.Core.Utilities.ClassAd.ClassAdLight import ClassAd
from DIRAC.AccountingSystem.Client.Types.Job import Job as AccountingJob
from DIRAC.WorkloadManagementSystem.DB.JobDB import JobDB
from DIRAC.WorkloadManagementSystem.DB.JobLoggingDB import JobLoggingDB
class OptimizerModule(AgentModule):
"""
The specific agents must provide the following methods:
* initialize() for initial settings
* beginExecution()
* execute() - the main method called in the agent cycle
* endExecution()
* finalize() - the graceful exit of the method, this one is usually used
for the agent restart
"""
#############################################################################
def __init__(self, *args, **kwargs):
""" c'tor
"""
AgentModule.__init__(self, *args, **kwargs)
self.jobDB = None
self.logDB = None
self.startingMinorStatus = None
self.startingMajorStatus = "Checking"
self.failedStatus = None
self.requiredJobInfo = 'jdl'
self._initResult = None
def initialize(self, jobDB=None, logDB=None):
""" Initialization of the Optimizer Agent.
"""
self.jobDB = JobDB() if jobDB is None else jobDB
if not self.jobDB.isValid():
dExit(1)
self.logDB = JobLoggingDB() if logDB is None else logDB
optimizerName = self.am_getModuleParam('agentName')
if optimizerName.endswith('Agent'):
optimizerName = optimizerName[:-len('Agent')]
self.am_setModuleParam('optimizerName', optimizerName)
self.startingMinorStatus = self.am_getModuleParam('optimizerName')
self.failedStatus = self.am_getOption("FailedJobStatus", 'Failed')
self.am_setOption("PollingTime", 30)
return self.initializeOptimizer()
def initializeOptimizer(self):
""" To be overwritten by inheriting class
"""
return S_OK()
#############################################################################
def execute(self):
""" The main agent execution method
"""
result = self.initializeOptimizer()
if not result['OK']:
return result
self._initResult = result['Value']
condition = {'Status': self.startingMajorStatus}
if self.startingMinorStatus:
condition['MinorStatus'] = self.startingMinorStatus
result = self.jobDB.selectJobs(condition)
if not result['OK']:
self.log.warn('Failed to get a job list from the JobDB')
return S_ERROR('Failed to get a job list from the JobDB')
if not result['Value']:
self.log.verbose('No pending jobs to process')
return S_OK('No work to do')
for job in result['Value']:
result = self.getJobDefinition(job)
if not result['OK']:
self.setFailedJob(job, result['Message'], '')
continue
jobDef = result['Value']
result = self.optimizeJob(job, jobDef['classad'])
return S_OK()
#############################################################################
def optimizeJob(self, job, classAdJob):
""" Call the corresponding Optimizer checkJob method
"""
self.log.info('Job %s will be processed by %sAgent' % (job, self.am_getModuleParam('optimizerName')))
result = self.checkJob(job, classAdJob)
if not result['OK']:
self.setFailedJob(job, result['Message'], classAdJob)
return result
#############################################################################
def getJobDefinition(self, job, jobDef=False):
""" Retrieve JDL of the Job and return jobDef dictionary
"""
if not jobDef:
jobDef = {}
    # If there is no jdl in jobDef, load it
if 'jdl' not in jobDef:
if self.requiredJobInfo == 'jdlOriginal':
result = self.jobDB.getJobJDL(job, original=True)
if not result['OK']:
self.log.error("No JDL for job", "%s" % job)
return S_ERROR("No JDL for job")
jobDef['jdl'] = result['Value']
if self.requiredJobInfo == 'jdl':
result = self.jobDB.getJobJDL(job)
if not result['OK']:
self.log.error("No JDL for job", "%s" % job)
return S_ERROR("No JDL for job")
jobDef['jdl'] = result['Value']
# Load the classad if needed
if 'jdl' in jobDef and 'classad' not in jobDef:
try:
classad = ClassAd(jobDef['jdl'])
except BaseException:
self.log.debug("Cannot load JDL")
return S_ERROR('Illegal Job JDL')
if not classad.isOK():
self.log.debug("Warning: illegal JDL for job %s, will be marked problematic" % (job))
return S_ERROR('Illegal Job JDL')
jobDef['classad'] = classad
return S_OK(jobDef)
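  # Usage sketch: an optimizer that needs the unmodified JDL can set
  #   self.requiredJobInfo = 'jdlOriginal'
  # in its initializeOptimizer(); getJobDefinition above will then fetch the
  # original JDL (jobDB.getJobJDL(job, original=True)) instead of the current one.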
#############################################################################
def getOptimizerJobInfo(self, job, reportName):
"""This method gets job optimizer information that will
be used for
"""
self.log.verbose("self.jobDB.getJobOptParameter(%s,'%s')" % (job, reportName))
result = self.jobDB.getJobOptParameter(job, reportName)
if result['OK']:
value = result['Value']
if not value:
self.log.warn('JobDB returned null value for %s %s' % (job, reportName))
return S_ERROR('No optimizer info returned')
else:
try:
return S_OK(eval(value))
except BaseException as x:
return S_ERROR('Could not evaluate optimizer parameters: %s' % repr(x))
return result
#############################################################################
def setOptimizerJobInfo(self, job, reportName, value):
"""This method sets the job optimizer information that will subsequently
be used for job scheduling and TURL queries on the WN.
"""
self.log.verbose("self.jobDB.setJobOptParameter(%s,'%s','%s')" % (job, reportName, value))
if not self.am_Enabled():
return S_OK()
return self.jobDB.setJobOptParameter(job, reportName, str(value))
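  # Round-trip sketch (hypothetical report name): setOptimizerJobInfo stores
  # values with str() and getOptimizerJobInfo reads them back with eval(), so
  # dicts and lists survive intact:
  #   self.setOptimizerJobInfo(job, 'InputData', {'LFNs': ['/lfn/file']})
  #   res = self.getOptimizerJobInfo(job, 'InputData')
  #   if res['OK']:
  #     inputData = res['Value']  # -> {'LFNs': ['/lfn/file']}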
#############################################################################
def setOptimizerChain(self, job, value):
"""This method sets the job optimizer chain, in principle only needed by
one of the optimizers.
"""
self.log.verbose("self.jobDB.setOptimizerChain(%s,%s)" % (job, value))
if not self.am_Enabled():
return S_OK()
return self.jobDB.setOptimizerChain(job, value)
#############################################################################
def setNextOptimizer(self, job):
"""This method is executed when the optimizer instance has successfully
processed the job. The next optimizer in the chain will subsequently
start to work on the job.
"""
result = self.logDB.addLoggingRecord(job, status=self.startingMajorStatus,
minor=self.startingMinorStatus,
source=self.am_getModuleParam("optimizerName"))
if not result['OK']:
self.log.warn(result['Message'])
self.log.verbose("self.jobDB.setNextOptimizer(%s,'%s')" % (job, self.am_getModuleParam("optimizerName")))
return self.jobDB.setNextOptimizer(job, self.am_getModuleParam("optimizerName"))
#############################################################################
def updateJobStatus(self, job, status, minorStatus=None, appStatus=None):
"""This method updates the job status in the JobDB, this should only be
used to fail jobs due to the optimizer chain.
"""
self.log.verbose("self.jobDB.setJobStatus(%s,'Status/Minor/Application','%s'/'%s'/'%s',update=True)" %
(job, status, str(minorStatus), str(appStatus)))
if not self.am_Enabled():
return S_OK()
res
|
maligulzar/Rstudio-instrumented
|
logserver/server.py
|
Python
|
agpl-3.0
| 793
| 0.020177
|
#!/usr/bin/env python2.4
#
# Copyright 2007 Google Inc. All Rights Reserved.
import BaseHTTPServer
import SimpleHTTPServer
import urllib
import random
import sys
import datetime
class MyHandler(SimpleHTTPServer.SimpleHTTPRequestHandler):
def do_GET(self):
form = {}
now = datetime.datetime.now()
logname = "log/"+now.strftime("%Y%m%d") + ".txt"
if self.path.find('?') > -1:
queryStr = self.path.split('?')[1]
      for queryParam in queryStr.split('&'):
        if queryParam.split('=')[0] == "data":
          f = open(logname, 'a')
          f.write(urllib.unquote_plus(queryParam[5:] + ",\n"))
          f.close()
self.wfile.flush()
self.connection.shutdown(1)
print sys.argv[1]
bhs = BaseHTTPServer.HTTPServer(('', int(sys.argv[1])), MyHandler)
bhs.serve_forever()
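# Usage sketch (assumptions: server started as `python server.py 8000` on the
# same host; Python 2 stdlib only). Appends one line to today's log file:
#   import urllib
#   urllib.urlopen('http://localhost:8000/?data=' + urllib.quote_plus('hello'))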
|
MicroPyramid/Django-CRM
|
tasks/migrations/0005_task_company.py
|
Python
|
mit
| 607
| 0
|
# Generated by Django 2.2.10 on 2020-04-27 12:23
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
("common", "0021_document_company"),
("tasks", "0004_task_teams"),
]
operations = [
migrations.AddField(
model_name="task",
name="company",
field=models.ForeignKey(
blank=True,
null=True,
                on_delete=django.db.models.deletion.SET_NULL,
to="common.Company",
),
),
]
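# Sketch of the model-side change this migration encodes (field arguments
# taken from the AddField operation above; the surrounding Task model is
# assumed, not shown here):
#
#   class Task(models.Model):
#       company = models.ForeignKey(
#           "common.Company", blank=True, null=True,
#           on_delete=models.SET_NULL)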
|
ptroja/spark2014
|
testsuite/gnatprove/tests/riposte__usergroup_examples/test.py
|
Python
|
gpl-3.0
| 41
| 0.02439
|
from test_support import *
prove_all ()
| |
leapp-to/prototype
|
tests/data/actor-api-tests/topics/apitest.py
|
Python
|
lgpl-2.1
| 82
| 0
|
from leapp.topics import Topic
class ApiTestTopic(Topic):
    name = 'api_test'
|
deepmind/dm_robotics
|
py/agentflow/subtasks/integration_test.py
|
Python
|
apache-2.0
| 12,432
| 0.002252
|
# Copyright 2020 DeepMind Technologies Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# python3
"""Integration test for parameterized_subtask and termination components."""
from unittest import mock
from absl.testing import absltest
from absl.testing import parameterized
import dm_env
from dm_env import specs
from dm_robotics.agentflow import core
from dm_robotics.agentflow import subtask
from dm_robotics.agentflow import testing_functions
from dm_robotics.agentflow.preprocessors import rewards
from dm_robotics.agentflow.preprocessors import timestep_preprocessor
from dm_robotics.agentflow.subtasks import parameterized_subtask
from dm_robotics.agentflow.subtasks import subtask_termination
import numpy as np
class IntegrationTest(parameterized.TestCase):
def setUp(self):
super().setUp()
# Parameters for MaxStepsTermination.
self._max_steps = 10
self._max_steps_terminal_discount = 0.123
# Parameters for RewardThresholdTermination.
self._reward_threshold = 0.5
self._reward_threshold_terminal_discount = 0.
# Parameters for LeavingWorkspaceTermination.
self._tcp_pos_obs = 'tcp_pos'
self._workspace_center = np.array([0.2, -0.4, 0.6])
self._workspace_radius = 1.
self._leaving_workspace_terminal_discount = 0.456
self._tcp_observation_spec = {
self._tcp_pos_obs:
specs.BoundedArray(
shape=(3,),
name=self._tcp_pos_obs,
minimum=np.ones(3) * -10,
maximum=np.ones(3) * 10,
dtype=np.float64)
}
# Parameters for ThresholdedL2Reward.
self._reward_l2_threshold = 0.1
self._success_reward_value = 1.
self._world_center_obs = 'world_center'
self._world_center_observation_spec = {
self._world_center_obs:
specs.BoundedArray(
shape=(3,),
name=self._world_center_obs,
minimum=self._workspace_center,
maximum=self._workspace_center,
dtype=np.float64)
}
def _build(self, sparse_mode: bool):
# Configure all timestep preprocessors.
max_steps_tsp = subtask_termination.MaxStepsTermination(
self._max_steps, self._max_steps_terminal_discount)
leaving_workspace_tsp = subtask_termination.LeavingWorkspaceTermination(
self._tcp_pos_obs, self._workspace_center, self._workspace_radius,
self._leaving_workspace_terminal_discount)
reward_setting_tsp = rewards.ThresholdedL2Reward(
obs0=self._tcp_pos_obs, obs1=self._world_center_obs,
threshold=self._reward_l2_threshold, reward=self._success_reward_value)
reward_threshold_tsp = subtask_termination.RewardThresholdTermination(
self._reward_threshold,
self._reward_threshold_terminal_discount,
sparse_mode=sparse_mode)
# Compose to a single CompositeTimestepPreprocessor that runs in order.
    self._preprocessors = [
        max_steps_tsp, leaving_workspace_tsp, reward_setting_tsp,
        reward_threshold_tsp
    ]
    ts_preprocessor = timestep_preprocessor.CompositeTimestepPreprocessor(
        *self._preprocessors)
# Create task spec and instantiate subtask.
self._action_spec = testing_functions.random_array_spec(shape=(4,))
random_action_space = core.IdentityActionSpace(self._action_spec)
random_observation_spec = testing_functions.random_observation_spec(
dtype=np.float64)
random_observation_spec.update(self._tcp_observation_spec)
random_observation_spec.update(self._world_center_observation_spec)
self._parent_spec = testing_functions.random_timestep_spec(
observation_spec=random_observation_spec)
self._subtask = parameterized_subtask.ParameterizedSubTask(
parent_spec=self._parent_spec,
action_space=random_action_space,
timestep_preprocessor=ts_preprocessor,
name='TestParameterizedSubTask')
# Update types on all reward and discount members for tests.
discount_type = self._parent_spec.discount_spec.dtype.type
reward_type = self._parent_spec.reward_spec.dtype.type
self._default_discount = discount_type(1.)
self._default_reward = reward_type(0.)
self._success_reward_value = reward_type(self._success_reward_value)
self._reward_threshold_terminal_discount = discount_type(
self._reward_threshold_terminal_discount)
self._max_steps_terminal_discount = discount_type(
self._max_steps_terminal_discount)
self._leaving_workspace_terminal_discount = discount_type(
self._leaving_workspace_terminal_discount)
def _run(self, default_tcp_obs, event_tcp_obs, event_step_idx,
expected_rewards, expected_discounts):
# Steps through an episode, setting `event_tcp_obs` on step `event_step_idx`
# and checks that the policy saw the correct rewards and discounts.
mock_policy = mock.MagicMock(spec=core.Policy)
valid_action = testing_functions.valid_value(self._action_spec)
mock_policy.step.return_value = valid_action
agent = subtask.SubTaskOption(sub_task=self._subtask,
agent=mock_policy, name='Subtask Option')
random_first_timestep = testing_functions.random_timestep(
self._parent_spec, step_type=dm_env.StepType.FIRST,
discount=self._default_discount)
random_first_timestep.observation[self._tcp_pos_obs] = default_tcp_obs
_ = agent.step(random_first_timestep)
for i in range(1, self._max_steps * 2):
random_mid_timestep = testing_functions.random_timestep(
self._parent_spec, step_type=dm_env.StepType.MID,
discount=self._default_discount)
if i == event_step_idx:
random_mid_timestep.observation[self._tcp_pos_obs] = event_tcp_obs
else:
random_mid_timestep.observation[self._tcp_pos_obs] = default_tcp_obs
_ = agent.step(random_mid_timestep)
if agent.pterm(random_mid_timestep) > np.random.rand():
random_last_timestep = testing_functions.random_timestep(
self._parent_spec, step_type=dm_env.StepType.LAST,
discount=self._default_discount)
# TCP doesn't have to be in-bounds for subtask to provide terminal
      # reward, so just to verify this we set it back to out-of-bounds.
random_last_timestep.observation[self._tcp_pos_obs] = event_tcp_obs
_ = agent.step(random_last_timestep)
break
actual_rewards = [
call[0][0].reward for call in mock_policy.step.call_args_list
]
actual_discounts = [
call[0][0].discount for call in mock_policy.step.call_args_list
]
self.assertEqual(expected_rewards, actual_rewards)
self.assertEqual(expected_discounts, actual_discounts)
@parameterized.parameters([False, True])
def test_terminating_with_sparse_reward(self, sparse_mode: bool):
# Tests that agent sees proper reward and discount when solving the task.
self._build(sparse_mode)
# Make sure tcp is not within radius of target.
random_unit_offset = np.random.rand(3)
random_unit_offset /= np.linalg.norm(random_unit_offset)
within_bounds_obs = (
self._workspace_center + random_unit_offset *
(self._workspace_radius / 2.))
success_obs = (
self._workspace_center + random_unit_offset *
(self._reward_l2_threshold / 2.))
event_step_idx = 5
event_tcp_obs = success_obs
default_tcp_obs = within_bounds_obs
expected_event_reward = self._success_reward_value
expected_event_discount = self._reward_threshold_terminal_discount
# Expected results:
if sparse_mode:
# If `sparse_mode` then agent should see reward=0 and disco
|
anneline/Bika-LIMS
|
bika/lims/browser/widgets/querywidget.py
|
Python
|
agpl-3.0
| 5,599
| 0.000714
|
from AccessControl import ClassSecurityInfo
from archetypes.querywidget.views import WidgetTraverse as _WidgetTraverse
from archetypes.querywidget.widget import QueryWidget as _QueryWidget
from bika.lims.querystring.querybuilder import QueryBuilder
from bika.lims.querystring.querybuilder import RegistryConfiguration
from archetypes.querywidget.views import \
MultiSelectWidget as _MultiSelectWidget
from bika.lims.querystring.registryreader import QuerystringRegistryReader
from plone.app.querystring.interfaces import IQuerystringRegistryReader
from plone.registry.interfaces import IRegistry
from Products.Archetypes.Registry import registerWidget
from Products.Archetypes.Widget import TypesWidget
from Products.CMFCore.permissions import ModifyPortalContent, View
from zope.component import getMultiAdapter
from zope.component import getUtility
from zope.i18nmessageid import MessageFactory
from zope.interface import implements
_p = MessageFactory('plone')
class QueryWidget(_QueryWidget):
_properties = _QueryWidget._properties.copy()
_properties.update({
'macro': 'bika_widgets/querywidget',
'helper_css': ('++resource++archetypes.querywidget.querywidget.css',),
'helper_js': ('++resource++bika.lims.js/querywidget.js',
'@@datepickerconfig'),
'catalog_name': 'portal_catalog',
'registry_prefix': None,
})
security = ClassSecurityInfo()
def getConfig(self):
"""get the config"""
registry = getUtility(IRegistry)
prefix = self.registry_prefix
if prefix is not None:
# First grab the base config's operations
registryreader = IQuerystringRegistryReader(registry)
registryreader.prefix = "plone.app.querystring.operation"
plone_config = registryreader.parseRegistry()
# then merge custom fields
registryreader = IQuerystringRegistryReader(registry)
registryreader.prefix = prefix
config = registryreader.parseRegistry()
config = registryreader.getVocabularyValues(config)
config.update(plone_config)
config = registryreader.mapOperations(config)
config = registryreader.mapSortableIndexes(config)
config = {
'indexes': config.get(prefix + '.field'),
'sortable_indexes': config.get('sortable'),
}
else:
# First grab the base config's operations
registryreader = IQuerystringRegistryReader(registry)
registryreader.prefix = "plone.app.querystring"
config = registryreader()
# Group indices by "group", order alphabetically
groupedIndexes = {}
for indexName in config['indexes']:
index = config['indexes'][indexName]
if index['enabled']:
group = index['group']
if group not in groupedIndexes:
groupedIndexes[group] = []
groupedIndexes[group].append((index['title'], indexName))
# Sort each index list
        for a in groupedIndexes.values():
            a.sort()
config['groupedIndexes'] = groupedIndexes
return config
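    # Shape sketch of the grouped result built above (hypothetical index
    # names): a dict of group name -> alphabetically sorted (title, index):
    #   config['groupedIndexes'] == {
    #       'Metadata': [('Creator', 'Creator'), ('Title', 'Title')],
    #       'Dates': [('Creation date', 'created')],
    #   }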
def SearchResults(self, request, context, accessor):
"""search results"""
options = dict(original_context=context)
res = getMultiAdapter((accessor(), request),
name='display_query_results')
return res(**options)
registerWidget(QueryWidget, title='Query',
description=('Field for storing a query'))
class WidgetTraverse(_WidgetTraverse):
def getConfig(self):
"""get the config"""
registry = getUtility(IRegistry)
# First grab the base config, so we can use the operations
registryreader = IQuerystringRegistryReader(registry)
registryreader.prefix = "plone.app.querystring.operation"
        op_config = registryreader.parseRegistry()
# Then combine our fields
registryreader = IQuerystringRegistryReader(registry)
registryreader.prefix = "bika.lims.bika_catalog_query"
config = registryreader.parseRegistry()
config = registryreader.getVocabularyValues(config)
config.update(op_config)
registryreader.mapOperations(config)
        registryreader.mapSortableIndexes(config)
config = {
'indexes': config.get('bika.lims.bika_catalog_query.field'),
'sortable_indexes': config.get('sortable'),
}
# Group indices by "group", order alphabetically
groupedIndexes = {}
for indexName in config['indexes']:
index = config['indexes'][indexName]
if index['enabled']:
group = index['group']
if group not in groupedIndexes:
groupedIndexes[group] = []
groupedIndexes[group].append((index['title'], indexName))
# Sort each index list
        for a in groupedIndexes.values():
            a.sort()
config['groupedIndexes'] = groupedIndexes
return config
class MultiSelectWidget(WidgetTraverse):
def getValues(self, index=None):
config = self.getConfig()
if not index:
index = self.request.form.get('index')
values = None
if index is not None:
values = config['indexes'][index]['values']
return values
def getSortedValuesKeys(self, values):
# do a lowercase sort of the keys
        return sorted(values.iterkeys(), key=lambda x: x.lower())
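        # Lowercase-sort example: getSortedValuesKeys({'b': 1, 'A': 2}) -> ['A', 'b']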