repo_name stringlengths 5 100 | path stringlengths 4 231 | language stringclasses 1 value | license stringclasses 15 values | size int64 6 947k | score float64 0 0.34 | prefix stringlengths 0 8.16k | middle stringlengths 3 512 | suffix stringlengths 0 8.17k |
|---|---|---|---|---|---|---|---|---|
arteria/django-userproperty | userproperty/utils.py | Python | bsd-2-clause | 7,025 | 0.001993 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from userproperty.models import UserProperty, GlobalProperty
def setIntegerProperty(request, name="", value=0, anUser=None):
"""
The user must not be an anonymous user!
"""
try:
if anUser is None:
theUser = request.user
else:
theUser = anUser
if theUser.is_anonymous():
return False
try:
un = UserProperty.objects.get(user=theUser, name=name)
un.value = value
except UserProperty.DoesNotExist:
un = UserProperty(user=theUser, name=name, value=value)
un.save()
return True
except:
pass
return False
def getIntegerProperty(request, name="", value=0, anUser=None):
"""
The user must not be an anonymous user!
If the user is an anonymous user, the default value (value) will be returned.
"""
if anUser is None:
theUser = request.user
else:
theUser = anUser
if theUser.is_anonymous():
return value
try:
un = UserProperty.objects.get(user=theUser, name=name)
value = un.value
return value
except UserProperty.DoesNotExist:
return value
return None
def addProperty(request, name="", anUser=None):
"""Add a property for a non anonymous user.
"""
try:
if anUser is None:
theUser = request.user
else:
theUser = anUser
if theUser.is_anonymous():
return False
try:
un = UserProperty.objects.get(user=theUser, name=name)
un.value = 1
except UserProperty.DoesNotExist:
un = UserProperty(user=theUser, name=name, value=1)
un.save()
return True
except:
return False
def removeProperty(request, name="", anUser=None):
"""
Remove a poperty for a not anonymous user.
"""
if anUser is None:
theUser = request.user
else:
theUser = anUser
if theUser.is_anonymous():
return False
try:
un = UserProperty.objects.get(user=theUser, name=name)
un.delete()
except UserProperty.DoesNotExist:
pass
def getAllProperties(request):
"""
All properties for non anonymous user.
"""
if request.user.is_anonymous():
return None
try:
un = UserProperty.objects.filter(user=request.user)
return un
except UserProperty.DoesNotExist:
pass
return None
def dropAllPropertiesForUser(anUser=None):
"""
Removes all properties for the given non anonymous user.
"""
if anUser.is_anonymous():
return None
UserProperty.objects.filter(user=anUser).delete()
def getProperty(request, name="", anUser=None):
"""
Returns True if the user has this property set.
"""
try:
if anUser is None:
un = UserProperty.objects.get(user=request.user, name=name)
else:
un = UserProperty.objects.get(user=anUser, name=name)
return bool(un.value)
except UserProperty.DoesNotExist:
pass
return False
def getUsersWithProperty(name=""):
"""Returns a list of Users having a Property
"""
ans = []
for u in UserProperty.objects.filter(name=name):
ans.append(u.user)
return ans
def incUserProperty(request, name="", anUser=None, incrementBy=1):
"""
Works for non anonymous users only.
"""
try:
if anUser is None:
theUser = request.user
else:
theUser = anUser
if theUser.is_anonymous():
return False
try:
un = UserProperty.objects.get(user=theUser, name=name)
except UserProperty.DoesNotExist:
un = UserProperty(user=theUser, name=name, value=0)
un.value += incrementBy
un.save()
return True
except:
return False
def decUserProperty(request, name="", anUser=None, decrementBy=1):
"""
Works for non anonymous users only.
"""
try:
if anUser is None:
theUser = request.user
else:
theUser = anUser
if theUser.is_anonymous():
return False
try:
un = UserProperty.objects.get(user=theUser, name=name)
except UserProperty.DoesNotExist:
un = UserProperty(user=theUser, name=name, value=0)
if un.value < 1:
pass
else:
un.value -= decrementBy
un.save()
return True
except:
return False
### Global Property ###
def setGlobalProperty(name="", value=0):
"""
"""
try:
try:
un = GlobalProperty.objects.get(name=name)
un.value = value
except GlobalProperty.DoesNotExist:
un = GlobalProperty(name=name, value=value)
un.save()
return True
except:
return F | alse
def getGlobalProperty(name="", value=0):
"""Returns True if this Property is set.
"""
try:
prop = GlobalProperty.objects.get(name=name)
return bool(prop.value)
except GlobalProperty.DoesNotExist:
pass
return False
def getIntegerGlobalProperty(name=""):
"""Returns Integer Value if this Property is set, else 0
"""
try:
| prop = GlobalProperty.objects.get(name=name)
return prop.value
except GlobalProperty.DoesNotExist:
pass
return 0
def incGlobalProperty(name="", incrementBy=1):
"""Increments value Field by given value, creates Property if DoesNotExist
"""
try:
try:
un = GlobalProperty.objects.get(name=name)
except GlobalProperty.DoesNotExist:
un = GlobalProperty(name=name, value=0)
un.value += incrementBy
un.save()
return True
except:
return False
def decGlobalProperty(name="", decrementBy=1):
"""Decrements value Field by given value, creates Property if DoesNotExist
"""
try:
try:
un = GlobalProperty.objects.get(name=name)
except GlobalProperty.DoesNotExist:
un = GlobalProperty(name=name, value=0)
if un.value < 1:
pass
else:
un.value -= decrementBy
un.save()
return True
except:
return False
# PEP8 #
### UserProperty ###
set_integer_property = setIntegerProperty
get_integer_property = getIntegerProperty
add_property = addProperty
remove_property = removeProperty
get_all_properties = getAllProperties
drop_all_properties_for_user = dropAllPropertiesForUser
get_property = getProperty
get_users_with_property = getUsersWithProperty
inc_user_property = incUserProperty
dec_user_property = decUserProperty
### Global Property ###
set_global_property = setGlobalProperty
get_global_property = getGlobalProperty
get_integer_global_property = getIntegerGlobalProperty
inc_global_property = incGlobalProperty
dec_global_property = decGlobalProperty
|
salimfadhley/jenkinsapi | jenkinsapi_tests/systests/test_invocation.py | Python | mit | 3,914 | 0 | """
System tests for `jenkinsapi.jenkins` module.
"""
import time
import logging
import pytest
from jenkinsapi.build import Build
from jenkinsapi.queue import QueueItem
from jenkinsapi_tests.test_utils.random_strings import random_string
from jenkinsapi_tests.systests.job_configs import LONG_RUNNING_JOB
from jenkinsapi_tests.systests.job_configs import SHORTISH_JOB, EMPTY_JOB
from jenkinsapi.custom_exceptions import BadParams, NotFound
log = logging.getLogger(__name__)
def test_invocation_object(jenkins):
job_name = 'Acreate_%s' % random_string()
job = jenkins.create_job(job_name, SHORTISH_JOB)
qq = jo | b.invoke()
assert isinstance(qq, QueueItem)
# Let Jenkins catchup
qq.block_until_building()
assert qq.get_build_number() == 1
def test_get_block_until_build_running(jenkins):
job_name = 'Bcreate_%s' % random_string()
job = jenkins.create_job(job_name, LONG_RUNNING_JOB)
qq = job.invoke()
| time.sleep(3)
bn = qq.block_until_building(delay=3).get_number()
assert isinstance(bn, int)
build = qq.get_build()
assert isinstance(build, Build)
assert build.is_running()
build.stop()
# if we call next line right away - Jenkins have no time to stop job
# so we wait a bit
time.sleep(1)
assert not build.is_running()
console = build.get_console()
assert isinstance(console, str)
assert 'Started by user' in console
def test_get_block_until_build_complete(jenkins):
job_name = 'Ccreate_%s' % random_string()
job = jenkins.create_job(job_name, SHORTISH_JOB)
qq = job.invoke()
qq.block_until_complete()
assert not qq.get_build().is_running()
def test_mi_and_get_last_build(jenkins):
job_name = 'Dcreate_%s' % random_string()
job = jenkins.create_job(job_name, SHORTISH_JOB)
for _ in range(3):
ii = job.invoke()
ii.block_until_complete(delay=2)
build_number = job.get_last_good_buildnumber()
assert build_number == 3
build = job.get_build(build_number)
assert isinstance(build, Build)
build = job.get_build_metadata(build_number)
assert isinstance(build, Build)
def test_mi_and_get_build_number(jenkins):
job_name = 'Ecreate_%s' % random_string()
job = jenkins.create_job(job_name, EMPTY_JOB)
for invocation in range(3):
qq = job.invoke()
qq.block_until_complete(delay=1)
build_number = qq.get_build_number()
assert build_number == invocation + 1
def test_mi_and_delete_build(jenkins):
job_name = 'Ecreate_%s' % random_string()
job = jenkins.create_job(job_name, EMPTY_JOB)
for invocation in range(3):
qq = job.invoke()
qq.block_until_complete(delay=1)
build_number = qq.get_build_number()
assert build_number == invocation + 1
# Delete build using Job.delete_build
job.get_build(1)
job.delete_build(1)
with pytest.raises(NotFound):
job.get_build(1)
# Delete build using Job as dictionary of builds
assert isinstance(job[2], Build)
del job[2]
with pytest.raises(NotFound):
job.get_build(2)
with pytest.raises(NotFound):
job.delete_build(99)
def test_give_params_on_non_parameterized_job(jenkins):
job_name = 'Ecreate_%s' % random_string()
job = jenkins.create_job(job_name, EMPTY_JOB)
with pytest.raises(BadParams):
job.invoke(build_params={'foo': 'bar', 'baz': 99})
def test_keep_build_toggle(jenkins):
job_name = 'Ecreate_%s' % random_string()
job = jenkins.create_job(job_name, EMPTY_JOB)
qq = job.invoke()
qq.block_until_complete(delay=1)
build = job.get_last_build()
assert not build.is_kept_forever()
build.toggle_keep()
assert build.is_kept_forever()
build_number = job.get_last_buildnumber()
job.toggle_keep_build(build_number)
build = job.get_last_build()
assert not build.is_kept_forever()
|
seece/modrip | lib/modtag/format.py | Python | mit | 4,161 | 0.033902 | from lib.modtag import *
from lib.modtag.tracker import *
from struct import unpack
class ModuleFormat():
"""Interface for different module formats."""
@property
def name(self):
raise NotImplementedError("module format must have a name")
@classmethod
def identify(cls, bytes):
"""Checks if the given bytes are in this very format"""
raise NotImplementedError("identify must be implemented")
@classmethod
def load_module(cls, bytes, options=None):
"""Returns a TrackerSong from file data"""
raise NotImplementedError("load_module must be implemented")
class ProtrackerFormat(ModuleFormat):
name = "Protracker"
@classmethod
def check_format(cls, modulebytes):
amigamagic = str(modulebytes[1080:1084], 'ascii')
if amigamagic == "M.K.":
return True
if amigamagic == "8CHN":
return True
if amigamagic == "28CH":
return True
return False
@classmethod
def identify(cls, bytes):
return ProtrackerFormat.check_format(bytes)
@classmethod
def get_protracker_orderlist(cls, songbytes):
sample_length = 22+2+1+1+2+2
songdata_ofs = 20+sample_length*31
orderlist_length = songbytes[songdata_ofs]+1
orderlist = []
for i in range(0, orderlist_length-1):
order = songbytes[songdata_ofs + 2 + i]
orderlist.append(order)
return orderlist
@classmethod
def parse_note(cls, notebytes):
note = Note()
a = notebytes[0]
b = notebytes[1]
c = notebytes[2]
d = notebytes[3]
note.instrument = (a & 0xf0) + (c >> 4)
effect = ((c & 0xf) << 8) + d
note.parameters = effect & 0xff
note.effect = effect >> 8
note.pitch = ((a & 0xf) << 8) + b
return note
@classmethod
def parse_pattern(cls, patternbytes, song):
pattern = Pattern()
pattern.length = 64
for i in range(song.num_channels-1):
pattern.rows | .append([])
for r in range(0, 63):
for c in range(song.num_channels-1):
ofs = r*song.num_channels*4 + c*4
#pattern.rows[c][i] = cls.parse_note(patternbytes[ofs:ofs+4])
pattern.rows[c].append(cls.parse_note(patternbytes[ofs:ofs+4]))
return pattern
# TODO add proper channel checks here
@classmethod
def get_num_channels(cls, songbytes):
return 4
@classmethod
def load_module(cls, songbytes, options=None):
#modformat = ProtrackerFormat.detect_module_format(songbytes)
| if not options:
options = {'verbose': False}
#if modformat != TrackerSong.PROTRACKER:
# return None
song = TrackerSong()
#song.fmt = modformat
#song.name = str(unpack('20s', songbytes[0:20]), 'ascii')
song.num_channels = cls.get_num_channels(songbytes)
song.name = str(songbytes[0:20], 'ascii').rstrip('\0')
sample_length = 22+2+1+1+2+2
for i in range(0, 30):
ofs = (20+i*sample_length)
samplechunk = songbytes[ofs:ofs+sample_length]
fields = unpack('>22sHBBHH', samplechunk)
sample = Sample()
sample.name = str(fields[0], 'ascii').rstrip('\0')
sample.length = fields[1]*2
sample.finetune= fields[2]
sample.volume = fields[3]
sample.repeat = fields[4]*2
sample.repeat_length = fields[5]*2
song.instruments.append(Instrument(sample))
if options['verbose']:
if (len(sample.name) > 0 or sample.length > 0):
print(str(i), " : ", sample.name)
print("\tlength: ", str(sample.length))
print("\tvolume: ", str(sample.volume))
songdata_ofs = 20+sample_length*31
song.orderlist = cls.get_protracker_orderlist(songbytes)
song.num_patterns = max(song.orderlist) + 1
patterndata_ofs = songdata_ofs + 128+4+2
pattern_size = song.num_channels * 64 * 4
sampledata_ofs = patterndata_ofs + song.num_patterns * pattern_size
for i in range(0, song.num_patterns-1):
ofs = patterndata_ofs + i*pattern_size
pattern = cls.parse_pattern(songbytes[ofs:ofs+pattern_size], song)
song.patterns.append(pattern)
# load sample data
sample_ofs = sampledata_ofs
for i in range(0, 30):
s = song.instruments[i].sample
if s.length == 0:
continue
s.data = songbytes[sample_ofs:sample_ofs+s.length]
sample_ofs += s.length
if options['verbose']:
print("orderlist: " + str(song.orderlist))
print("amount of patterns: " + str(song.num_patterns))
return song
|
DroneMapp/powerlibs-aws-sqs-dequeue_to_api | powerlibs/aws/sqs/dequeue_to_api/transformations.py | Python | mit | 1,829 | 0.000547 | import os
def url_get(dequeuer, url):
response = dequeuer.get(url)
response_data = response.json()
if 'results' in response_data:
return response_data['results']
else:
return [response_data]
def accumulate(dequeuer, payload, accumulators, url_getter=None):
last_level = [{'payload': payload}]
url_getter = url_getter or url_get
for step_name, base_url in accumulators:
# "ticket", "v1/tickets/{data[ticket]}"
new_level = []
url = base_url
for entry in last_level:
# last_level = [{"data": {"id": 1, "ticket": "2"}}, ...]
# 1- Find the right URL:
for entry_name, entry_values in entry.items():
# "data" , {"id": 1, "ticket": "2"}
url = url.format(**{entry_name: entry_values})
# 2- Save the URL:
base_url = dequeuer.config['base_url']
kwargs = {**entry, 'config': dequeuer.config}
real_url = os.path.join(base_url, url).format(**kwargs)
for result in url_getter(dequeuer, real_url):
new_entry = {step_name: result} # "ticket": {...}
new_entry.update(entry) # + "data": {...}
new_level.append(new_entry) # [{"ticket": {"id": 1}, "data": {...}}, {"ticket": {"id": 2}, "data": {...}}]
last_level = new_level
return last_level
def apply_data_map(data, data_map):
mapped = {}
for key, value in data.items():
if isinstance(value, (str, bytes)) and value.startswith('MAP:'):
_, *map_key_parts = value.split(':') # NOQA
map_key = ':'.join(map_key_parts)
if map_key and map_key in data_map:
| mapped[key] = data_ | map[map_key]
else:
mapped[key] = value
return mapped
|
xueguangl23/brownClustering | twoGramModel.py | Python | mit | 2,028 | 0.011834 | from collections import Counter
from itertools import chain
from parser import getAllWords
DEBUG = True
class TwoGramModel(object):
def __init__(self,wordlist):
if DEBUG: print("building bi-gram model.")
WBAG = set(wordlist)
self.N = len(WBAG)
def replaceLoFreq(sentence):
return tuple(map(lambda x:x if x in WBAG else 'UNK', sentence))
def combineTwoWord(sentence):
res = []
for i in range(len(sentence)-1):
res.append((sentence[i],sentence[i+1]))
return res
#allPurmutations = permutations(WBAG, 2)
#sentences = chain(map(replaceLoFreq, getAllWords()),allPurmutations)
sentences = tuple(map(replaceLoFreq,getAllWords()))
self.OneGramCounter = Counter(chain.from_iterable(sentences))
combinedSentence = map(combineTwoWord, sentences)
self.TwoGramCounter = Counter(chain.from_iterable(combinedSentence))
if DEBUG: print('bi-gram model build complete.')
def count(self,*args):
# e.g. M.count('there') M.count('there','is')
if len(args)==1:
return self.OneGramCounter[args[0]]
elif len(args)==2:
return self.TwoGramCounter[tuple(args)]
else:
print('WARNING: illegal access to the Model, trying to get {}'.format(args))
return -1
def merge(self,w1,w2):
# we merge w2 to w1, hence deleting c2
self.OneGramCounter[w1] += self.OneGramCounter[w2]
del self.OneGramCounter[w2]
for (x1,x2),count in tuple(self.TwoGramCounter.items()):
| if w2 not in (x1,x2):
continue
| if x1 == w2 and x2 == w2:
self.TwoGramCounter[(w1,w1)] += count
elif x1 == w2:
self.TwoGramCounter[(w1,x2)] += count
elif x2 == w2:
self.TwoGramCounter[(x1,w1)] += count
del self.TwoGramCounter[(x1,x2)]
if __name__ == '__main__':
M = TwoGramModel([])
|
vathpela/anaconda | pyanaconda/modules/boss/boss_interface.py | Python | gpl-2.0 | 3,496 | 0.000286 | # boss_interface.py
# Anaconda main DBUS module & module manager.
#
# Copyright (C) 2017 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details. You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA. Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
#
from pyanaconda.dbus.interface import dbus_interface
from pyanaconda.modules.common.constants.interfaces import BOSS_ANACONDA
from pyanaconda.modules.common.constants.services import BOSS
from pyanaconda.dbus.template import InterfaceTemplate
from pyanaconda.dbus.typing import * # pylint: disable=wildcard-import
@dbus_interface(BOSS.interface_na | me)
class BossInterface(InterfaceTemplate):
"""DBus interface for the Boss."""
def InstallSystemWithTask(self) -> ObjPath:
"""Install the system.
:retur | n: a DBus path of the main installation task
"""
return self.implementation.install_system_with_task()
def Quit(self):
"""Stop all modules and then stop the boss."""
self.implementation.stop()
@dbus_interface(BOSS_ANACONDA.interface_name)
class AnacondaBossInterface(BossInterface):
"""Temporary extension of the boss for anaconda.
Used for synchronization with anaconda during transition.
"""
def StartModules(self, service_names: List[Str], addons_enabled: Bool):
"""Start the given kickstart modules.
:param service_names: a list of service names
:param addons_enabled: should we start addons?
"""
self.implementation.start_modules(service_names, addons_enabled)
@property
def AllModulesAvailable(self) -> Bool:
"""Returns true if all modules are available."""
return self.implementation.all_modules_available
@property
def UnprocessedKickstart(self) -> Str:
"""Returns kickstart containing parts that are not handled by any module."""
return self.implementation.unprocessed_kickstart
def SplitKickstart(self, path: Str):
"""Splits the kickstart for modules.
:raises SplitKickstartError: if parsing fails
"""
self.implementation.split_kickstart(path)
def DistributeKickstart(self) -> List[Dict[Str, Variant]]:
"""Distributes kickstart to modules synchronously.
Assumes all modules are started.
:returns: list of kickstart errors
"""
results = self.implementation.distribute_kickstart()
return [{
"module_name": get_variant(Str, result["module_name"]),
"file_name": get_variant(Str, result["file_name"]),
"line_number": get_variant(Int, result["line_number"]),
"error_message": get_variant(Str, result["error_message"])
} for result in results]
|
digling/cddb | datasets/Allen2007/__init__.py | Python | gpl-3.0 | 1,690 | 0.005329 | from pycddb.dataset import Dataset
from lingpy import Wordlist, csv2list
from lingpy.compare.partial import _get_slices
def prepare(ds):
errs = 0
wl = Wordlist(ds.raw('bds.tsv'))
W = {}
for k in wl:
value = wl[k, 'value']
tokens = wl[k, 'tokens']
doc = wl[k, 'docul | ect']
| if value:
morphemes = []
for a, b in _get_slices(wl[k, 'tokens']):
ipa = ''.join(tokens[a:b])
morphemes += [ipa]
ipa = ' '.join(morphemes)
clpa = ds.transform(ipa, 'CLPA')
struc = ds.transform(ipa, 'Structure')
try:
assert len(clpa.split(' ')) == len(struc.split(' '))
except:
errs += 1
print(errs, clpa, struc)
if '«' in clpa:
errs += 1
print(errs, ipa, clpa, struc)
W[k] = [doc, wl[k, 'concept'], wl[k, 'concepticon_id'], value,
clpa, struc, wl[k, 'partial_ids']]
W[0] = ['doculect', 'concept', 'concepticon_id', 'value', 'segments', 'structure', 'cogids']
ds.write_wordlist(Wordlist(W))
def inventories(ds):
data = csv2list(ds.raw('inv.tsv'))
header = data[0]
invs = {l: [] for l in ds.languages}
for i, line in enumerate(data[1:]):
stype, sis, ipa, struc = line[1:5]
if len(struc.split()) != len(ipa.split()):
print(i+2, 'warn', struc, ' | ', ipa)
for l, n in zip(header[5:], line[5:]):
if n:
note = '' if 'X' else n
invs[l] += [[sis, ipa, struc, stype, note]]
ds.write_inventories(invs)
|
NicoVarg99/daf-recipes | ckan/ckan/ckan/ckan/tests/legacy/models/test_follower.py | Python | gpl-3.0 | 4,502 | 0 | # encoding: utf-8
import ckan.model as model
import ckan.lib.create_test_data as ctd
CreateTestData = ctd.CreateTestData
class FollowerClassesTests(object):
@classmethod
def teardown_class(cls):
model.repo.rebuild_db()
def test_get(self):
following = self.FOLLOWER_CLASS.get(self.follower.id, self.followee.id)
assert following.follower_id == self.follower.id, following
assert following.object_id == self.followee.id, following
def test_get_returns_none_if_couldnt_find_users(self):
following = self.FOLLOWER_CLASS.get('some-id', 'other-id')
assert following is None, following
def test_is_following(self):
assert self.FOLLOWER_CLASS.is_following(self.follower.id,
self.followee.id)
def test_is_following_returns_false_if_user_isnt_following(self):
assert not self.FOLLOWER_CLASS.is_following(self.followee.id,
self.follower.id)
def test_followee_count(self):
count = self.FOLLOWER_CLASS.followee_count(self.follower.id)
assert count == 1, count
def test_followee_list(self):
followees = self.FOLLOWER_CLASS.followee_list(self.follower.id)
object_ids = [f.object_id for f in followees]
assert object_ids == [self.followee.id], object_ids
def test_follower_count(self):
count = self.FOLLOWER_CLASS.follower_count(self.followee.id)
assert count == 1, count
def test_follower_list(self):
followers = self.FOLLOWER_CLASS.follower_list(self.followee.id)
follower_ids = [f.follower_id for f in followers]
assert follower_ids == [self.follower.id], follower_ids
class TestUserFollowingUser(FollowerClassesTests):
FOLLOWER_CLASS = model.UserFollowingUser
@classmethod
def setup_class(cls):
model.repo.rebuild_db()
cls.follower = CreateTestData.create_user('follower')
cls.followee = CreateTestData.create_user('followee')
cls.FOLLOWER_CLASS(cls.follower.id, cls.followee.id).save()
cls._create_deleted_models()
@classmethod
def _create_deleted_models(cls):
deleted_user = CreateTestData.create_user('deleted_user')
cls.FOLLOWER_CLASS(deleted_user.id, cls.followee.id).save()
cls.FOLLOWER_CLASS(cls.follower.id, deleted_user.id).save()
deleted_user.delete()
deleted_user.save()
class TestUserFollowingDataset(FollowerClassesTests):
FOLLOWER_CLASS = model.UserFollowingDataset
@classmethod
def setup_class(cls):
model.repo.rebuild_db()
cls.follower = CreateTestData.create_user('follower')
cls.followee = cls._create_dataset('followee')
cls.FOLLOWER_CLASS(cls.follower.id, cls.followee.id).save()
cls._create_deleted_models()
@classmethod
def _create_deleted_models(cls):
deleted_user = CreateTestData.create_user('deleted_user')
cls.FOLLOWER_CLASS(deleted_user.id, cls.followee.id).save()
deleted_user.delete()
deleted_user.save()
deleted_dataset = cls._create_dataset('deleted_dataset')
cls.FOLLOWER_CLASS(cls.follower.id, deleted_dataset.id).save()
deleted_dataset.delete()
deleted_dataset.save()
@classmethod
def _create_dataset(self, name):
CreateTestData.create_arbitrary({'name': name})
return model.Package.get(name)
class TestUserFollowingGroup(FollowerClassesTests):
FOLLOW | ER_CLASS = model.UserFollowingGroup
@classmethod
def setup_class(cls):
model.repo.rebuild_db()
model.repo.new_revision()
cls.follower = CreateTestData.create_user('follower')
cls.followee = cls._create_group('followee')
cls.FOLLOWER_CLASS(cls.follower.id, cls.followee.id).save()
cls._create_deleted_models()
model.repo.commit_and_remove() |
@classmethod
def _create_deleted_models(cls):
deleted_user = CreateTestData.create_user('deleted_user')
cls.FOLLOWER_CLASS(deleted_user.id, cls.followee.id).save()
deleted_user.delete()
deleted_user.save()
deleted_group = cls._create_group('deleted_group')
cls.FOLLOWER_CLASS(cls.follower.id, deleted_group.id).save()
deleted_group.delete()
deleted_group.save()
@classmethod
def _create_group(self, name):
group = model.Group(name)
group.save()
return group
|
olety/FBD | fbd/storage.py | Python | mit | 19,799 | 0.000253 | #!/usr/local/bin/python3
# STL imports
import datetime
import json
import logging
import pprint
# Package imports
import dateutil.parser
import sqlalchemy
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import (relationship, scoped_session, sessionmaker,
validates)
import fbd.tools
def default_json_serializer(obj):
'''
JSON serializer for storage objects not supported by the default package
'''
if isinstance(obj, datetime.datetime):
return obj.isoformat()
if (isinstance(obj, Topic) or isinstance(obj, Place) or
isinstance(obj, Event)):
return obj.to_dict()
raise TypeError('{} type could not be serialized.'.format(type(obj)))
Base = declarative_base()
place_topic = sqlalchemy.Table(
'Place_Topic',
Base.metadata,
sqlalchemy.Column('place_id', sqlalchemy.String,
sqlalchemy.ForeignKey('Place.id')),
sqlalchemy.Column('topic_id', sql | alchemy.String,
sqlalchemy.ForeignKey('Topic.id')),
)
class Topic(Base):
__tablename__ = 'Topic'
@classmethod
def from_dict(cls, topic_dict):
return cls(id=topic_dict.get('id'), name=topic_dict.get('name'))
def to_json(self):
return json.dumps(
self.to_dict(),
default=default_json_serializer,
separators=(',', ':'),
)
def to_dict(self):
return {'id': self.id, 'name': self.name}
| id = sqlalchemy.Column(sqlalchemy.String(200), primary_key=True)
name = sqlalchemy.Column(sqlalchemy.String(100))
places = relationship('Place', secondary=place_topic)
@validates('name')
def validate_trunc(self, key, value):
max_len = getattr(self.__class__, key).prop.columns[0].type.length
if value and len(value) > max_len:
return value[:max_len]
return value
def __init__(self, id, name):
self.id = id
self.name = name
class Place(Base):
__tablename__ = 'Place'
@classmethod
def from_dict(cls, place_dict):
place_loc = place_dict.get('location', {})
topic_list = []
if place_dict.get('place_dict_topics'):
topic_list = [Topic.from_dict(topic_dict)
for topic_dict
in place_dict['place_dict_topics'].get('data')]
return cls(id=place_dict['id'],
topics=topic_list,
ptype=place_dict.get('place_dict_type', 'UNKNOWN'),
name=place_dict.get('name', 'Unnamed'),
city=place_loc.get('city', 'Wroclaw'),
country=place_loc.get('country', 'Poland'),
lat=place_loc.get('latitude', 0.0),
lon=place_loc.get('longitude', 0.0),
street=place_loc.get('street', 'Unknown'),
zip=place_loc.get('zip', '00-000'))
def to_json(self):
return json.dumps(
self.to_dict(),
default=default_json_serializer,
separators=(',', ':'),
)
def to_dict(self):
# IDEA: Add events=T/F flag?
# IDEA: Auto-generate fields?
return {
'id': self.id,
'name': self.name,
'ptype': self.ptype,
'topics': [topic.to_dict() for topic in self.topics],
'city': self.city,
'country': self.country,
'lat': self.lat,
'lon': self.lon,
'street': self.street,
'zip': self.zip,
}
id = sqlalchemy.Column(sqlalchemy.String(200), primary_key=True)
name = sqlalchemy.Column(sqlalchemy.String(100))
ptype = sqlalchemy.Column(sqlalchemy.String(10))
city = sqlalchemy.Column(sqlalchemy.String(25))
country = sqlalchemy.Column(sqlalchemy.String(25))
lat = sqlalchemy.Column(sqlalchemy.Float())
lon = sqlalchemy.Column(sqlalchemy.Float())
street = sqlalchemy.Column(sqlalchemy.String(100))
topics = relationship('Topic', secondary=place_topic, cascade='save-update')
zip = sqlalchemy.Column(sqlalchemy.String(6))
@validates('name', 'ptype', 'street', 'country', 'zip')
def validate_trunc(self, key, value):
max_len = getattr(self.__class__, key).prop.columns[0].type.length
if value and len(value) > max_len:
return value[:max_len]
return value
def __init__(self, id, name, topics, ptype, city, country, lat, lon, street,
zip):
self.id = id
self.name = name
self.ptype = ptype
self.topics = topics
self.city = city
self.country = country
self.lat = lat
self.lon = lon
self.street = street
self.zip = zip
def __repr__(self):
return '<Place {} - {}>'.format(self.id, self.name)
def __str__(self):
return '<Place {} - {}>'.format(self.id, self.name)
class Event(Base):
__tablename__ = 'Event'
@classmethod
def from_dict(cls, event_dict):
return cls(
id=event_dict['id'],
desc=event_dict.get('description', 'None'),
name=event_dict['name'],
picture_url=event_dict.get('picture', {})
.get('data', {}).get('url', 'None'),
ticket_url=event_dict.get('ticket_uri', 'None'),
place_id=event_dict.get['place_id'],
start_time=dateutil.parser.parse(
event_dict.get(
'start_time',
'2017-04-07T16:00:00+0200',
)),
)
def to_json(self):
return json.dumps(
self.to_dict(),
default=default_json_serializer,
separators=(',', ':'),
)
def to_dict(self):
return {
'id': self.id,
'name': self.name,
'description': self.description,
'start_time': self.start_time,
'place_id': self.place_id,
'picture_url': self.picture_url,
'ticket_url': self.ticket_url,
}
id = sqlalchemy.Column(sqlalchemy.String(200), primary_key=True)
description = sqlalchemy.Column(sqlalchemy.String(10000))
name = sqlalchemy.Column(sqlalchemy.String(100))
picture_url = sqlalchemy.Column(sqlalchemy.String(150))
ticket_url = sqlalchemy.Column(sqlalchemy.String(150))
start_time = sqlalchemy.Column(sqlalchemy.DateTime)
place_id = sqlalchemy.Column(
sqlalchemy.String(50), sqlalchemy.ForeignKey('Place.id'))
place = relationship('Place', backref='events', foreign_keys=[place_id])
@validates('description', 'name')
def validate_trunc(self, key, value):
max_len = getattr(self.__class__, key).prop.columns[0].type.length
if value and len(value) > max_len:
return value[:max_len]
return value
@validates('picture_url', 'ticket_url')
def validate_strict(self, key, value):
max_len = getattr(self.__class__, key).prop.columns[0].type.length
if value and len(value) > max_len:
return 'None'
return value
def __init__(self, id, desc, name, picture_url, ticket_url, start_time,
place_id):
self.id = id
self.name = name
self.description = desc
self.start_time = start_time
self.place_id = place_id
self.picture_url = picture_url
self.ticket_url = ticket_url
def __repr__(self):
return '<Event {} - {}>\n{}'.format(self.id, self.name,
pprint.pformat(self.to_dict()))
def __str__(self):
return pprint.pformat(self.to_dict())
# TODO: Implement 'Page' class
# class Page(Base):
# __tablename__ = 'Page'
# id = sqlalchemy.Column(sqlalchemy.String(50), primary_key=True)
# message = sqlalchemy.Column(sqlalchemy.String(10000))
# link = sqlalchemy.Column(sqlalchemy.String(150))
# created_time = sqlalchemy.Column(sqlalchemy.DateTime)
#
# like = sqlalchemy.Column(sqlalchemy.Integer())
# love = sqlalchemy.Column(sqlalchemy.Integer())
# haha = sqlalchemy.Column(sqlal |
jianwei1216/my-scripts | python_script/fast.py | Python | gpl-2.0 | 3,500 | 0.008 | #!/usr/bin/python
# new
import argparse
import sys
import os
import ssh
import time
sys.path.append('./')
from log import fastlog
def get_ssh_client(host):
global args
try:
client = ssh.SSHClient()
client.set_missing_host_key_policy(ssh.AutoAddPolicy())
client.connect(host, port=int(args.port), username='root', password=args.password)
ex | cept Exception, e:
print host, e
fastlog.error ("%s:%s" % (host, e))
return None
return client
def __despatch_cmd(host, cmd):
client = get_ssh_client(host)
if client == None:
return None
stdin, stdout, stderr = client.exec_command(cmd)
err = stderr.read()
out = stdout.read()
if len(out) > 0 | :
print host, out,
out = out[0:-1]
fastlog.info ("%s: %s" % (host, out))
if len(err) > 0:
print host, err,
err = err[0:-1]
fastlog.error ("%s: %s" % (host, err))
client.close()
def multi_fork(nodes, cmd):
pids = []
for host in nodes:
try:
pid = os.fork ()
if pid == 0:
# CHILD
__despatch_cmd (host, cmd)
exit ()
else:
pids.append(pid)
except Exception, e:
print e
fastlog.error ("%s" % e)
return None
for pid in pids:
try:
os.waitpid(pid, 0)
except Exception, e:
print e
fastlog.error ("%s" % e)
return None
def exec_commands():
global args
cmd = args.command
for arg in args.args:
cmd += ' ' + arg
multi_fork(args.nodes, cmd)
if __name__ == '__main__':
global args, mylog
config_file_path = os.path.expanduser('~') + '/.' + sys.argv[0].split('/')[1] + '.config'
parser = argparse.ArgumentParser()
parser.add_argument('--nodes', nargs='+', default=open(config_file_path, "ab+"))
parser.add_argument('-p', '--port', nargs='?', type=int, default='22')
parser.add_argument('--password', default=open(config_file_path, "ab+"))
parser.add_argument('command')
parser.add_argument('args', nargs=argparse.REMAINDER)
args = parser.parse_args()
if (type(args.nodes) == list and type(args.password) == file) or\
(type(args.nodes) == file and type(args.password) == list):
print 'Error: please give all args!'
exit(-1)
if type(args.nodes) == list and type(args.password) == str:
fp = open (config_file_path, "wb+")
fp.truncate(0)
fp.write(args.password + ' ')
for i in range(0, len(args.nodes)):
if i == len(args.nodes) - 1:
fp.write(args.nodes[i])
else:
fp.write(args.nodes[i] + ' ')
fp.write(' ' + str(args.port))
fp.close()
if type(args.nodes) == file and type(args.password) == file:
fp = open (config_file_path, "rb")
fargs = fp.readline()
if len(fargs) == 0:
print 'Fatal error: ./.fast.py.config is empty,'\
'please give all args!'
exit(-1)
args.password = fargs.split(' ')[0]
args.nodes = fargs.split(' ')[1:-1]
args.port = fargs.split(' ')[-1]
fp.close()
print args
fastlog.info ('>>>>>>>>>>>>>>>>NEW COMMANDS START>>>>>>>>>>>>>>\nnargs = %s' % args)
exec_commands()
fastlog.info ('<<<<<<<<<<<<<<<<NEW COMMANDS END<<<<<<<<<<<<<<<<\n\n')
exit()
|
nodakai/watchman | tests/integration/test_site_spawn.py | Python | apache-2.0 | 2,338 | 0.001283 | # vim:ts=4:sw=4:et:
# Copyright 2016-present Facebook, Inc.
# Licensed under the Apache License, Version 2.0
# no unicode literals
from __future__ import absolute_import, division, print_function
import json
import os
import pywatchman
import WatchmanInstance
try:
import unittest2 as unittest
except ImportError:
import unittest
WATCHMAN_SRC_DIR = os.environ.get("WATCHMAN_SRC_DIR", os.getcwd())
THIS_DIR = os.path.join(WATCHMAN_SRC_DIR, "tests", "integration")
@unittest.skipIf(os.name == "nt", "not supported on windows")
class TestSiteSpawn(unittest.TestCase):
if not pywatchman.compat.PYTHON3:
assertRegex = unittest.TestCase.assertRegexpMatches
def test_failingSpawner(self):
config = {
"spawn_watchman_service": os.path.join(THIS_DIR, "site_spawn_fail.py")
}
inst = WatchmanInstance.Instance(config=config)
stdout, stderr = inst.commandViaCLI(["version"])
print("stdout", stdout)
print( | "stderr", stderr)
stderr = stderr.decode("ascii")
self.assertEqual(b"", stdout)
self.assertRegex(stderr, "failed to start\n")
self.assertRegex(stderr, "site_spawn_fail.py: exited with status 1")
def test_no_site_spawner(self):
"""With a site spawner configured to otherwise fail, pass
`--no-site-spawner` and ensure that a failure didn't occur."""
config = {
"spawn_watchman_servi | ce": os.path.join(THIS_DIR, "site_spawn_fail.py")
}
inst = WatchmanInstance.Instance(config=config)
stdout, stderr = inst.commandViaCLI(["version", "--no-site-spawner"])
print(stdout, stderr.decode("ascii"))
parsed = json.loads(stdout.decode("ascii"))
self.assertTrue("version" in parsed)
inst.commandViaCLI(["--no-spawn", "--no-local", "shutdown-server"])
def test_spawner(self):
config = {"spawn_watchman_service": os.path.join(THIS_DIR, "site_spawn.py")}
inst = WatchmanInstance.Instance(config=config)
stdout, stderr = inst.commandViaCLI(["version"])
parsed = json.loads(stdout.decode("ascii"))
self.assertTrue("version" in parsed)
# Shut down that process, as we have no automatic way to deal with it
inst.commandViaCLI(["--no-spawn", "--no-local", "shutdown-server"])
|
mchrzanowski/ProjectEuler | src/python/Problem030.py | Python | mit | 579 | 0.008636 | '''
Created on Jan 11, 2012
@author: mchrzanowski
'''
__LIMIT = 999999 # as 999999 > 6 * 9 ^ 5 , this is our ceiling.
__POWER = 5
def main():
winners = set([])
for number in xrange(2, __LIMIT): # exclude 1 as it's not a sum (but would qua | lify)
numberString = str(number)
addedValue = 0
for numeral in numberString:
addedValue = addedValue + int(numeral) ** __POWER
if addedValue == number:
winners.add(number)
print "Numbers: ", winners
prin | t "Sum: ", sum(winners)
if __name__ == '__main__':
main() |
martbhell/wasthereannhlgamelastnight | src/lib/google/oauth2/service_account.py | Python | mit | 25,866 | 0.000464 | # Copyright 2016 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Service Accounts: JSON Web Token (JWT) Profile for OAuth 2.0
This module implements the JWT Profile for OAuth 2.0 Authorization Grants
as defined by `RFC 7523`_ with particular support for how this RFC is
implemented in Google's infrastructure. Google refers to these credentials
as *Service Accounts*.
Service accounts are used for server-to-server communication, such as
interactions between a web application server and a Google service. The
service account belongs to your application instead of to an individual end
user. In contrast to other OAuth 2.0 profiles, no users are involved and your
application "acts" as the service account.
Typically an application uses a service account when the application uses
Google APIs to work with its own data rather than a user's data. For example,
an application that uses Google Cloud Datastore for data persistence would use
a service account to authenticate its calls to the Google Cloud Datastore API.
However, an application that needs to access a user's Drive documents would
use the normal OAuth 2.0 profile.
Additionally, Google Apps domain administrators can grant service accounts
`domain-wide delegation`_ authority to access user data on behalf of users in
the domain.
This profile uses a JWT to acquire an OAuth 2.0 access token. The JWT is used
in place of the usual authorization token returned during the standard
OAuth 2.0 Authorization Code grant. The JWT is only used for this purpose, as
the acquired access token is used as the bearer token when making requests
using these credentials.
This profile differs from normal OAuth 2.0 profile because no user consent
step is required. The use of the private key allows this profile to assert
identity directly.
This profile also differs from the :mod:`google.auth.jwt` authentication
because the JWT credentials use the JWT directly as the bearer token. This
profile instead only uses the JWT to obtain an OAuth 2.0 access token. The
obtained OAuth 2.0 access token is used as the bearer token.
Domain-wide delegation
----------------------
Domain-wide delegation allows a service account to access user data on
behalf of any user in a Google Apps domain without consent from the user.
For example, an application that uses the Google Calendar API to add events to
the calendars of all users in a Google Apps domain would use a service account
to access the Google Calendar API on behalf of users.
The Google Apps administrator must explicitly authorize the service account to
do this. This authorization step is referred to as "delegating domain-wide
authority" to a service account.
You can use domain-wise delegation by creating a set of credentials with a
specific subject using :meth:`~Credentials.with_subject`.
.. _RFC 7523: https://tools.ietf.org/html/rfc7523
"""
import copy
import datetime
from google.auth import _helpers
from google.auth import _service_account_info
from google.auth import credentials
from google.auth import jwt
from google.oauth2 import _client
_DEFAULT_TOKEN_LIFETIME_SECS = 3600 # 1 hour in seconds
_GOOGLE_OAUTH2_TOKEN_ENDPOINT = "https://oauth2.googleapis.com/token"
class Credentials(
credentials.Signing, credentials.Scoped, credentials.CredentialsWithQuotaProject
):
"""Service account credentials
Usually, you'll create these credentials with one of the helper
constructors. To create credentials using a Google service account
private key JSON file::
credentials = service_account.Credentials.from_service_account_file(
'service-account.json')
Or if you already have the service account file loaded::
service_account_info = json.load(open('service_account.json'))
credentials = service_account.Credentials.from_service_account_info(
service_account_info)
Both helper methods pass on arguments to the constructor, so you can
specify additional scopes and a subject if necessary::
credentials = service_account.Credentials.from_service_account_file(
'service-account.json',
scopes=['email'],
subject='user@example.com')
The credentials are considered immutable. If you want to modify the scopes
or the subject used for delegation, use :meth:`with_scopes` or
:meth:`with_subject`::
scoped_credentials = credentials.with_scopes(['email'])
delegated_credentials = credentials.with_subject(subject)
To add a quota project, use :meth:`with_quota_project`::
credentials = credentials.with_quota_project('myproject-123')
"""
def __init__(
self,
signer,
service_account_email,
token_uri,
scopes=None,
default_scopes=None,
subject=None,
project_id=None,
quota_project_id=None,
additional_claims=None,
always_use_jwt_access=False,
):
"""
Args:
signer (google.auth.crypt.Signer): The signer used to sign JWTs.
service_account_email (str): The service account's email.
scopes (Sequence[str]): User-defined scopes to request during the
authorization grant.
default_scopes (Sequence[str]): Default scopes passed by a
Google client library. Use 'scopes' for user-defined scopes.
token_uri (str): The OAuth 2.0 Token URI.
subject (str): For domain-wide delegation, the email address of the
user to for which to request delegated access.
project_id (str): Project ID associated with the service account
credential.
quota_project_id (Optional[str]): The project ID used for quota and
billing.
additional_claims (Mapping[str, str]): Any additional claims for
the JWT assertion used in the authorization grant.
always_use_jwt_access (Optional[bool]): Whether self signed JWT should
be always used.
.. note:: Typically one of the helper constructors
:meth:`from_service_account_file` or
:meth:`from_service_account_info` are used instead of calling the
constructor directly.
"""
super(Credentials, self).__init__()
self._scopes = scopes
self._default_scopes = default_scopes
self._signer = signer
self._service_account_email = service_account_email
self._subject = subject
self._project_id = project_id
self._quota_project_id = quota_project_id
self._token_uri = token_uri
self._always_use_jwt_access = always_use_jwt_access
self._jwt_credentials = None
if additional_claims is not None:
self._additional_claims = additional_claims
else:
self._additional_claims = {}
@classmetho | d
def _from_signer_and_info(cls, signer, | info, **kwargs):
"""Creates a Credentials instance from a signer and service account
info.
Args:
signer (google.auth.crypt.Signer): The signer used to sign JWTs.
info (Mapping[str, str]): The service account info.
kwargs: Additional arguments to pass to the constructor.
Returns:
google.auth.jwt.Credentials: The constructed credentials.
Raises:
ValueError: If the info is not in the expected format.
"""
return cls(
signer,
service_account_email=info["client_email"],
token_uri=info["token_uri"],
project_id=info.get("project_id"),
**kwa |
theju/dtwt | accounts/models.py | Python | mit | 160 | 0 | from django.db import models
from django.con | trib.auth.models import AbstractUser
class Us | er(AbstractUser):
phone_number = models.CharField(max_length=15)
|
MrMasterBaiter/entropy-ai | entropy server/server/server.py | Python | gpl-2.0 | 3,332 | 0.032713 | # main.py: Provides the server client infra for gaming
# python2 server.py <port> <max_clients>
# YES YOU CAN ONLY USE PYTHON "2" and NOT "3".
import os, sys, socket, signal, game, json, humanSocket
if (os.path.realpath('../utils') not in sys.path):
sys.path.insert(0, os.path.realpath('../utils'))
from log import *
DEBUG = False
connections = []
config = {}
sock = None
def loadConfig():
global config
f = open('config.txt', 'r')
lines = f.readlines()
for line in lines:
line = line.strip()
if (len(line) == 0 or line.startswith('#')):
continue
keyVal = list( map(lambda x: x.strip(), line.split(':') ) )
config[keyVal[0]] = keyVal[1]
# override config with sys args:
if (len(sys.argv) > 1):
config['port'] = int(sys.argv[1])
if (len(sys.argv) > 2):
config['max_clients'] = sys.argv[2]
# TODO: | Some keys are REQUIRED. maybe check that they are present.
print ('Server config:', config)
def letClientsJoin():
global connections, sock
port = int(config.get('port'))
hostname = '0.0.0.0'
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.bind((hostname, port))
sock.listen(1)
print ('Listening on %s:%d for clients | for 20s' % (hostname, port))
signal.signal(signal.SIGALRM, lambda x: (_ for _ in ()).throw(Exception('TimeOut')) )
signal.alarm(20)
try:
while True:
# Wait for connections
print 'Waiting'
conn, addr = sock.accept()
botname = conn.recv(100) # a client must return information...
# conn.setblocking(1)
print('Bot ' + botname + ' connected')
connections.append((conn, addr, botname))
if (len(connections) >= int(config.get('max_clients') or 20) ):
raise Exception('TimeOut')
except:
print (sys.exc_info()[0])
pass
signal.alarm(0)
print('Timed out for joining...')
def printMenu():
Warn('-----------------Tournament Manager-----------------')
print('Bots connected: ')
i = 0
for (conn, addr, botname) in connections:
print (str(i) + '. ' + botname + ' ' + repr(addr))
i += 1
print(bcolors.OKYELLOW + 'command examples: ' + bcolors.ENDC + \
'"1 v 2", "1 v 2 v 4" (league - N.Y.S), \
\n\tuse "h" as an index for human e.g "0 v h" \
\n\tq for quitting (close clients first)')
def quit():
for c in connections:
c[0].close()
sock.close()
exit(0)
# can throw exception
def getPlayer(name_or_index):
name_or_index = name_or_index.strip()
if (DEBUG):
Info('|' + name_or_index + '|')
if (name_or_index == 'h'):
return (humanSocket.HumanSocket(), -1, 'human')
return connections[int(name_or_index)]
def beginGameGod():
while True:
printMenu()
cmd = raw_input(bcolors.OKRED + "At your command: " + bcolors.ENDC)
cmd = cmd.strip()
if (cmd.startswith('q')):
print ('quitting')
quit()
elif ('v' in cmd):
try:
players = list(map(lambda x: getPlayer(x), cmd.split('v')))
except:
Error('Bad Input')
continue
game.startWar(players[0], players[1])
if __name__ == "__main__":
loadConfig()
letClientsJoin() # TODO: upgrade to letClientsJoinForever - threading thingy.
beginGameGod()
# Maybe useful:
# Async input acceptance for a lively server.
# import select
# try:
# if select.select([sys.stdin,], [], [], 0)[0]:
# thisChar = sys.stdin.read(1)
# else:
# print ('went here')
# except IOError:
# #nothing to read. should really have checked first
# pass |
vlegoff/tsunami | src/secondaires/navigation/equipage/postes/voilier.py | Python | bsd-3-clause | 1,769 | 0.000566 | # -*-coding:Utf-8 -*
# Copyright (c) 2010-2017 LE GOFF Vincent
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVE | R CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF TH | E
# POSSIBILITY OF SUCH DAMAGE.
"""Fichier contenant le poste voilier."""
from . import Poste
class Voilier(Poste):
"""Classe définissant le poste voilier."""
nom = "voilier"
autorite = 15
nom_parent = "officier"
|
google/google-ctf | 2021/quals/kctf/challenge-templates/pwn/healthcheck/healthcheck.py | Python | apache-2.0 | 1,160 | 0.001724 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pwnlib
def handle_pow(r):
print(r.recvuntil(b'python3 ') | )
print(r.recvuntil(b' solve '))
challenge = r.recvline().decode('ascii').strip()
p = pwnlib.tubes.process.process(['kctf_bypass_pow', challenge])
solution = p.readall().strip()
| r.sendline(solution)
print(r.recvuntil(b'Correct\n'))
r = pwnlib.tubes.remote.remote('127.0.0.1', 1337)
print(r.recvuntil('== proof-of-work: '))
if r.recvline().startswith(b'enabled'):
handle_pow(r)
print(r.recvuntil(b'CTF{'))
print(r.recvuntil(b'}'))
exit(0)
|
alexryndin/ambari | ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_service_check.py | Python | apache-2.0 | 16,060 | 0.03655 | #!/usr/bin/env python
'''
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
import json
import os
from mock.mock import MagicMock, call, patch
from stacks.utils.RMFTestCase import *
import datetime, sys, socket
import resource_management.libraries.functions
@patch.object(resource_management.libraries.functions, "get_unique_id_and_date", new = MagicMock(return_value=''))
@patch("socket.socket")
@patch("time.time", new=MagicMock(return_value=1431110511.43))
class TestServiceCheck(RMFTestCase):
COMMON_SERVICES_PACKAGE_DIR = "HIVE/0.12.0.2.0/package"
STACK_VERSION = "2.0.6"
DEFAULT_IMMUTABLE_PATHS = ['/apps/hive/warehouse', '/apps/falcon', '/mr-history/done', '/app-logs', '/tmp']
def test_service_check_default(self, socket_mock):
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/service_check.py",
classname="HiveServiceCheck",
command="service_check",
config_file="default.json",
stack_version = self.STACK_VERSION,
target = RMFTestCase.TARGET_COMMON_SERVICES
)
self.assertResourceCalled('Execute', "! beeline -u 'jdbc:hive2://c6402.ambari.apache.org:10000/;transportMode=binary;auth=noSasl' -e '' 2>&1| awk '{print}'|grep -i -e 'Connection refused' -e 'Invalid URL'",
path = ['/bin/', '/usr/bin/', '/usr/lib/hive/bin/', '/usr/sbin/'],
user = 'ambari-qa',
timeout = 30,
timeout_kill_strategy = 2,
)
self.assertResourceCalled('File', '/tmp/hcatSmoke.sh',
content = StaticFile('hcatSmoke.sh'),
mode = 0755,
)
self.assertResourceCalled('Execute', 'env JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /tmp/hcatSmoke.sh hcatsmoke prepare false',
logoutput = True,
path = ['/usr/sbin',
'/usr/local/bin',
'/bin',
'/usr/bin',
'/bin:/usr/hdp/current/hive-server2/bin:/usr/bin'],
tries = 3,
user = 'ambari-qa',
try_sleep = 5,
)
self.assertResourceCalled('ExecuteHadoop', 'fs -test -e /apps/hive/warehouse/hcatsmoke',
conf_dir = '/etc/hadoop/conf',
logoutput = True,
user = 'hdfs',
bin_dir = '/bin:/usr/hdp/current/hive-server2/bin:/usr/bin',
)
self.assertResourceCalled('Ex | ecute', ' /tmp/hcatSmok | e.sh hcatsmoke cleanup false',
logoutput = True,
path = ['/usr/sbin',
'/usr/local/bin',
'/bin',
'/usr/bin',
'/bin:/usr/hdp/current/hive-server2/bin:/usr/bin'],
tries = 3,
user = 'ambari-qa',
try_sleep = 5,
)
self.assertResourceCalled('File', '/tmp/templetonSmoke.sh',
content = StaticFile('templetonSmoke.sh'),
mode = 0755,
)
self.assertResourceCalled('File', '/tmp/idtest.ambari-qa.1431110511.43.pig',
content = Template('templeton_smoke.pig.j2', templeton_test_input='/tmp/idtest.ambari-qa.1431110511.43.in', templeton_test_output='/tmp/idtest.ambari-qa.1431110511.43.out'),
owner="hdfs"
)
self.assertResourceCalled('HdfsResource', '/tmp/idtest.ambari-qa.1431110511.43.pig',
immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
security_enabled = False,
hadoop_bin_dir = '/usr/bin',
keytab = UnknownConfigurationMock(),
source = '/tmp/idtest.ambari-qa.1431110511.43.pig',
default_fs = 'hdfs://c6401.ambari.apache.org:8020',
hdfs_site = self.getConfig()['configurations']['hdfs-site'],
kinit_path_local = '/usr/bin/kinit',
principal_name = 'missing_principal',
user = 'hdfs',
dfs_type = '',
owner = 'ambari-qa',
hadoop_conf_dir = '/etc/hadoop/conf',
type = 'file',
action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
)
self.assertResourceCalled('HdfsResource', '/tmp/idtest.ambari-qa.1431110511.43.in',
immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
security_enabled = False,
hadoop_bin_dir = '/usr/bin',
keytab = UnknownConfigurationMock(),
source = '/etc/passwd',
default_fs = 'hdfs://c6401.ambari.apache.org:8020',
hdfs_site = self.getConfig()['configurations']['hdfs-site'],
kinit_path_local = '/usr/bin/kinit',
principal_name = 'missing_principal',
user = 'hdfs',
dfs_type = '',
owner = 'ambari-qa',
hadoop_conf_dir = '/etc/hadoop/conf',
type = 'file',
action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
)
self.assertResourceCalled('HdfsResource', None,
immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
security_enabled = False,
hadoop_bin_dir = '/usr/bin',
keytab = UnknownConfigurationMock(),
default_fs = 'hdfs://c6401.ambari.apache.org:8020',
hdfs_site = self.getConfig()['configurations']['hdfs-site'],
kinit_path_local = '/usr/bin/kinit',
principal_name = 'missing_principal',
user = 'hdfs',
dfs_type = '',
action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
hadoop_conf_dir = '/etc/hadoop/conf',
)
self.assertResourceCalled('Execute', '/tmp/templetonSmoke.sh c6402.ambari.apache.org ambari-qa 50111 idtest.ambari-qa.1431110511.43.pig no_keytab false /usr/bin/kinit no_principal /tmp',
logoutput = True,
path = ['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'],
tries = 3,
try_sleep = 5,
)
self.assertNoMoreResources()
def test_service_check_secured(self, socket_mock):
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/service_check.py",
classname="HiveServiceCheck",
command="service_check",
config_file="secured.json",
stack_version = self.STACK_VERSION,
target = RMFTestCase.TARGET_COMMON_SERVICES
)
self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/smokeuser.headless.keytab ambari-qa@EXAMPLE.COM; ',
user = 'ambari-qa',
)
self.assertResourceCalled('Execute', "! beeline -u 'jdbc:hive2://c6402.ambari.apache.org:10000/;transportMode=binary;principal=hive/_HOST@EXAMPLE.COM' -e '' 2>&1| awk '{print}'|grep -i -e 'Connection refused' -e 'Invalid URL'",
path = ['/bin/', '/usr/bin/', '/usr/lib/hive/bin/', '/usr/sbin/'],
user = 'ambari-qa',
timeout = 30,
timeout_kill_strategy = 2,
)
self.assertResourceCalled('File', '/tmp/hcatSmoke.sh',
content = StaticFile('hcatSmoke.sh'),
mode = 0755,
)
self.maxDiff = None
self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/smokeuser.headless.keytab ambari-qa@EXAMPLE.COM; env JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /tmp/hcatSmoke.sh hcatsmoke prepare false',
logoutput = True,
|
SolusOS-discontinued/RepoHub | profiles/views.py | Python | mit | 785 | 0.03949 | # Create your views here.
from django.http import HttpRe | sponse
from django.shortcuts import render, get_object_or_404
from django.contrib.auth.models import User
from repository.models import PisiPackage
def show_user (request, name=None):
user = get_object_or_404 (User, username=name)
context = { 'user' : user }
packages = None
try:
packages = PisiPackage.objects.filter(known_user=user).order_by("-date_u | pdated")
count = len(packages)
total_packages = len(PisiPackage.objects.all())
pct = float (float(count) / (total_packages)) * 100
packages = packages[:7]
context = { 'user': user, 'package_count': count, 'package_ratio': pct, 'packages': packages}
except Exception, e:
print e
pass
return render (request, "profiles/individ.html", context)
|
gnu-sandhi/sandhi | modules/gr36/grc/examples/xmlrpc/xmlrpc_client_script.py | Python | gpl-3.0 | 463 | 0.034557 | # | !/usr/bin/env python
import time
import random
import xmlrpclib
#create server object
s = xmlrpclib.Server("http://localhost:1234")
#randomly change parameters of the sinusoid
for i in range(10):
#generate random values
new_freq = random.uniform(0, 5000)
new_ampl = random.uniform(0, 2)
new_offset = random.uniform(-1, 1)
#set new values
time.sleep(1)
s.set_freq(new_freq)
time.sleep(1)
s.set | _ampl(new_ampl)
time.sleep(1)
s.set_offset(new_offset)
|
CCI-MOC/GUI-Backend | core/migrations/0041_user_ssh_keys.py | Python | apache-2.0 | 1,036 | 0.001931 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
import uuid
class Migration(migrations.Migration):
dependencies = [
('cor | e', '0040_auto_add_status_types'),
]
operations = [
migrations.CreateModel(
name='SSHKey',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary | _key=True)),
('name', models.CharField(max_length=256)),
('uuid', models.CharField(default=uuid.uuid4, unique=True, max_length=36)),
('pub_key', models.TextField()),
('atmo_user', models.ForeignKey(to=settings.AUTH_USER_MODEL)),
],
options={
'db_table': 'ssh_key',
},
),
migrations.AddField(
model_name='userprofile',
name='use_ssh_keys',
field=models.BooleanField(default=False),
),
]
|
archivsozialebewegungen/AlexandriaBase | alexandriabase/tools.py | Python | gpl-3.0 | 5,581 | 0.008067 | '''
Created on 15.11.2021
@author: michael
'''
from alexandriabase.daos import DocumentDao, DaoModule, DOCUMENT_TABLE,\
DocumentFileInfoDao
from injector import Injector, inject
from alexandriabase import AlexBaseModule
from alexandriabase.services import ServiceModule, DocumentFileManager,\
DocumentFileNotFound, THUMBNAIL, FileProvider, ReferenceService
from sqlalchemy.sql.expression import or_, and_
from alexandriabase.base_exceptions import NoSuchEntityException
from datetime import date
from os.path import exists
import re
def tex_sanitizing(text: str) -> str:
text = text.replace("&", "\\&")
text = text.replace("#", "\\#")
return text
class PlakatExporter:
@inject
def __init__(self, dao: DocumentDao,
file_info_dao: DocumentFileInfoDao,
file_manager: DocumentFileManager,
| file_provider: | FileProvider,
reference_service: ReferenceService):
self.dao = dao
self.file_info_dao = file_info_dao
self.file_manager = file_manager
self.file_provider = file_provider
self.reference_service = reference_service
self.titel = "Plakate im ASB"
def export_to_tex(self):
self.open_file()
for record in self.fetch_records():
events = self.reference_service.get_events_referenced_by_document(record)
self.print_record(record, events)
self.close_file()
def print_record(self, record, events):
if self.filtered(record, events):
return
self.file.write("\n\n\\section*{Dokumentnr. %d}" % record.id)
self.file.write("\n\nBeschreibung: %s" % tex_sanitizing(record.description))
if record.condition is not None and record.condition.strip() != "":
self.file.write("\n\nZusätzliche Infos: %s" % tex_sanitizing(record.condition))
self.print_events(events)
self.print_img(record.id)
def fetch_records(self):
condition = DOCUMENT_TABLE.c.doktyp == 9
return self.dao.find(condition)
def filtered(self, record, events):
return False
def print_events(self, events):
if len(events) == 0:
return
if len(events) == 1:
self.file.write("\n\n\\subsection*{Verknüpftes Ereignis}")
else:
self.file.write("\n\n\\subsection*{Verknüpfte Ereignisse}")
for event in events:
self.file.write("\n\n%s: %s" % (event.daterange, tex_sanitizing(event.description)))
def print_img(self, id):
try:
file_info = self.file_info_dao.get_by_id(id)
file_name = self.file_manager.get_generated_file_path(file_info, THUMBNAIL)
if not exists(file_name):
print("Generating file %s" % file_name)
self.file_provider.get_thumbnail(file_info)
self.file.write("\n\n\\vspace{0.5cm}")
self.file.write("\n\n\\includegraphics[width=7.0cm]{%s}\n" % file_name)
except NoSuchEntityException:
self.file.write("\n\nEintrag nicht gefunden!")
except DocumentFileNotFound:
self.file.write("\n\nDokumentdatei nicht gefunden!")
except OSError as e:
print(e)
print("Error on document %d" % id)
def open_file(self):
self.file = open("/tmp/plakate.tex", "w")
self.file.write("\\documentclass[german, a4paper, 12pt, twocolums]{article}\n")
self.file.write("\\usepackage[utf8]{inputenc}\n")
self.file.write("\\usepackage[T1]{fontenc}\n")
self.file.write("\\usepackage{graphicx}\n")
self.file.write("\\setlength{\\parindent}{0cm}\n")
self.file.write("\\special{papersize=29.7cm,21cm}\n")
self.file.write("\\usepackage{geometry}\n")
self.file.write("\\geometry{verbose,body={29.7cm,21cm},tmargin=1.5cm,bmargin=1.5cm,lmargin=1cm,rmargin=1cm}\n")
self.file.write("\\begin{document}\n")
self.file.write("\\sloppy\n")
self.file.write("\\title{%s}\n" % self.titel)
self.file.write("\\author{Archiv Soziale Bewegungen e.V.}\n")
self.file.write("\\date{Stand: %s}\n" % date.today())
self.file.write("\\maketitle\n\n")
self.file.write("\\twocolumn\n\n")
def close_file(self):
self.file.write("\\end{document}\n")
self.file.close()
class FemPlakatExporter(PlakatExporter):
def open_file(self):
self.titel = "Plakate zur Neuen Frauenbewegung\\linebreak{}(vor 1990 oder Entstehung nicht bestimmt)"
PlakatExporter.open_file(self)
#def filtered(self, record, events):
# if record.condition is not None and re.compile(r".*(199\d|20\d\d).*").match(record.condition):
# return True
# if len(events) == 0:
# return False
# for event in events:
# if event.id < 1990000000:
# return False
# return True
def fetch_records(self):
condition = and_(DOCUMENT_TABLE.c.doktyp == 9,
or_(DOCUMENT_TABLE.c.standort.like("7%"),
DOCUMENT_TABLE.c.standort.like("23%")))
return self.dao.find(condition)
if __name__ == '__main__':
injector = Injector([AlexBaseModule, DaoModule, ServiceModule])
exporter = injector.get(FemPlakatExporter)
exporter.export_to_tex() |
Greymerk/python-rpg | src/player/__init__.py | Python | gpl-3.0 | 26 | 0 | from | player import Player
| |
Johnetordoff/osf.io | addons/zotero/views.py | Python | apache-2.0 | 2,789 | 0.001076 | # -*- coding: utf-8 -*-
from flask import request
from .provider import ZoteroCitationsProvider
from website.citations.views import GenericCitationViews
from website.project.decorators import (
must_have_addon, must_be_addon_authorizer,
must_have_permission, must_not_be_registration,
must_be_contributor_or_public
)
from api.base.utils import is_truthy
from osf.utils.permissions import WRITE
class ZoteroViews(GenericCitationViews):
def set_config(self):
addon_short_name = self.addon_short_name
Provider = self.Provider
@must_not_be_registration
@must_have_addon(addon_short_name, 'user')
@must_have_addon(addon_short_name, 'node')
@must_be_addon_authorizer(addon_short_name)
@must_have_permission(WRITE)
def _set_config(node_addon, user_addon, auth, **kwargs):
""" Changes folder associated with addon.
Returns serialized node settings
"""
provider = Provider()
args = request.get_json()
external_list_id = args.get('external_list_id')
external_list_name = args.get('external_list_name')
external_library_id = args.get('external_library_id', None)
external_library_name = args.get('external_library_name', None)
provider.set_config(
node_addon,
auth.user,
external_list_id,
external_list_name,
auth,
external_library_id,
external_library_name
)
return {
'result': provider.serializer(
node_settings=node | _addon,
user_settings=auth.user.get_addon(addon_short_name),
).serialized_node_settings
}
_set | _config.__name__ = '{0}_set_config'.format(addon_short_name)
return _set_config
def library_list(self):
addon_short_name = self.addon_short_name
@must_be_contributor_or_public
@must_have_addon(addon_short_name, 'node')
def _library_list(auth, node_addon, **kwargs):
""" Returns a list of group libraries - for use with Zotero addon
"""
limit = request.args.get('limit')
start = request.args.get('start')
return_count = is_truthy(request.args.get('return_count', False))
append_personal = is_truthy(request.args.get('append_personal', True))
return node_addon.get_folders(limit=limit, start=start, return_count=return_count, append_personal=append_personal)
_library_list.__name__ = '{0}_library_list'.format(addon_short_name)
return _library_list
zotero_views = ZoteroViews('zotero', ZoteroCitationsProvider)
|
etingof/pyasn1-modules | tests/test_rfc6402.py | Python | bsd-2-clause | 6,395 | 0.001095 | #
# This file is part of pyasn1-modules software.
#
# Created by Russ Housley
# Copyright (c) 2019, Vigil Security, LLC
# License: http://snmplabs.com/pyasn1/license.html
#
import sys
import unittest
from pyasn1.codec.der.decoder import decode as der_decoder
from pyasn1.codec.der.encoder import encode as der_encoder
from pyasn1.type import char
from pyasn1.type import namedtype
from pyasn1.type import univ
from pyasn1_modules import pem
from pyasn1_modules import rfc5652
from pyasn1_modules import rfc6402
class BackwardCompatibilityTestCase(unittest.TestCase):
pem_text = """\
MIIEJQYJKoZIhvcNAQcCoIIEFjCCBBICAQMxCzAJBgUrDgMCGgUAMIIDAgYIKwYBBQUHDAKgggL0
BIIC8DCCAuwweDB2AgECBgorBgEEAYI3CgoBMWUwYwIBADADAgEBMVkwVwYJKwYBBAGCNxUUMUow
SAIBBQwZcGl0dWNoYTEuZW1lYS5ocHFjb3JwLm5ldAwMRU1FQVxwaXR1Y2hhDBpDTUNSZXFHZW5l
cmF0b3IudnNob3N0LmV4ZTCCAmqgggJmAgEBMIICXzCCAcgCAQAwADCBnzANBgkqhkiG9w0BAQEF
AAOBjQAwgYkCgYEA0jm7SSSm2wyEAzuNKtFZFJKo91SrJq9wQwEhEKHDavZwMQOm1rZ2PF8NWCEb
PqrhToQ7rtiGLSZa4dF4bzgmBqQ9aoSfEX4jISt31Vy+skHidXjHHpbsjT24NPhrZgANivL7CxD6
Ft+s7qS1gL4HRm2twQkqSwOLrE/q2QeXl2UCAwEAAaCCAR0wGgYKKwYBBAGCNw0CAzEMFgo2LjIu
OTIwMC4yMD4GCSqGSIb3DQEJDjExMC8wHQYDVR0OBBYEFMW2skn88gxhONWZQA4sWGBDb68yMA4G
A1UdDwEB/wQEAwIHgDBXBgkrBgEEAYI3FRQxSjBIAgEFDBlwaXR1Y2hhMS5lbWVhLmhwcWNvcnAu
bmV0DAxFTUVBXHBpdHVjaGEMGkNNQ1JlcUdlbmVyYXRvci52c2hvc3QuZXhlMGYGCisGAQQBgjcN
AgIxWDBWAgECHk4ATQBpAGMAcgBvAHMAbwBmAHQAIABTAHQAcgBvAG4AZwAgAEMAcgB5AHAAdABv
AGcAcgBhAHAAaABpAGMAIABQAHIAbwB2AGkAZABlAHIDAQAwDQYJKoZIhvcNAQEFBQADgYEAJZlu
mxjtCxSOQi27jsVdd3y8NSIlzNv0b3LqmzvAly6L+CstXcnuG2MPQqPH9R7tbJonGUniBQO9sQ7C
KhYWj2gfhiEkSID82lV5chINVUFKoUlSiEhWr0tPGgvOaqdsKQcrHfzrsBbFkhDqrFSVy7Yivbnh
qYszKrOjJKiiCPMwADAAMYH5MIH2AgEDgBTFtrJJ/PIMYTjVmUAOLFhgQ2+vMjAJBgUrDgMCGgUA
oD4wFwYJKoZIhvcNAQkDMQoGCCsGAQUFBwwCMCMGCSqGSIb3DQEJBDEWBBTFTkK/OifaFjwqHiJu
xM7qXcg/VzANBgkqhkiG9w0BAQEFAASBgKfC6jOi1Wgy4xxDCQVK9+e5tktL8wE/j2cb9JSqq+aU
5UxEgXEw7q7BoYZCAzcxMRriGzakXr8aXHcgkRJ7XcFvLPUjpmGg9SOZ2sGW4zQdWAwImN/i8loc
xicQmJP+VoMHo/ZpjFY9fYCjNZUArgKsEwK/s+p9yrVVeB1Nf8Mn
"""
def testDerCodec(self):
layers = { }
layers.update(rfc5652.cmsContentTypesMap)
getNextLayer = {
rfc5652.id_ct_contentInfo: lambda x: x['contentType'],
rfc5652.id_signedData: lambda x: x['encapContentInfo']['eContentType'],
rfc6402.id_cct_PKIData: lambda x: None
}
getNextSubstrate = {
rfc5652.id_ct_contentInfo: lambda x: x['content'],
rfc5652.id_signedData: lambda x: x['encapContentInfo']['eContent'],
rfc6402.id_cct_PKIData: lambda x: None
}
substrate = pem.readBase64fromText(self.pem_text)
next_layer = rfc5652.id_ct_contentInfo
while next_layer:
asn1Object, rest = der_decoder(substrate, asn1Spec=layers[next_layer])
self.assertFalse(rest)
self.assertTrue(asn1Object.prettyPrint())
self.assertEqual(substrate, der_encoder(asn1Object))
substrate = getNextSubstrate[next_layer](asn1Object)
next_layer = getNextLaye | r[next_layer](asn1Object)
def testOpenTypes(self):
class ClientInformation(univ.Sequence):
pass
ClientInformation.componentType = namedtype.NamedTypes(
namedtype.NamedType('clientId', univ.Integer()),
namedtype.NamedType('MachineName', char.UTF8String()),
namedtype.NamedType('UserName', char.UTF8String()),
namedtype.NamedType('ProcessName', char.UTF8String())
)
class EnrollmentCSP( | univ.Sequence):
pass
EnrollmentCSP.componentType = namedtype.NamedTypes(
namedtype.NamedType('KeySpec', univ.Integer()),
namedtype.NamedType('Name', char.BMPString()),
namedtype.NamedType('Signature', univ.BitString())
)
openTypeMap = {
# attributes
univ.ObjectIdentifier('1.3.6.1.4.1.311.13.2.3'): char.IA5String(),
univ.ObjectIdentifier('1.3.6.1.4.1.311.13.2.2'): EnrollmentCSP(),
univ.ObjectIdentifier('1.3.6.1.4.1.311.21.20'): ClientInformation(),
# algorithm identifier parameters
univ.ObjectIdentifier('1.2.840.113549.1.1.1'): univ.Null(""),
univ.ObjectIdentifier('1.2.840.113549.1.1.5'): univ.Null(""),
univ.ObjectIdentifier('1.2.840.113549.1.1.11'): univ.Null(""),
}
openTypeMap.update(rfc5652.cmsAttributesMap)
openTypeMap.update(rfc6402.cmcControlAttributesMap)
substrate = pem.readBase64fromText(self.pem_text)
asn1Object, rest = der_decoder(
substrate, asn1Spec=rfc5652.ContentInfo(), decodeOpenTypes=True)
self.assertFalse(rest)
self.assertTrue(asn1Object.prettyPrint())
self.assertEqual(substrate, der_encoder(asn1Object))
eci = asn1Object['content']['encapContentInfo']
self.assertEqual(rfc6402.id_cct_PKIData, eci['eContentType'])
substrate = eci['eContent']
asn1Object, rest = der_decoder(
substrate, asn1Spec=rfc6402.PKIData(), openTypes=openTypeMap,
decodeOpenTypes=True)
self.assertFalse(rest)
self.assertTrue(asn1Object.prettyPrint())
self.assertEqual(substrate, der_encoder(asn1Object))
for req in asn1Object['reqSequence']:
cr = req['tcr']['certificationRequest']
sig_alg = cr['signatureAlgorithm']
self.assertIn(sig_alg['algorithm'], openTypeMap)
self.assertEqual(univ.Null(""), sig_alg['parameters'])
cri = cr['certificationRequestInfo']
spki_alg = cri['subjectPublicKeyInfo']['algorithm']
self.assertIn(spki_alg['algorithm'], openTypeMap)
self.assertEqual(univ.Null(""), spki_alg['parameters'])
attrs = cr['certificationRequestInfo']['attributes']
for attr in attrs:
self.assertIn( attr['attrType'], openTypeMap)
if attr['attrType'] == univ.ObjectIdentifier('1.3.6.1.4.1.311.13.2.3'):
self.assertEqual("6.2.9200.2", attr['attrValues'][0])
else:
self.assertTrue(attr['attrValues'][0].hasValue())
suite = unittest.TestLoader().loadTestsFromModule(sys.modules[__name__])
if __name__ == '__main__':
unittest.TextTestRunner(verbosity=2).run(suite)
|
PythonCharmers/orange3 | Orange/widgets/gui.py | Python | gpl-3.0 | 126,299 | 0.000515 | import math
import os
import re
import itertools
from types import LambdaType
import pkg_resources
import numpy
from PyQt4 import QtGui, QtCore, QtWebKit
from PyQt4.QtCore import Qt, pyqtSignal as Signal
from PyQt4.QtGui import QCursor, QApplication
import Orange.data
from Orange.widgets.utils import getdeepattr
from Orange.data import ContinuousVariable, StringVariable, DiscreteVariable, Variable
from Orange.widgets.utils import vartype
from Orange.widgets.utils.constants import CONTROLLED_ATTRIBUTES, ATTRIBUTE_CONTROLLERS
from Orange.util import namegen
YesNo = NoYes = ("No", "Yes")
_enter_icon = None
__re_label = re.compile(r"(^|[^%])%\((?P<value>[a-zA-Z]\w*)\)")
OrangeUserRole = itertools.count(Qt.UserRole)
LAMBDA_NAME = namegen('_lambda_')
def resource_filename(path):
"""
Return a resource filename (package data) for path.
"""
return pkg_resources.resource_filename(__name__, path)
class TableWidget(QtGui.QTableWidget):
""" An easy to use, row-oriented table widget """
ROW_DATA_ROLE = QtCore.Qt.UserRole + 1
ITEM_DATA_ROLE = ROW_DATA_ROLE + 1
class TableWidgetNumericItem(QtGui.QTableWidgetItem):
"""TableWidgetItem that sorts numbers correctly!"""
def __lt__(self, other):
return (self.data(TableWidget.ITEM_DATA_ROLE) <
other.data(TableWidget.ITEM_DATA_ROLE))
def selectionChanged(self, selected:[QtGui.QItemSelectionRange], deselected:[QtGui.QItemSelectionRange]):
"""Override or monkey-patch this method to catch selection changes"""
super().selectionChanged(selected, deselected)
def __setattr__(self, attr, value):
"""
The following selectionChanged magic ensures selectionChanged
slot, when monkey-patched, always calls the super's selectionChanged
first (--> avoids Qt quirks), and the user needs not care about that.
"""
if attr == 'selectionChanged':
func = value
@QtCore.pyqtSlot(QtGui.QItemSelection, QtGui.QItemSelection)
def _f(selected, deselected):
super(self.__class__, self).selectionChanged(selected, deselected)
func(selected, deselected)
value = _f
self.__dict__[attr] = value
def _update_headers(func):
"""Decorator to update certain table features after method calls"""
def _f(self, *args, **kwargs):
func(self, *args, **kwargs)
if self.col_labels is not None:
self.setHorizontalHeaderLabels(self.col_labels)
if self.row_labels is not None:
self.setVerticalHeaderLabels(self.row_labels)
if self.stretch_last_section:
self.horizontalHeader().setStretchLastSection(True)
return _f
@_update_headers
def __init__(self,
parent=None,
col_labels=None,
row_labels=None,
stretch_last_section=True,
multi_selection=False,
select_rows=False):
"""
Parameters
----------
parent: QObject
Parent QObject. If parent has layout(), this widget is added to it.
col_labels: list of str
Labels or [] (sequential numbers) or None (no horizontal header)
row_label: list_of_str
Labels or [] (sequential numbers) or None (no vertical header)
stretch_last_section: bool
multi_selection: bool
Single selection if False
select_rows: bool
If True, select whole rows instead of individual cells.
"""
super().__init__(parent)
self._column_filter = {}
self.col_labels = col_labels
self.row_labels = row_labels
self.stretch_last_section = stretch_last_section
try: parent.layout().addWidget(self)
except (AttributeError, TypeError): pass
if col_labels is None:
self.horizontalHeader().setVisible(False)
if row_labels is None:
self.verticalHeader().setVisible(False)
if multi_selection:
self.setSelectionMode(self.MultiSelection)
if select_rows:
self.setSelectionBehavior(self.SelectRows)
self.setHorizontalScrollMode(self.ScrollPerPixel)
self.setVerticalScrollMode(self.ScrollPerPixel)
self.setEditTriggers(self.NoEditTriggers)
self.setAlternatingRowColors(True)
self.setShowGrid(False)
self.setSortingEnabled(True)
@_update_headers
def addRow(self, items:tuple, data=None):
"""
Appends iterable of `items` as the next row, optionally setting row
data to `data`. Each item of `items` can be a string or tuple
(item_name, item_data) if individual, cell-data is required.
"""
row_data = data
row = self.rowCount()
self.insertRow(row)
| col_count = max(len(items), self.columnCount())
if col_coun | t != self.columnCount():
self.setColumnCount(col_count)
for col, item_data in enumerate(items):
if isinstance(item_data, str):
name = item_data
elif hasattr(item_data, '__iter__') and len(item_data) == 2:
name, item_data = item_data
elif isinstance(item_data, float):
name = '{:.4f}'.format(item_data)
else:
name = str(item_data)
if isinstance(item_data, (float, int, numpy.number)):
item = self.TableWidgetNumericItem(name)
else:
item = QtGui.QTableWidgetItem(name)
item.setData(self.ITEM_DATA_ROLE, item_data)
if col in self._column_filter:
item = self._column_filter[col](item) or item
self.setItem(row, col, item)
self.resizeColumnsToContents()
self.resizeRowsToContents()
if row_data is not None:
self.setRowData(row, row_data)
def rowData(self, row:int):
return self.item(row, 0).data(self.ROW_DATA_ROLE)
def setRowData(self, row:int, data):
self.item(row, 0).setData(self.ROW_DATA_ROLE, data)
def setColumnFilter(self, item_filter_func, columns:int or list):
"""
Pass item(s) at column(s) through `item_filter_func` before
insertion. Useful for setting specific columns to bold or similar.
"""
try: iter(columns)
except TypeError: columns = [columns]
for i in columns:
self._column_filter[i] = item_filter_func
def clear(self):
super().clear()
self.setRowCount(0)
self.setColumnCount(0)
def selectFirstRow(self):
if self.rowCount() > 0:
self.selectRow(0)
def selectRowsWhere(self, col, value, n_hits=-1,
flags=QtCore.Qt.MatchExactly, _select=True):
"""
Select (also return) at most `n_hits` rows where column `col`
has value (``data()``) `value`.
"""
model = self.model()
matches = model.match(model.index(0, col),
self.ITEM_DATA_ROLE,
value,
n_hits,
flags)
model = self.selectionModel()
selection_flag = model.Select if _select else model.Deselect
for index in matches:
if _select ^ model.isSelected(index):
model.select(index, selection_flag | model.Rows)
return matches
def deselectRowsWhere(self, col, value, n_hits=-1,
flags=QtCore.Qt.MatchExactly):
"""
Deselect (also return) at most `n_hits` rows where column `col`
has value (``data()``) `value`.
"""
return self.selectRowsWhere(col, value, n_hits, flags, False)
class WebviewWidget(QtWebKit.QWebView):
"""WebKit window in a window"""
def __init__(self, parent=None, bridge=None, html=None, debug=None):
"""
Parameters
----------
parent: QObject
Parent QObject. If parent has layout(), this widget is |
naresh21/synergetics-edx-platform | cms/djangoapps/contentstore/courseware_index.py | Python | agpl-3.0 | 27,600 | 0.003043 | """ Code to allow module store to interface with courseware index """
from __future__ import absolute_import
from abc import ABCMeta, abstractmethod
from datetime import timedelta
import logging
import re
from six import add_metaclass
from django.conf import settings
from django.utils.translation import ugettext_lazy, ugettext as _
from django.core.urlresolvers import resolve
from contentstore.course_group_config import GroupConfiguration
from course_modes.models import CourseMode
from eventtracking import tracker
from openedx.core.lib.courses import course_image_url
from search.search_engine_base import SearchEngine
from xmodule.annotator_mixin import html_to_text
from xmodule.modulestore import ModuleStoreEnum
from xmodule.library_tools import normalize_key_for_search
# REINDEX_AGE is the default amount of time that we look back for changes
# that might have happened. If we are provided with a time at which the
# indexing is triggered, then we know it is safe to only index items
# recently changed at that time. This is the time period that represents
# how far back from the trigger point to look back in order to index
REINDEX_AGE = timedelta(0, 60) # 60 seconds
log = logging.getLogger('edx.modulestore')
def strip_html_content_to_text(html_content):
""" Gets only the textual part for html content - useful for building text to be searched """
# Removing HTML-encoded non-breaking space characters
text_content = re.sub(r"(\s| |//)+", " ", html_to_text(html_content))
# Removing HTML CDATA
text_content = re.sub(r"<!\[CDATA\[.*\]\]>", "", text_content)
# Removing HTML comments
text_content = re.sub(r"<!--.*-->", "", text_content)
return text_content
def indexing_is_enabled():
"""
Checks to see if the indexing feature is enabled
"""
return settings.FEATURES.get('ENABLE_COURSEWARE_INDEX', False)
class SearchIndexingError(Exception):
""" Indicates some error(s) occured during indexing """
def __init__(self, message, error_list):
super(SearchIndexingError, self).__init__(message)
| self.error_list = error_list
@add_metaclass(ABCMeta)
class SearchIndexerBase(object):
"""
Base class to perform indexin | g for courseware or library search from different modulestores
"""
__metaclass__ = ABCMeta
INDEX_NAME = None
DOCUMENT_TYPE = None
ENABLE_INDEXING_KEY = None
INDEX_EVENT = {
'name': None,
'category': None
}
@classmethod
def indexing_is_enabled(cls):
"""
Checks to see if the indexing feature is enabled
"""
return settings.FEATURES.get(cls.ENABLE_INDEXING_KEY, False)
@classmethod
@abstractmethod
def normalize_structure_key(cls, structure_key):
""" Normalizes structure key for use in indexing """
@classmethod
@abstractmethod
def _fetch_top_level(cls, modulestore, structure_key):
""" Fetch the item from the modulestore location """
@classmethod
@abstractmethod
def _get_location_info(cls, normalized_structure_key):
""" Builds location info dictionary """
@classmethod
def _id_modifier(cls, usage_id):
""" Modifies usage_id to submit to index """
return usage_id
@classmethod
def remove_deleted_items(cls, searcher, structure_key, exclude_items):
"""
remove any item that is present in the search index that is not present in updated list of indexed items
as we find items we can shorten the set of items to keep
"""
response = searcher.search(
doc_type=cls.DOCUMENT_TYPE,
field_dictionary=cls._get_location_info(structure_key),
exclude_dictionary={"id": list(exclude_items)}
)
result_ids = [result["data"]["id"] for result in response["results"]]
searcher.remove(cls.DOCUMENT_TYPE, result_ids)
@classmethod
def index(cls, modulestore, structure_key, triggered_at=None, reindex_age=REINDEX_AGE):
"""
Process course for indexing
Arguments:
modulestore - modulestore object to use for operations
structure_key (CourseKey|LibraryKey) - course or library identifier
triggered_at (datetime) - provides time at which indexing was triggered;
useful for index updates - only things changed recently from that date
(within REINDEX_AGE above ^^) will have their index updated, others skip
updating their index but are still walked through in order to identify
which items may need to be removed from the index
If None, then a full reindex takes place
Returns:
Number of items that have been added to the index
"""
error_list = []
searcher = SearchEngine.get_search_engine(cls.INDEX_NAME)
if not searcher:
return
structure_key = cls.normalize_structure_key(structure_key)
location_info = cls._get_location_info(structure_key)
# Wrap counter in dictionary - otherwise we seem to lose scope inside the embedded function `prepare_item_index`
indexed_count = {
"count": 0
}
# indexed_items is a list of all the items that we wish to remain in the
# index, whether or not we are planning to actually update their index.
# This is used in order to build a query to remove those items not in this
# list - those are ready to be destroyed
indexed_items = set()
# items_index is a list of all the items index dictionaries.
# it is used to collect all indexes and index them using bulk API,
# instead of per item index API call.
items_index = []
def get_item_location(item):
"""
Gets the version agnostic item location
"""
return item.location.version_agnostic().replace(branch=None)
def prepare_item_index(item, skip_index=False, groups_usage_info=None):
"""
Add this item to the items_index and indexed_items list
Arguments:
item - item to add to index, its children will be processed recursively
skip_index - simply walk the children in the tree, the content change is
older than the REINDEX_AGE window and would have been already indexed.
This should really only be passed from the recursive child calls when
this method has determined that it is safe to do so
Returns:
item_content_groups - content groups assigned to indexed item
"""
is_indexable = hasattr(item, "index_dictionary")
item_index_dictionary = item.index_dictionary() if is_indexable else None
# if it's not indexable and it does not have children, then ignore
if not item_index_dictionary and not item.has_children:
return
item_content_groups = None
if item.category == "split_test":
split_partition = item.get_selected_partition()
for split_test_child in item.get_children():
if split_partition:
for group in split_partition.groups:
group_id = unicode(group.id)
child_location = item.group_id_to_child.get(group_id, None)
if child_location == split_test_child.location:
groups_usage_info.update({
unicode(get_item_location(split_test_child)): [group_id],
})
for component in split_test_child.get_children():
groups_usage_info.update({
unicode(get_item_location(component)): [group_id]
})
if groups_usage_info:
item_location = get_item_location(item)
item_content_groups = groups_usage_info.get(unicode(item_location), None)
item_id = unicode(c |
openstack/neutron-lib | neutron_lib/api/definitions/tag_ports_during_bulk_creation.py | Python | apache-2.0 | 1,024 | 0 | # Copyright (c) 2019 Verizon Media
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either e | xpress or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
ALIAS = 'tag-ports-during-bulk-creation'
IS_SHIM_EXTENSION = True
IS_STANDARD_ATTR_EXTENSION = False
NAME = 'Tag Ports During Bulk Creation'
DESCRIPTION = 'Allow to tag ports during bulk creation'
UPDATED_TIMESTAMP = '2019-12-29T19:00:00-00:00'
RESOURCE_ATTRIBUTE_MAP = {}
SUB_RESOURCE_ATTRIBUTE_MAP = {}
ACTION_MAP = {}
REQUIRED_EXTENSIONS = []
OPTIONAL_EXTENSIONS = []
ACTION_ | STATUS = {}
|
AxisPhilly/lobbying.ph-django | lobbyingph/migrations/0030_auto__add_field_agency_alias.py | Python | mit | 14,317 | 0.007753 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Agency.alias'
db.add_column('lobbyingph_agency', 'alias',
self.gf('django.db.models.fields.CharField')(default='', max_length=100),
keep_default=False)
def backwards(self, orm):
# Deleting field 'Agency.alias'
db.delete_column('lobbyingph_agency', 'alias')
models = {
'lobbyingph.agency': {
'Meta': {'ordering': "['name']", 'object_name': 'Agency'},
'alias': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'lobbyingph.article': {
'Meta': {'ordering': "['-date']", 'object_name': 'Article'},
'date': ('django.db.models.fields.DateField', [], {'default': 'datetime.datetime(2012, 10, 19, 0, 0)'}),
'headline': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'issue': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['lobbyingph.Issue']", 'null': 'True', 'blank': 'True'}),
'publisher': ('django.db.models.fields.SmallIntegerField', [], {'null': 'True'}),
'quote': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200'})
},
'lobbyingph.bill': {
'Meta': {'ordering': "['number']", 'object_name': 'Bill'},
'bill_type': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'number': ('django.db.models.fields.CharField', [], {'default': '0', 'max_length': '10'}),
'url': ('django.db.models.fields.URLField', [], {'default': "'http://legislation.phila.gov/detailreport/?key='", 'max_length': '200'})
},
'lobbyingph.category': {
'Meta': {'ordering': "['name']", 'object_name': 'Category'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'lobbyingph.communication_method': {
'Meta': {'ordering': "['name']", 'object_name': 'Communication_Method'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'lobbyingph.exp_direct_comm': {
'Meta': {'object_name': 'Exp_Direct_Comm'},
'agencies': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['lobbyingph.Agency']", 'null': 'True', 'blank': 'True'}),
'bill': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['lobbyingph.Bill']", 'null': 'True', 'blank': 'True'}),
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['lobbyingph.Category']"}),
'filing': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['lobbyingph.Filing']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'issue': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['lobbyingph.Issue']", 'null': 'True', 'blank': 'True'}),
'officials': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['lobbyingph.Official']", 'null': 'True', 'blank': 'True'}),
'other_desc': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'position': ('django.db.models.fields.SmallIntegerField', [], {'null': 'True', 'blank': 'True'})
},
'lobbyingph.exp_indirect_comm': {
'Meta': {'object_name': 'Exp_Indirect_Comm'},
'agency': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['lobbyingph.Agency']", 'null': 'True', 'blank': 'True'}),
'bill': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['lobbyingph.Bill']", 'null': 'True', 'blank': 'True'}),
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['lobbyingph.Category']"}),
'filing': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['lobbyingph.Filing']"}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['lobbyingph.Receipent_Group']", 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), |
'issue': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['lobbyingph.Issue']", 'null': 'True', 'blank': 'True'}),
'methods': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['lobbyingph.Communication_Method']", 'null': 'True', 'blank': 'True'}),
'officials': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['lobbyingph.Official']", 'null': 'True', 'blan | k': 'True'}),
'other_desc': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'position': ('django.db.models.fields.SmallIntegerField', [], {})
},
'lobbyingph.exp_other': {
'Meta': {'object_name': 'Exp_Other'},
'agency': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['lobbyingph.Agency']", 'null': 'True'}),
'date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'filing': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['lobbyingph.Filing']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'official': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['lobbyingph.Official']", 'null': 'True'}),
'place': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'source': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['lobbyingph.Principal']", 'null': 'True'}),
'value': ('django.db.models.fields.DecimalField', [], {'default': '0.0', 'max_digits': '12', 'decimal_places': '2'})
},
'lobbyingph.filing': {
'Meta': {'object_name': 'Filing'},
'corrected': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'error_description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'errors': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'firms': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['lobbyingph.Firm']", 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'lobbyists': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['lobbyingph.Lobbyist']", 'null': 'True', 'blank': 'True'}),
'principal': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['lobbyingph.Principal']", 'null': 'True', 'blank': 'True'}),
'quarter': ('django.db.models.fields.CharField', [], {'max_length': '2'}),
'total_exp_direct_comm': ('django.db.models.fields.DecimalField', [], {'default': '0 |
osbd/osbd-2016 | slides/crist/get_data.py | Python | mit | 1,419 | 0 | import os
import urllib
from glob import glob
import dask.bag as db
import numpy as np
import zarr
from dask.diagnostics import ProgressBar
from netCDF4 import Dataset
def download(url):
opener = urllib.URLopener()
filename = os.path.basename(url)
path = os.path.join('data', filename)
opener.retrieve(url, path)
def download_weather():
# Create data directory
if not os.path.exists('data'):
os.mkdir('data')
template = ('http://www.esrl.noaa.gov/psd/thredds/fileServer/Datasets/'
'noaa.oisst.v2.highres/sst.day.mean.{year}.v2.nc')
urls = [template.format(year=year) for year in range(1981, 2016)]
b = db.from_sequence(urls, partition_size=1)
print("Downloading Weather Data")
print("----------------- | -------")
with ProgressBar():
b.map(download).compute(n_workers=8)
def transform_weather():
if os.path.exists('sst.day.mean.v2.zarr'):
return
datasets = [Dataset(path)['sst'] for path in sorted(glob('data/*.nc'))]
n = sum(d.shape[0] for d in datasets)
shape = (n, 720, 1440)
chunks = (72, 360, 360)
f = | zarr.open_array('sst.day.mean.v2.zarr', shape=shape, chunks=chunks,
dtype='f4')
i = 0
for d in datasets:
m = d.shape[0]
f[i:i + m] = d[:].filled(np.nan)
i += m
if __name__ == '__main__':
download_weather()
transform_weather()
|
CommitAnalyzingService/CAS_Analyzer | githubissuetracker.py | Python | gpl-2.0 | 3,106 | 0.03123 | """
file: gitissuetracker.py
author: Christoffer Rosen <cbr4830@rit.edu>
date: December, 2013
description: Represents a Github Issue tracker object used
for getting the dates issues were opened.
12/12/13: Doesn't currently support private repos
"""
import requests, json, dateutil.parser, time
from caslogging import logging
class GithubIssueTracker:
    """Represents a GitHub issue tracker for one ``owner/repo``.

    Authenticates against the GitHub API on construction and exposes
    :meth:`getDateOpened` to look up when an issue was opened.
    Doesn't currently support private repos.
    """

    owner = None  # Owner of the github repo
    repo = None  # The repo name
    request_repos = "https://api.github.com/repos"  # Request url to get issue info
    request_auth = "https://api.github.com/authorizations"  # Request url for auth

    def __init__(self, owner, repo):
        """Store the repository coordinates and authenticate immediately."""
        self.owner = owner
        self.repo = repo
        self.auth_token = None
        self.authenticate()  # Authenticate our app

    def authenticate(self):
        """Authenticate this application to GitHub and cache the OAuth token.

        On failure ``self.auth_token`` stays ``None`` and an error is logged.
        Waits out GitHub rate limiting, checking every 10 minutes.
        """
        s = requests.Session()
        # SECURITY: hard-coded credentials committed to source control --
        # these should be moved to configuration / environment variables.
        s.auth = ("cas-user", "riskykiwi1")
        payload = {"scopes": ["repo"]}
        r = s.get(self.request_auth, params=payload)

        if r.headers.get('x-ratelimit-remaining') == '0':
            logging.info("Github quota limit hit -- waiting")
            # Wait up to a hour until we can continue..
            while r.headers.get('x-ratelimit-remaining') == '0':
                time.sleep(600)  # Wait 10 minutes and try again
                r = s.get(self.request_auth, params=payload)

        data = r.json()

        if r.status_code >= 400:
            # Error responses are a dict with a 'message' key; the original
            # indexed [0] into it unconditionally, which raised on errors.
            msg = data.get('message')
            logging.error("Failed to authenticate issue tracker: \n" + msg)
            return  # Exit
        else:
            # Success responses are a list of authorization objects.
            data = data[0]
            self.auth_token = data.get("token")
            requests_left = r.headers.get('x-ratelimit-remaining')
            logging.info("Analyzer has " + requests_left + " issue tracker calls left this hour")

    def getDateOpened(self, issueNumber):
        """Return the unix timestamp at which *issueNumber* was opened.

        Returns ``None`` if the issue cannot be found for whichever reason.
        Transparently waits out GitHub rate limiting before retrying.
        """
        header = {'Authorization': 'token ' + self.auth_token}
        url = (self.request_repos + "/" + self.owner + "/" +
               self.repo + "/issues/" + issueNumber)
        r = requests.get(url, headers=header)
        data = r.json()

        # If forbidden
        if r.status_code == 403:
            # Check the api quota
            if r.headers.get('x-ratelimit-remaining') == '0':
                logging.info("Github quota limit hit -- waiting")
                # Wait up to a hour until we can continue..
                while r.headers.get('x-ratelimit-remaining') == '0':
                    time.sleep(600)  # Wait 10 minutes and try again
                    r = requests.get(url, headers=header)
                data = r.json()
                # Fall through and parse the retried response (the original
                # discarded it and always returned None here).
            else:
                # Forbidden for a reason other than rate limiting.
                return None
        # Check for other error codes
        elif r.status_code >= 400:
            msg = data.get('message')
            logging.error("ISSUE TRACKER FAILURE: \n" + msg)
            return None

        try:
            return (dateutil.parser.parse(data.get('created_at'))).timestamp()
        except Exception:
            logging.error("ISSUE TRACKER FAILURE: Could not get created_at "
                          "from github issues API")
            return None
|
bregman-arie/ansible | lib/ansible/modules/packaging/os/redhat_subscription.py | Python | gpl-3.0 | 28,269 | 0.002299 | #!/usr/bin/python
# James Laska (jlaska@redhat.com)
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: redhat_subscription
short_description: Manage registration and subscriptions to RHSM using the C(subscription-manager) command
description:
- Manage registration and subscription to the Red Hat Subscription Management entitlement platform using the C(subscription-manager) command
version_added: "1.2"
author: "Barnaby Court (@barnabycourt)"
notes:
- In order to register a system, subscription-manager requires either a username and password, or an activationkey and an Organization ID.
- Since 2.5 values for I(server_hostname), I(server_insecure), I(rhsm_baseurl),
I(server_proxy_hostname), I(server_proxy_port), I(server_proxy_user) and
I(server_proxy_password) are no longer taken from the C(/etc/rhsm/rhsm.conf)
config file and default to None.
requirements:
- subscription-manager
options:
state:
description:
- whether to register and subscribe (C(present)), or unregister (C(absent)) a system
choices: [ "present", "absent" ]
default: "present"
username:
description:
- access.redhat.com or Sat6 username
password:
description:
- access.redhat.com or Sat6 password
server_hostname:
description:
- Specify an alternative Red Hat Subscription Management or Sat6 server
server_insecure:
description:
- Enable or disable https server certificate verification when connecting to C(server_hostname)
rhsm_baseurl:
description:
- Specify CDN baseurl
server_proxy_hostname:
description:
- Specify a HTTP proxy hostname
version_added: "2.4"
server_proxy_port:
description:
- Specify a HTTP proxy port
version_added: "2.4"
server_proxy_user:
description:
- Specify a user for HTTP proxy with basic authentication
version_added: "2.4"
server_proxy_password:
description:
- Specify a password for HTTP proxy with basic authentication
version_added: "2.4"
auto_attach:
description:
- Upon successful registration, auto-consume available subscriptions
- Added in favor of depracated autosubscribe in 2.5.
type: bool
default: 'no'
version_added: "2.5"
aliases: [autosubscribe]
activationkey:
description:
- supply an activation key for use with registration
org_id:
description:
- Organization ID to use in conjunction with activationkey
version_added: "2.0"
environment:
description:
- Register with a specific environment in the destination org. Used with Red Hat Satellite 6.x or Katello
version_added: "2.2"
pool:
description:
- |
Specify a subscription pool name to consume. Regular expressions accepted. Use I(pool_ids) instead if
possible, as it is much faster. Mutually exclusive with I(pool_ids).
default: '^$'
pool_ids:
description:
- |
Specify subscription pool IDs to consume. Prefer over I(pool) when possible as it is much faster.
A pool ID may be specified as a C(string) - just the pool ID (ex. C(0123456789abcdef0123456789abcdef)),
or as a C(dict) with the pool ID as the key, and a quantity as the value (ex.
C(0123456789abcdef0123456789abcdef: 2). If the quantity is provided, it is used to consume multiple
entitlements from a pool (the pool must support this). Mutually exclusive with I(pool).
default: []
version_added: "2.4"
consumer_type:
description:
- The type of unit to register, defaults to system
version_added: "2.1"
consumer_name:
description:
- Name of the system to register, defaults to the hostname
version_added: "2.1"
consumer_id:
description:
- |
References an existing consumer ID to resume using a previous registration
for this system. If the system's identity certificate is lost or corrupted,
this option allows it to resume using its previous identity and subscriptions.
The default is to not specify a consumer ID so a new ID is created.
version_added: "2.1"
force_register:
description:
- Register the system even if it is already registered
type: bool
default: 'no'
version_added: "2.2"
'''
EXAMPLES = '''
- name: Register as user (joe_user) with password (somepass) and auto-subscribe to available content.
redhat_subscription:
state: present
username: joe_user
password: somepass
auto_attach: true
- name: Same as above but subscribe to a specific pool by ID.
redhat_subscription:
state: present
username: joe_user
password: somepass
pool_ids: 0123456789abcdef0123456789abcdef
- name: Register and subscribe to multiple pools.
redhat_subscription:
state: present
username: joe_user
password: somepass
pool_ids:
- 0123456789abcdef0123456789abcdef
- 1123456789abcdef0123456789abcdef
- name: Same as above but consume multiple entitlements.
redhat_subscription:
state: present
username: joe_user
password: somepass
pool_ids:
- 0123456789abcdef0123456789abcdef: 2
- 1123456789abcdef0123456789abcdef: 4
- name: Register and pull existing system data.
redhat_subscription:
state: present
username: joe_user
password: somepass
consumer_id: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
- name: Register with activationkey and consume subscriptions matching Red Hat Enterprise Server or Red Hat Virtualization
redhat_subscription:
state: present
activationkey: 1-222333444
org_id: 222333444
pool: '^(Red Hat Enterprise Server|Red Hat Virtualization)$'
- name: Update the consumed subscriptions from the previous example (remove Red Hat Virtualization subscription)
redhat_subscription:
state: present
activationkey: 1-222333444
org_id: 222333444
pool: '^Red Hat Enterprise Server$'
- name: Register as user credentials into given environment (against Red Hat Satellite 6.x), and auto-subscribe.
redhat_subscription:
state: present
username: joe_user
password: somepass
environment: Library
auto_attach: true
'''
RETURN = '''
subscribed_pool_ids:
description: List of pool IDs to which system is now subscribed
returned: success
type: complex
contains: {
"8a85f9815ab905d3015ab928c7005de4": "1"
}
'''
import os
import re
import shutil
import tempfile
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_native
from ansible.module_utils.six.moves import configparser
SUBMAN_CMD = None
class RegistrationBase(object):
def __init__(self, module, username=None, password=None):
self.module = module
self.username = username
self.password = password
def configure(self):
raise NotImplementedError("Must be implemented by a sub-class")
def enable(self):
# Remove any existing redhat.repo
redhat_repo = '/etc/yum.repos.d/redhat.repo'
if os.path.isfile(redhat_repo):
os.unlink(redhat_repo)
def register(self):
raise NotImplementedError("Must be implemented by a sub-class")
def unregister(self):
raise NotImplementedError("Must be implemented by a sub-class")
def unsubscribe(self):
raise NotImplementedError("Must be implemented by a sub-class")
def update_plugin_conf(self, plugin, enabled=True):
plugin_conf |
gagoncal/Selenium | setuptools-21.2.2/setuptools/tests/test_test.py | Python | lgpl-2.1 | 2,373 | 0 | # -*- coding: UTF-8 -*-
from __future__ import unicode_literals
import os
import site
from distutils.errors import DistutilsError
import pytest
from setuptools.command.test import test
from setuptools.dist import Distribution
from .textwrap import DALS
from . import contexts
SETUP_PY = DALS("""
from setuptools import setup
setup(name='foo',
packages=['name', 'name.space', 'name.space.tests'],
namespace_packages=['name'],
test_suite='name.space.tests.test_suite',
)
""")
NS_INIT = DALS("""
# -*- coding: Latin-1 -*-
# Söme Arbiträry Ünicode to test Distribute Issüé 310
try:
__import__('pkg_resources').declare_namespace(__name__)
except ImportError:
from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)
""")
# Sample test module (reconstructed from garbled lines): deliberately uses a
# Python 2 print statement so the suite only passes on py3 via 2to3.
TEST_PY = DALS("""
    import unittest

    class TestTest(unittest.TestCase):
        def test_test(self):
            print "Foo" # Should fail under Python 3 unless 2to3 is used

    test_suite = unittest.makeSuite(TestTest)
    """)
@pytest.fixture
def sample_test(tmpdir_cwd):
    """Materialise the sample ``name.space.tests`` namespace package
    (mirroring SETUP_PY) in the current working directory."""
    os.makedirs('name/space/tests')

    # Plain-text files can be written uniformly...
    text_files = (
        ('setup.py', SETUP_PY),
        ('name/space/__init__.py', '#empty\n'),
        ('name/space/tests/__init__.py', TEST_PY),
    )
    for path, content in text_files:
        with open(path, 'wt') as stream:
            stream.write(content)

    # ...while the namespace __init__ is intentionally Latin-1 encoded
    # (it exercises Distribute issue 310), so it is written as bytes.
    with open('name/__init__.py', 'wb') as stream:
        stream.write(NS_INIT.encode('Latin-1'))
@pytest.mark.skipif('hasattr(sys, "real_prefix")')
@pytest.mark.usefixtures('user_override')
@pytest.mark.usefixtures('sample_test')
class TestTestTest:
    """Run the setuptools ``test`` command against the sample project."""

    def test_test(self):
        # Distribution metadata mirrors SETUP_PY; use_2to3 converts the
        # py2-style print in TEST_PY when running under Python 3.
        params = dict(
            name='foo',
            packages=['name', 'name.space', 'name.space.tests'],
            namespace_packages=['name'],
            test_suite='name.space.tests.test_suite',
            use_2to3=True,
        )
        dist = Distribution(params)
        dist.script_name = 'setup.py'
        cmd = test(dist)
        # Install the package into the per-user site-packages.
        cmd.user = 1
        cmd.ensure_finalized()
        cmd.install_dir = site.USER_SITE
        cmd.user = 1
        with contexts.quiet():
            # The test runner calls sys.exit
            with contexts.suppress_exceptions(SystemExit):
                cmd.run()
|
uber/pyro | pyro/contrib/oed/util.py | Python | apache-2.0 | 1,232 | 0.002435 | # Copyright (c) 2017-2019 Uber Technologies, Inc.
# SPDX-License-Identifier: Apache-2.0
import math
import torch
from pyro.contrib.oed.glmm import analytic_posterior_cov
f | rom pyro.contrib.util import get_indices
from pyro.infer.autoguide.utils import mean_field_entropy
def linear_model_ground_truth(
    model, design, observation_labels, target_labels, eig=True
):
    """Analytic EIG / posterior entropy for a (conjugate) linear model.

    Computes, for every design in the trailing-two-dims batch of *design*,
    the entropy of the analytic Gaussian posterior over the *target_labels*
    parameters. If *eig* is True, returns prior entropy minus posterior
    entropy (the expected information gain) instead.

    Returns a tensor of shape ``design.shape[:-2]``.
    """
    if isinstance(target_labels, str):
        target_labels = [target_labels]

    # Prior covariance is diagonal, built from the per-parameter sds.
    w_sd = torch.cat(list(model.w_sds.values()), dim=-1)
    prior_cov = torch.diag(w_sd ** 2)
    design_shape = design.shape
    # Reconstructed from a garbled line: one analytic posterior covariance
    # per design matrix in the flattened batch.
    posterior_covs = [
        analytic_posterior_cov(prior_cov, x, model.obs_sd)
        for x in torch.unbind(design.reshape(-1, design_shape[-2], design_shape[-1]))
    ]
    # Restrict each posterior covariance to the target parameters.
    target_indices = get_indices(target_labels, tensors=model.w_sds)
    target_posterior_covs = [
        S[target_indices, :][:, target_indices] for S in posterior_covs
    ]
    # Gaussian differential entropy: 0.5 * logdet(2*pi*e*Sigma).
    output = torch.tensor(
        [0.5 * torch.logdet(2 * math.pi * math.e * C) for C in target_posterior_covs]
    )
    if eig:
        prior_entropy = mean_field_entropy(model, [design], whitelist=target_labels)
        output = prior_entropy - output

    return output.reshape(design.shape[:-2])
|
cernanalysispreservation/analysis-preservation.cern.ch | cap/modules/repos/models.py | Python | gpl-2.0 | 6,089 | 0 | # -*- coding: utf-8 -*-
#
# This file is part of CERN Analysis Preservation Framework.
# Copyright (C) 2016 CERN.
#
# CERN Analysis Preservation Framework is free software; you can redistribute
# it and/or modify it under the terms of the GNU Genera | l Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# CERN Analysis Preservation Framework is distributed in the hope that it will
# be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with CERN Analysis Preservation Framework; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""Models for Git repositories and snapshots."""
from __future__ import absolute_import, print_function
from datetime import datetime
from invenio_accounts.models import User
from invenio_db import db
from invenio_files_rest.models import ObjectVersion
from invenio_records.models import RecordMetadata
from sqlalchemy.orm.exc import NoResultFound
from sqlalchemy_utils.types import UUIDType
from cap.types import json_type
class GitRepository(db.Model):
    """Information about a GitHub repository."""

    id = db.Column(db.Integer, primary_key=True)
    # Repository id on the remote host (e.g. the numeric GitHub repo id).
    external_id = db.Column(db.Integer, unique=False, nullable=False)
    host = db.Column(db.String(255), nullable=False)
    owner = db.Column(db.String(255), nullable=False)
    name = db.Column(db.String(255), nullable=False)

    __tablename__ = 'git_repository'
    # Trailing comma makes this a one-element tuple, as SQLAlchemy expects.
    __table_args__ = db.UniqueConstraint(
        'host', 'owner', 'name', name='uq_git_repository_unique_constraint'),

    @classmethod
    def create_or_get(cls, external_id, host, owner, name):
        """Return the row for (host, owner, name), creating it if missing.

        A newly created row is added to the session but not committed;
        the caller is responsible for committing.
        """
        try:
            repo = cls.query.filter_by(host=host, owner=owner, name=name).one()
        except NoResultFound:
            repo = cls(external_id=external_id,
                       host=host,
                       owner=owner,
                       name=name)
            db.session.add(repo)

        return repo
class GitWebhook(db.Model):
    """Webhook for a Git repository.

    One row per (event_type, repo); ``external_id`` is the hook's id on the
    remote host and ``secret`` is used to verify incoming payloads.
    """

    __tablename__ = 'git_webhook'
    # Trailing comma makes this a one-element tuple, as SQLAlchemy expects.
    __table_args__ = db.UniqueConstraint(
        'event_type', 'repo_id', name='uq_git_webhook_unique_constraint'),

    id = db.Column(db.Integer, primary_key=True)
    branch = db.Column(db.String(255), nullable=True)
    event_type = db.Column(db.String(255), nullable=False)
    external_id = db.Column(db.Integer, nullable=False)
    secret = db.Column(db.String(32), nullable=True)

    repo_id = db.Column(db.Integer, db.ForeignKey(GitRepository.id))
    # Deleting a repository deletes its webhooks as well.
    repo = db.relationship(GitRepository,
                           backref=db.backref("webhooks",
                                              cascade="all, delete-orphan"))
class GitSnapshot(db.Model):
    """Snapshot information for a Git repo."""

    __tablename__ = 'git_snapshot'

    id = db.Column(db.Integer, primary_key=True)

    # webhook payload / event
    payload = db.Column(json_type, default={}, nullable=True)

    webhook_id = db.Column(db.Integer,
                           db.ForeignKey(GitWebhook.id),
                           nullable=False)
    # Deleting a webhook deletes its snapshots as well.
    webhook = db.relationship(GitWebhook,
                              backref=db.backref("snapshots",
                                                 cascade="all, delete-orphan"))

    # Creation timestamp (naive UTC, via datetime.utcnow).
    created = db.Column(db.DateTime, default=datetime.utcnow)
class GitSubscriberSnapshots(db.Model):
    """Connection model between snapshot and webhook subscribers.

    Pure association table for the many-to-many relation used by
    ``GitWebhookSubscriber.snapshots``; the composite primary key is
    (snapshot_id, subscriber_id).
    """

    __tablename__ = 'git_subscriber_snapshots'

    snapshot_id = db.Column(db.Integer,
                            db.ForeignKey('git_snapshot.id'),
                            primary_key=True)
    subscriber_id = db.Column(db.Integer,
                              db.ForeignKey('git_subscriber.id'),
                              primary_key=True)
class GitWebhookSubscriber(db.Model):
    """Records subscribed to the git repository events.

    Links one record to one webhook (unique pair), tracks the subscribing
    user, and exposes the snapshots delivered to this subscriber.
    """

    __tablename__ = 'git_subscriber'
    # Trailing comma makes this a one-element tuple, as SQLAlchemy expects.
    __table_args__ = db.UniqueConstraint(
        'record_id',
        'webhook_id',
        name='uq_git_webhook_subscriber_unique_constraint'),

    id = db.Column(db.Integer, primary_key=True)
    # Soft-delete flag: subscriptions are marked 'deleted' rather than removed.
    status = db.Column(db.Enum('active', 'deleted',
                               name='git_webhook_status'),
                       nullable=False,
                       default='active')

    record_id = db.Column(UUIDType,
                          db.ForeignKey(RecordMetadata.id),
                          nullable=False)
    # Deleting a record deletes its webhook subscriptions as well.
    record = db.relationship(RecordMetadata,
                             backref=db.backref("webhooks",
                                                cascade="all, delete-orphan"))

    webhook_id = db.Column(db.Integer,
                           db.ForeignKey(GitWebhook.id),
                           nullable=False)
    webhook = db.relationship(GitWebhook,
                              backref=db.backref("subscribers",
                                                 cascade="all, delete-orphan"))

    user_id = db.Column(db.Integer, db.ForeignKey(User.id), nullable=False)
    user = db.relationship(User)

    # Snapshots delivered to this subscriber, newest first.
    snapshots = db.relationship(GitSnapshot,
                                order_by="desc(GitSnapshot.created)",
                                secondary='git_subscriber_snapshots')

    @property
    def repo(self):
        """Repository that this subscriber's webhook belongs to."""
        return self.webhook.repo
# Monkey-patch invenio's ObjectVersion so every stored file version can point
# back to the Git snapshot it was harvested from (nullable for non-git files).
ObjectVersion.snapshot_id = db.Column(db.Integer,
                                      db.ForeignKey(GitSnapshot.id),
                                      nullable=True)
ObjectVersion.snapshot = db.relationship(GitSnapshot, backref='files')
|
daodaoliang/python-phonenumbers | python/phonenumbers/geodata/data9.py | Python | apache-2.0 | 912,681 | 0.021683 | """Per-prefix data, mapping each prefix to a dict of locale:name.
Auto-generated file, do not edit by hand.
"""
from ..util import u
# Copyright (C) 2011-2015 The Libphonenumber Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
data = {
'86136376':{'en': 'Haikou, Hainan', 'zh': u('\u6d77\u5357\u7701\u6d77\u53e3\u5e02')},
'86136377':{'en': 'Chongqing', 'zh': u('\u91cd\u5e86\u5e02')},
'86136375':{'en': 'Haikou, Hainan', 'zh': u('\u6d77\u5357\u7701\u6d77\u53e3\u5e02')},
'86136378':{'en': 'Chongqing', 'zh': u('\u91cd\u5e86\u5e02')},
'86136379':{'en': 'Chongqing', 'zh': u('\u91cd\u5e86\u5e02')},
'861362439':{'en': 'Baishan, Jilin', 'zh': u('\u5409\u6797\u7701\u767d\u5c71\u5e02')},
'861364553':{'en': 'Wuhu, Anhui', 'zh': u('\u5b89\u5fbd\u7701\u829c\u6e56\u5e02')},
'861364220':{'en': 'Shantou, Guangdong', 'zh': u('\u5e7f\u4e1c\u7701\u6c55\u5934\u5e02')},
'861364551':{'en': 'Hefei, Anhui', 'zh': u('\u5b89\u5fbd\u7701\u5408\u80a5\u5e02')},
'86135259':{'en': 'Luoyang, Henan', 'zh': u('\u6cb3\u5357\u7701\u6d1b\u9633\u5e02')},
'861364550':{'en': 'Chuzhou, Anhui', 'zh': u('\u5b89\u5fbd\u7701\u6ec1\u5dde\u5e02')},
'861364557':{'en': 'Suzhou, Anhui', 'zh': u('\u5b89\u5fbd\u7701\u5bbf\u5dde\u5e02')},
'861364556':{'en': 'Anqing, Anhui', 'zh': u('\u5b89\u5fbd\u7701\u5b89\u5e86\u5e02')},
'861364555':{'en': 'MaAnshan, Anhui', 'zh': u('\u5b89\u5fbd\u7701\u9a6c\u978d\u5c71\u5e02')},
'861364226':{'en': 'Zhaoqing, Guangdong', 'zh': u('\u5e7f\u4e1c\u7701\u8087\u5e86\u5e02')},
'861369855':{'en': 'Bijie, Guizhou', 'zh': u('\u8d35\u5dde\u7701\u6bd5\u8282\u5730\u533a')},
'861366375':{'en': 'Pingdingshan, Henan', 'zh': u('\u6cb3\u5357\u7701\u5e73\u9876\u5c71\u5e02')},
'861369854':{'en': 'Bijie, Guizhou', 'zh': u('\u8d35\u5dde\u7701\u6bd5\u8282\u5730\u533a')},
'861369857':{'en': 'Bijie, Guizhou', 'zh': u('\u8d35\u5dde\u7701\u6bd5\u8282\u5730\u533a')},
'861355805':{'en': 'Yulin, Guangxi', 'zh': u('\u5e7f\u897f\u7389\u6797\u5e02')},
'861355804':{'en': 'Wuzhou, Guangxi', 'zh': u('\u5e7f\u897f\u68a7\u5dde\u5e02')},
'861355807':{'en': 'Qinzhou, Guangxi', 'zh': u('\u5e7f\u897f\u94a6\u5dde\u5e02')},
'861355806':{'en': 'Baise, Guangxi', 'zh': u('\u5e7f\u897f\u767e\u8272\u5e02')},
'861355801':{'en': 'Nanning, Guangxi', 'zh': u('\u5e7f\u897f\u5357\u5b81\u5e02')},
'861355800':{'en': 'Fangchenggang, Guangxi', 'zh': u('\u5e7f\u897f\u9632\u57ce\u6e2f\u5e02')},
'861355803':{'en': 'Guilin, Guangxi', 'zh': u('\u5e7f\u897f\u6842\u6797\u5e02')},
'861355802':{'en': 'Liuzhou, Guangxi', 'zh': u('\u5e7f\u897f\u67f3\u5dde\u5e02')},
'861369851':{'en': 'Anshun, Guizhou', 'zh': u('\u8d35\u5dde\u7701\u5b89\u987a\u5e02')},
'861355809':{'en': 'Beihai, Guangxi', 'zh': u('\u5e7f\u897f\u5317\u6d77\u5e02')},
'861355808':{'en': 'Hechi, Guangxi', 'zh': u('\u5e7f\u897f\u6cb3\u6c60\u5e02')},
'861369850':{'en': 'Anshun, Guizhou', 'zh': u('\u8d35\u5dde\u7701\u5b89\u987a\u5e02')},
'861366374':{'en': 'Xuchang, Henan', 'zh': u('\u6cb3\u5357\u7701\u8bb8\u660c\u5e02')},
'861369853':{'en': 'Bijie, Guizhou', 'zh': u('\u8d35\u5dde\u7701\u6bd5\u8282\u5730\u533a')},
'861369852':{'en': 'Anshun, Guizhou', 'zh': u('\u8d35\u5dde\u7701\u5b89\u987a\u5e02')},
'861353959':{'en': 'Zhuhai, Guangdong', 'zh': u('\u5e7f\u4e1c\u7701\u73e0\u6d77\u5e02')},
'861353958':{'en': 'Zhuhai, Guangdong', 'zh': u('\u5e7f\u4e1c\u7701\u73e0\u6d77\u5e02')},
'861367056':{'en': 'Jieyang, Guangdong', 'zh': u('\u5e7f\u4e1c\u7701\u63ed\u9633\u5e02')},
'861353951':{'en': 'Qingyuan, Guangdong', 'zh': u('\u5e7f\u4e1c\u7701\u6e05\u8fdc\u5e02')},
'861353950':{'en': 'Qingyuan, Guangdong', 'zh': u('\u5e7f\u4e1c\u7701\u6e05\u8fdc\u5e02')},
'861353953':{'en': 'Shanwei, Guangdong', 'zh': u('\u5e7f\u4e1c\u7701\u6c55\u5c3e\u5e02')},
'861353952':{'en': 'Qingyuan, Guangdong', 'zh': u('\u5e7f\u4e1c\u7701\u6e05\u8fdc\u5e02')},
'861353955':{'en': 'Zhuhai, Guangdong', 'zh': u('\u5e7f\u4e1c\u7701\u73e0\u6d77\u5e02')},
'861353954':{'en': 'Shanwei, Guangdong', 'zh': u('\u5e7f\u4e1c\u7701\u6c55\u5c3e\u5e02')},
'861353957':{'en': 'Zhuhai, Guangdong', 'zh': u('\u5e7f\u4e1c\u7701\u73e0\u6d77\u5e02')},
'861353956':{'en': 'Zhuhai, Guangdong', 'zh': u('\u5e7f\u4e1c\u7701\u73e0\u6d77\u5e02')},
'86135113':{'en': 'Jiaxing, Zhejiang', 'zh': u('\u6d59\u6c5f\u7701\u5609\u5174\u5e02')},
'86135440':{'en': 'Shenzhen, Guangdong', 'zh': u('\u5e7f\u4e1c\u7701\u6df1\u5733\u5e02')},
'86135443':{'en': 'Guangzhou, Guangdong', 'zh': u('\u5e7f\u4e1c\u7701\u5e7f\u5dde\u5e02')},
'86135110':{'en': 'Beijing', 'zh': u('\u5317\u4eac\u5e02')},
'86135445':{'en': 'Guangzhou, Guangdong', 'zh': u('\u5e7f\u4e1c\u7701\u5e7f\u5dde\u5e02')},
'86135444':{'en': 'Guangzhou, Guangdong', 'zh': u('\u5e7f\u4e1c\u7701\u5e7f\u5dde\u5e02')},
'86135447':{'en': 'Dongguan, Guangdong', 'zh': u('\u5e7f\u4e1c\u7701\u4e1c\u839e\u5e02')},
'86135446':{'en': 'Dongguan, Guangdong', 'zh': u('\u5e7f\u4e1c\u7701\u4e1c\u839e\u5e02')},
'861361451':{'en': 'Harbin, Heilongjiang', 'zh': u('\u9ed1\u9f99\u6c5f\u7701\u54c8\u5c14\u6ee8\u5e02')},
'86135448':{'en': 'Dongguan, Guangdong', 'zh': u('\u5e7f\u4e1c\u7701\u4e1c\u839e\u5e02')},
'86135119':{'en': 'Guiyang, Guizhou', 'zh': u('\u8d35\u5dde\u7701\u8d35\u9633\u5e02')},
'86135118':{'en': 'Zunyi, Guizhou', 'zh': u('\u8d35\u5dde\u7701\u9075\u4e49\u5e02')},
'861361455':{'en': 'Suihua, Heilongjiang', 'zh': u('\u9ed1\u9f99\u6c5f\u7701\u7ee5\u5316\u5e02')},
'861361454':{'en': 'Jiamusi, Heilongjiang', 'zh': u('\u9ed1\u9f99\u6c5f\u7701\u4f73\u6728\u65af\u5e02')},
'861361457':{'en': 'Da Hinggan Ling, Heilongjiang', 'zh': u('\u9ed1\u9f99\u6c5f\u7701\u5927\u5174\u5b89\u5cad\u5730\u533a')},
'861361456':{'en': 'Heihe, Heilongjiang', 'zh': u('\u9ed1\u9f99\u6c5f\u7701\u9ed1\u6cb3\u5e02')},
'861367051':{'en': 'Shantou, Guangdong', 'zh': u('\u5e7f\u4e1c\u7701\u6c55\u5934\u5e02')},
'861367050':{'en': 'Shantou, Guangdong', 'zh': u('\u5e7f\u4e1c\u7701\u6c55\u5934\u5e02')},
'861359195':{'en': 'Huludao, Liaoning', 'zh': u('\u8fbd\u5b81\u7701\u846b\u82a6\u5c9b\u5e02')},
'861366371':{'en': 'Zhengzhou, Henan', 'zh': u('\u6cb3\u5357\u7701\u90d1\u5dde\u5e02')},
'861359194':{'en': 'Chaoyang, Liaoning', 'zh': u('\u8fbd\u5b81\u7701\u671d\u9633\u5e02')},
'861368471':{'en': 'Hohhot, Inner Mongolia', 'zh': u('\u5185\u8499\u53e4\u547c\u548c\u6d69\u7279\u5e02')},
'861365226':{'en': 'Foshan, Guangdong', 'zh': u('\u5e7f\u4e1c\u7701\u4f5b | \u5c71\u5e02')},
'861365227':{'en': 'Foshan, Guangdong', 'zh': u('\u5e7f\u4e1c\u7701 | \u4f5b\u5c71\u5e02')},
'861365224':{'en': 'Zhuhai, Guangdong', 'zh': u('\u5e7f\u4e1c\u7701\u73e0\u6d77\u5e02')},
'861365225':{'en': 'Zhuhai, Guangdong', 'zh': u('\u5e7f\u4e1c\u7701\u73e0\u6d77\u5e02')},
'861365222':{'en': 'Zhongshan, Guangdong', 'zh': u('\u5e7f\u4e1c\u7701\u4e2d\u5c71\u5e02')},
'861365223':{'en': 'Zhongshan, Guangdong', 'zh': u('\u5e7f\u4e1c\u7701\u4e2d\u5c71\u5e02')},
'861365220':{'en': 'Zhongshan, Guangdong', 'zh': u('\u5e7f\u4e1c\u7701\u4e2d\u5c71\u5e02')},
'861365221':{'en': 'Zhongshan, Guangdong', 'zh': u('\u5e7f\u4e1c\u7701\u4e2d\u5c71\u5e02')},
'861366373':{'en': 'Xinxiang, Henan', 'zh': u('\u6cb3\u5357\u7701\u65b0\u4e61\u5e02')},
'861365228':{'en': 'Foshan, Guangdong', 'zh': u('\u5e7f\u4e1c\u7701\u4f5b\u5c71\u5e02')},
'861365229':{'en': 'Foshan, Guangdong', 'zh': u('\u5e7f\u4e1c\u7701\u4f5b\u5c71\u5e02')},
'861350978':{'en': 'Datong, Shanxi', 'zh': u('\u5c71\u897f\u7701\u5927\u540c\u5e02')},
'861350979':{'en': 'Yuncheng, Shanxi', 'zh': u('\u5c71\u897f\u7701\u8fd0\u57ce\u5e02')},
'861368708':{'en': 'Nanchang, Jiangxi', 'zh': u('\u6c5f\u897f\u7701\u5357\u660c\u5e02')},
'861368709':{'en': 'Nanchang, Jiangxi', 'zh': u('\u6c5f\u897f\u7701\u5357\u660c\u5e02')},
'861350972':{'en': 'Datong, Shanxi', 'zh': u( |
Aigrefin/py3learn | learn/tests/tests_views/tests_exercise_wrong_answer.py | Python | mit | 943 | 0.002121 | from unittest import TestCase
from unittest.mock import patch, call, MagicMock
from django.test import RequestFactory
from learn.infrastructure.database import Database
from learn.views.wrong_answer import exercise_wrong_answer
class ExerciseWrongAnswerTests(TestCase):
    """View tests for ``exercise_wrong_answer``."""

    @patch("learn.views.wrong_answer.render")
    def test_shouldRenderWrongAnswer_WithDictionaryPK_AndTranslationPK_AndTranslation(self, render_mock):
        # Given
        database = MagicMock(Database)
        factory = RequestFactory()
        request = factory.get('fake-url')

        # When
        exercise_wrong_answer(request, 23, 42, database=database)

        # Then: the view renders the template with both pks and the
        # translation looked up from the database.
        # ('dictionary_pk' key reconstructed from garbled source lines.)
        expected_args = call(request, 'learn/exercise_wrong_answer.html', context={
            'dictionary_pk': 23,
            'translation_pk': 42,
            'translation': database.get_translation()
        })
        self.assertEqual(render_mock.call_args_list[0], expected_args)
|
shagi/guifiadmin | quotas/management/commands/quotaInvoices.py | Python | agpl-3.0 | 870 | 0.001149 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import datetime
from django.core.management import BaseCommand
from django.utils.translation import ugettext_lazy as _
from django.utils.timezone import now, make_aware, utc
from quotas import models
DATE_FORMAT = "%Y-%m-%d"
class Command(BaseCommand):
    """Management command that creates the invoices for quotas."""

    # NOTE(review): Django's BaseCommand reads ``help`` rather than
    # ``help_text`` -- kept as-is to preserve the public attribute; confirm
    # whether it should be renamed.
    help_text = _("Create invoices for quotas.")

    def add_arguments(self, parser):
        # Reconstructed from a garbled line: optional -d/--date argument.
        parser.add_argument('-d', '--date', metavar="YYYY-MM-DD",
                            help=_('Create quotas for the month of the given date.'))

    def handle(self, *args, **options):
        """Create invoices for the month of --date (default: current time)."""
        date = options['date']
        if date is None:
            date = now()
        else:
            # Parse the naive date and make it timezone-aware in UTC.
            date = make_aware(
                datetime.datetime.strptime(date, DATE_FORMAT),
                utc,
            )
        # Removed a leftover Python 2 debug statement (``print date``),
        # which is a syntax error on Python 3.
        models.Quota.objects.create_invoices(date)
|
nvbn/thefuck | tests/output_readers/test_rerun.py | Python | mit | 2,942 | 0.00068 | # -*- encoding: utf-8 -*-
from mock import Mock, patch
from psutil import AccessDenied, TimeoutExpired
from thefuck.output_readers import rerun
class TestRerun(object):
    """Tests for ``thefuck.output_readers.rerun``."""

    def setup_method(self, test_method):
        # Patch psutil.Process as imported by the rerun module, so the
        # timeout/kill logic can be observed without real processes.
        self.patcher = patch('thefuck.output_readers.rerun.Process')
        process_mock = self.patcher.start()
        self.proc_mock = process_mock.return_value = Mock()

    def teardown_method(self, test_method):
        self.patcher.stop()

    @patch('thefuck.output_readers.rerun._wait_output', return_value=False)
    @patch('thefuck.output_readers.rerun.Popen')
    def test_get_output(self, popen_mock, wait_output_mock):
        popen_mock.return_value.stdout.read.return_value = b'output'
        # _wait_output returning False means timeout -> no output.
        # (Line reconstructed from a garbled source line.)
        assert rerun.get_output('', '') is None
        wait_output_mock.assert_called_once()

    @patch('thefuck.output_readers.rerun.Popen')
    def test_get_output_invalid_continuation_byte(self, popen_mock):
        # Invalid UTF-8 bytes must be decoded with replacement, not raise.
        # (def line reconstructed from a garbled source line.)
        output = b'ls: illegal option -- \xc3\nusage: ls [-@ABC...] [file ...]\n'
        expected = u'ls: illegal option -- \ufffd\nusage: ls [-@ABC...] [file ...]\n'
        popen_mock.return_value.stdout.read.return_value = output
        actual = rerun.get_output('', '')
        assert actual == expected

    @patch('thefuck.output_readers.rerun._wait_output')
    def test_get_output_unicode_misspell(self, wait_output_mock):
        # Non-ASCII commands must not crash the reader.
        rerun.get_output(u'p\xe1cman', u'p\xe1cman')
        wait_output_mock.assert_called_once()

    def test_wait_output_is_slow(self, settings):
        assert rerun._wait_output(Mock(), True)
        self.proc_mock.wait.assert_called_once_with(settings.wait_slow_command)

    def test_wait_output_is_not_slow(self, settings):
        assert rerun._wait_output(Mock(), False)
        self.proc_mock.wait.assert_called_once_with(settings.wait_command)

    @patch('thefuck.output_readers.rerun._kill_process')
    def test_wait_output_timeout(self, kill_process_mock):
        self.proc_mock.wait.side_effect = TimeoutExpired(3)
        self.proc_mock.children.return_value = []
        assert not rerun._wait_output(Mock(), False)
        kill_process_mock.assert_called_once_with(self.proc_mock)

    @patch('thefuck.output_readers.rerun._kill_process')
    def test_wait_output_timeout_children(self, kill_process_mock):
        self.proc_mock.wait.side_effect = TimeoutExpired(3)
        self.proc_mock.children.return_value = [Mock()] * 2
        # Parent and both children are killed.
        assert not rerun._wait_output(Mock(), False)
        assert kill_process_mock.call_count == 3

    def test_kill_process(self):
        proc = Mock()
        rerun._kill_process(proc)
        proc.kill.assert_called_once_with()

    @patch('thefuck.output_readers.rerun.logs')
    def test_kill_process_access_denied(self, logs_mock):
        # AccessDenied must be swallowed and logged, not propagated.
        proc = Mock()
        proc.kill.side_effect = AccessDenied()
        rerun._kill_process(proc)
        proc.kill.assert_called_once_with()
        logs_mock.debug.assert_called_once()
|
rplevka/robottelo | tests/foreman/cli/test_katello_agent.py | Python | gpl-3.0 | 11,288 | 0.001329 | """CLI tests for ``katello-agent``.
:Requirement: Host
:CaseAutomation: Automated
:CaseLevel: Component
:CaseComponent: Katello-agent
:Assignee: gtalreja
:TestType: Functional
:CaseImportance: High
:Upstream: No
"""
import time
import pytest
from broker import VMBroker
from robottelo.api.utils import wait_for_errata_applicability_task
from robottelo.cli.activationkey import ActivationKey
from robottelo.cli.factory import make_activation_key
from robottelo.cli.factory import make_host_collection
from robottelo.cli.factory import setup_org_for_a_custom_repo
from robottelo.cli.factory import setup_org_for_a_rh_repo
from robottelo.cli.host import Host
from robottelo.cli.hostcollection import HostCollection
from robottelo.config import settings
from robottelo.constants import FAKE_0_CUSTOM_PACKAGE_GROUP
from robottelo.constants import FAKE_0_CUSTOM_PACKAGE_GROUP_NAME
from robottelo.constants import FAKE_0_CUSTOM_PACKAGE_NAME
from robottelo.constants import FAKE_1_CUSTOM_PACKAGE
from robottelo.constants import FAKE_1_CUSTOM_PACKAGE_NAME
from robottelo.constants import FAKE_2_CUSTOM_PACKAGE
from robottelo.constants import FAKE_2_CUSTOM_PACKAGE_NAME
from robottelo.constants import PRDS
from robottelo.constants import REPOS
from robottelo.constants import REPOSET
from robottelo.hosts import ContentHost
# Skip every test in this module unless the 'clients' and 'fake_manifest'
# robottelo settings sections are configured.
pytestmark = [pytest.mark.skip_if_not_set('clients', 'fake_manifest')]
@pytest.fixture(scope='module')
def katello_agent_repos(module_ak, module_cv, module_lce, module_org):
    """Create Org, Lifecycle Environment, Content View, Activation key"""
    # Enable the RH Satellite Tools repo (source of katello-agent) against
    # the module-scoped org / content view / lifecycle env / activation key.
    setup_org_for_a_rh_repo(
        {
            'product': PRDS['rhel'],
            'repository-set': REPOSET['rhst7'],
            'repository': REPOS['rhst7']['name'],
            'organization-id': module_org.id,
            'content-view-id': module_cv.id,
            'lifecycle-environment-id': module_lce.id,
            'activationkey-id': module_ak.id,
        }
    )
    # Create custom repository content
    setup_org_for_a_custom_repo(
        {
            'url': settings.repos.yum_1.url,
            'organization-id': module_org.id,
            'content-view-id': module_cv.id,
            'lifecycle-environment-id': module_lce.id,
            'activationkey-id': module_ak.id,
        }
    )
    # Hand the prepared entities to dependent fixtures/tests.
    return {
        'ak': module_ak,
        'cv': module_cv,
        'lce': module_lce,
        'org': module_org,
    }
@pytest.fixture
def katello_agent_client(katello_agent_repos, rhel7_contenthost, default_sat):
    # Trust the Satellite CA so subscription-manager registration succeeds.
    rhel7_contenthost.install_katello_ca(default_sat)
    # Register content host and install katello-agent
    rhel7_contenthost.register_contenthost(
        katello_agent_repos['org'].label,
        katello_agent_repos['ak'].name,
    )
    assert rhel7_contenthost.subscribed
    host_info = Host.info({'name': rhel7_contenthost.hostname})
    rhel7_contenthost.enable_repo(REPOS['rhst7']['id'])
    rhel7_contenthost.install_katello_agent()
    # Yield both the VM handle and its Satellite-side host record.
    yield {'client': rhel7_contenthost, 'host_info': host_info}
@pytest.mark.tier3
def test_positive_get_errata_info(katello_agent_client):
    """Get errata info
    :id: afb5ab34-1703-49dc-8ddc-5e032c1b86d7
    :expectedresults: Errata info was displayed
    :CaseLevel: System
    """
    vm = katello_agent_client['client']
    host_info = katello_agent_client['host_info']
    # Install the outdated package so the erratum is applicable to this host.
    vm.run(f'yum install -y {FAKE_1_CUSTOM_PACKAGE}')
    erratum_id = settings.repos.yum_0.errata[1]
    result = Host.errata_info({'host-id': host_info['id'], 'id': erratum_id})
    assert result[0]['errata-id'] == erratum_id
    assert FAKE_2_CUSTOM_PACKAGE in result[0]['packages']
@pytest.mark.tier3
@pytest.mark.upgrade
def test_positive_apply_errata(katello_agent_client):
    """Apply errata to a host
    :id: 8d0e5c93-f9fd-4ec0-9a61-aa93082a30c5
    :expectedresults: Errata is scheduled for installation
    :CaseLevel: System
    """
    client = katello_agent_client['client']
    host_info = katello_agent_client['host_info']
    # Install the outdated package so the erratum becomes applicable, then
    # schedule its application through the CLI.
    client.run(f'yum install -y {FAKE_1_CUSTOM_PACKAGE}')
    Host.errata_apply({'errata-ids': settings.repos.yum_0.errata[1], 'host-id': host_info['id']})
@pytest.mark.tier3
def test_positive_apply_security_erratum(katello_agent_client):
    """Apply security erratum to a host
    :id: 4d1095c8-d354-42ac-af44-adf6dbb46deb
    :expectedresults: erratum is recognized by the
        `yum update --security` command on client
    :CaseLevel: System
    :customerscenario: true
    :BZ: 1420671, 1740790
    """
    client = katello_agent_client['client']
    host_info = katello_agent_client['host_info']
    client.download_install_rpm(settings.repos.yum_1.url, FAKE_2_CUSTOM_PACKAGE)
    # Check the system is up to date
    result = client.run('yum update --security | grep "No packages needed for security"')
    assert result.status == 0
    # Timestamp taken just before the downgrade so we can wait on the
    # applicability recalculation task it triggers.
    before_downgrade = int(time.time())
    # Downgrade walrus package
    client.run(f'yum downgrade -y {FAKE_2_CUSTOM_PACKAGE_NAME}')
    # Wait for errata applicability cache is counted
    wait_for_errata_applicability_task(int(host_info['id']), before_downgrade)
    # Check that host has applicable errata
    host_errata = Host.errata_list({'host-id': host_info['id']})
    assert host_errata[0]['erratum-id'] == settings.repos.yum_0.errata[1]
    assert host_errata[0]['installable'] == 'true'
    # Check the erratum becomes available
    result = client.run('yum update --assumeno --security | grep "No packages needed for security"')
    assert result.status == 1
@pytest.mark.tier3
@pytest.mark.upgrade
def test_positive_install_package(katello_agent_client):
    """Install a package to a host remotely
    :id: b1009bba-0c7e-4b00-8ac4-256e5cfe4a78
    :expectedresults: Package was successfully installed
    :CaseLevel: System
    """
    client = katello_agent_client['client']
    host_info = katello_agent_client['host_info']
    # Trigger the remote install through katello-agent, then verify the
    # package actually landed on the client (rpm -q exits 0 when installed).
    Host.package_install({'host-id': host_info['id'], 'packages': FAKE_0_CUSTOM_PACKAGE_NAME})
    result = client.run(f'rpm -q {FAKE_0_CUSTOM_PACKAGE_NAME}')
    assert result.status == 0
@pytest.mark.tier3
def test_positive_remove_package(katello_agent_client):
    """Remove a package from a host remotely
    :id: 573dec11-8f14-411f-9e41-84426b0f23b5
    :expectedresults: Package was successfully removed
    :CaseLevel: System
    """
    client = katello_agent_client['client']
    host_info = katello_agent_client['host_info']
    # Seed the package locally so there is something to remove remotely.
    client.run(f'yum install -y {FAKE_1_CUSTOM_PACKAGE}')
    Host.package_remove({'host-id': host_info['id'], 'packages': FAKE_1_CUSTOM_PACKAGE_NAME})
    # rpm -q exits non-zero once the package is gone.
    result = client.run(f'rpm -q {FAKE_1_CUSTOM_PACKAGE_NAME}')
    assert result.status != 0
@pytest.mark.tier3
def test_positive_upgrade_package(katello_agent_client):
    """Upgrade a host package remotely
    :id: ad751c63-7175-40ae-8bc4-800462cd9c29
    :expectedresults: Package was successfully upgraded
    :CaseLevel: System
    """
    client = katello_agent_client['client']
    host_info = katello_agent_client['host_info']
    # Install the older release, then upgrade by name via katello-agent.
    client.run(f'yum install -y {FAKE_1_CUSTOM_PACKAGE}')
    Host.package_upgrade({'host-id': host_info['id'], 'packages': FAKE_1_CUSTOM_PACKAGE_NAME})
    # FAKE_2_CUSTOM_PACKAGE is presumably the newer NVR of the same package
    # (TODO confirm against robottelo.constants), so its presence proves the
    # upgrade happened.
    result = client.run(f'rpm -q {FAKE_2_CUSTOM_PACKAGE}')
    assert result.status == 0
@pytest.mark.tier3
def test_positive_upgrade_packages_all(katello_agent_client):
    """Upgrade all the host packages remotely
    :id: 003101c7-bb95-4e51-a598-57977b2858a9
    :expectedresults: Packages (at least 1 with newer version available)
        were successfully upgraded
    :CaseLevel: System
    """
    client = katello_agent_client['client']
    host_info = katello_agent_client['host_info']
    # Install an outdated package, then run an unqualified "upgrade all".
    client.run(f'yum install -y {FAKE_1_CUSTOM_PACKAGE}')
    Host.package_upgrade_all({'host-id': host_info['id']})
    # The newer NVR being present proves the blanket upgrade worked.
    result = client.run(f'rpm -q {FAKE_2_CUSTOM_PACKAGE}')
    assert result.status == 0
@pytest.mark.tier3
@pytest.mark.upgrade
def test_positive_install_and_remove_package_group(katello_agent_client):
"""Install and remove a package group to a host remotely
:id: ded20a89-cfd9-48d5-8829-739b1a4d4042
:expectedresults: Package group was successfully installed
and r |
eegroopm/pyLATTICE | resources/common.py | Python | gpl-2.0 | 2,942 | 0.027872 | # -*- coding: utf-8 -*-
"""
Created on Mon Apr 21 10:34:18 2014
@author: eegroopm
"""
import os, sys
import pandas as pd
import numpy as np
class common:
def __init__(self):
self.path = os.path.expanduser('~')
#\u0305 is unicode overline character
#self._overline_strings = [u'1\u0305', u'2\u0305' ,u'3\u0305', u'4\u0305', u'5\u0305', u'6\u0305', u'7\u0305',u'8\u0305',u'9\u0305']
#use matplotlib's mathtex rendering for overline strings
self._overline_strings = [r'\\bar{1}',r'\\bar{2}',r'\\bar{3}',
r'\\bar{6}',r'\\bar{5}',r'\\bar{6}',
r'\\bar{7}',r'\\bar{8}',r'\\bar{9}']
self.DSpaces = pd.DataFrame(columns = ['d-space','h','k','l']) #Msum is sum of absolute miller indices, neede for plotting pattern
self.Forbidden = pd.DataFrame(columns = ['d-space','h','k','l'])
self.u = 0
self.v = 0
self.w = 1
self.ZoneAxis = np.array([self.u,self.v,self.w])
self.beamenergy = 200 #keV
self.camlength = 100 #cm
self.camconst = 1.0
self.wavelength = self.Wavelength(self.beamenergy) #angstroms
self._x2 = False
self.a = 1
self.b = 1
self.c = 1
self.astar = 1
self.bstar = 1
self.cstar = 1
self.alpha = 90 #degrees
self.beta = 90
self.gamma = 90
self.alphastar = 90
self.betastar = 90
self.gammastar = 90
#SpaceGroup data
#DataFrame in the form SG Number, Patterson symbol, Geometry,Unit Cell Type, Unit Cell Conditions , Spacegroup conditions
#e.g.
#sg.loc[218] yields:
#Patterson P-43n
#Conditions (h==k and l == 2*n) or (h == 2*n and k==0 and ...
#Name: 218, dtype: object
if sys.version_info[0] == 3: #python3 and python2 pickle h5 files diff | erently. GAH!!
self.sg = pd.read_hdf('resources/SpaceGroups.h5','table')
self.sghex = pd.read_hdf('resources/SpaceGroupsHex.h5','table') #for trigonal crystals with rhombohedral or hexagonal centering
self.mineraldb = pd.read_hdf('resources/MineralDatabase.h5','table')
elif sys.version_info[0] == 2:
self.sg = pd.read_hdf('resources/SpaceGroups_py2.h5','table')
self.sghex = pd.read_hdf('resources/SpaceGroupsH | ex_py2.h5','table')
self.mineraldb = pd.read_hdf('resources/MineralDatabase_py2.h5','table')
self.manualConds = [] #empty list of strings for manual conditions
def Wavelength(self,E):
hbar = 6.626E-34 #m^2 kg/s
me = 9.109E-31 #kg
c = 3E8 #m/s
e = 1.602E-19 #Coulombs
E = E*1000 #turn to eV
wavelength = hbar/np.sqrt(2*me*e*E)/np.sqrt(1 + (e*E)/(2*me*c**2))*(10**10) #angstroms. relativistic formula
return(wavelength) |
codingjoe/django-vies | tests/__init__.py | Python | mit | 157 | 0 | VA | LID_VIES = "LU26375245"
VALID_VIES_COUNTRY_CODE = "LU"
VALID_VIES_NUMBER = "26375245"
VALID_VIES_IE = [
"1234567X",
"1X23456X",
" | 1234567XX",
]
|
Vinay26k/python | SourceForge/Website_to_pdf/Website2PdfCore.py | Python | mit | 643 | 0.018663 | import urllib.request as ul
from bs4 import B | eautifulSoup as bs
from time import sleep
import pdfkit
import os.path
from datetime import datetime
url = None
def converter(url, direc=None):
    """Render the web page at `url` to a PDF under <direc>/Website2Pdf.

    Returns (True, <pdf file name>).  The file name comes from the
    second-to-last URL path segment, so a trailing slash is assumed.
    """
    wkhtmltopdf_exe = r'./wkhtmltopdf.exe'
    # Default output directory lives next to the script.
    if not direc:
        direc = "./Website2Pdf"
    else:
        direc = direc + "/Website2Pdf"
    if not os.path.exists(direc):
        os.makedirs(direc)
    cfg = pdfkit.configuration(wkhtmltopdf=wkhtmltopdf_exe)
    slug = url.split('/')[-2]
    name = slug + ".pdf"
    fname = direc + '/' + slug + ".pdf"
    pdfkit.from_url(url, fname, configuration=cfg)
    return True, name
|
namili/blueman | blueman/plugins/applet/StatusIcon.py | Python | gpl-3.0 | 3,189 | 0.039511 | # Copyright (C) 2008 Valmantas Paliksa <walmis at balticum-tv dot lt>
# Copyright (C) 2008 Tadas Dailyda <tadas at dailyda dot com>
#
# Licensed under the GNU General Public License Version 3
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from blueman.Functions import *
from blueman.plugins.AppletPlugin import AppletPlugin
import gtk
import gobject
class StatusIcon(AppletPlugin, gtk.StatusIcon):
    """Tray status icon for the Blueman applet.

    Combines the AppletPlugin interface with gtk.StatusIcon and keeps the
    icon's visibility and tooltip in sync with Bluetooth adapter state.
    """
    # Essential plugin: must never be unloaded.
    __unloadable__ = False
    # Visibility votes returned by on_query_status_icon_visibility handlers.
    FORCE_SHOW = 2
    SHOW = 1
    FORCE_HIDE = 0

    def on_load(self, applet):
        gtk.StatusIcon.__init__(self)
        self.lines = {}    # tooltip text lines keyed by numeric slot id
        self.pixbuf = None  # currently displayed icon pixbuf
        self.connect("size-changed", self.on_status_icon_resized)
        self.SetTextLine(0, _("Bluetooth Enabled"))
        AppletPlugin.add_method(self.on_query_status_icon_visibility)
        AppletPlugin.add_method(self.on_status_icon_pixbuf_ready)

    def on_bluetooth_power_state_changed(self, state):
        if state:
            self.SetTextLine(0, _("Bluetooth Enabled"))
        else:
            self.SetTextLine(0, _("Bluetooth Disabled"))
        self.Query()

    def Query(self):
        """Recompute icon visibility from plugin votes and adapter presence."""
        if not self.Applet.Manager:
            self.props.visible = False
            return
        rets = self.Applet.Plugins.Run("on_query_status_icon_visibility")
        if StatusIcon.FORCE_HIDE not in rets:
            if StatusIcon.FORCE_SHOW in rets:
                self.props.visible = True
            else:
                try:
                    if self.Applet.Manager.ListAdapters() == []:
                        self.props.visible = False
                    else:
                        self.props.visible = True
                except Exception:
                    # D-Bus call can fail while bluetoothd restarts; hide
                    # rather than crash (narrowed from a bare except).
                    self.props.visible = False
        else:
            self.props.visible = False

    def SetTextLine(self, id, text):
        """Set tooltip line `id`; clear it when `text` is falsy."""
        if text:
            self.lines[id] = text
        else:
            try:
                del self.lines[id]
            except KeyError:
                # Clearing a line that was never set is fine.
                pass
        self.update_tooltip()

    def update_tooltip(self):
        # Join the lines in slot order; empty dict yields an empty tooltip.
        self.props.tooltip_markup = "\n".join(
            self.lines[k] for k in sorted(self.lines.keys()))

    def IconShouldChange(self):
        # Force a redraw at the current size.
        self.on_status_icon_resized(self, self.props.size)

    def on_adapter_added(self, path):
        self.Query()

    def on_adapter_removed(self, path):
        self.Query()

    def on_manager_state_changed(self, state):
        self.Query()

    def on_status_icon_resized(self, statusicon, size):
        """Rebuild the pixbuf at the new size, let plugins post-process it,
        then display the result."""
        self.pixbuf = get_icon("blueman-tray", size, fallback="blueman")

        def callback(inst, ret):
            # A plugin may hand back a replacement pixbuf.
            if isinstance(ret, gtk.gdk.Pixbuf):
                self.pixbuf = ret
            return (self.pixbuf,)

        self.Applet.Plugins.RunEx("on_status_icon_pixbuf_ready", callback, self.pixbuf)
        self.set_from_pixbuf(self.pixbuf)
        return True

    def on_query_status_icon_visibility(self):
        # Default vote: show unless someone forces otherwise.
        return StatusIcon.SHOW

    def on_status_icon_pixbuf_ready(self, pixbuf):
        return False
|
KunihikoKido/sublime-elasticsearch-client | commands/indices_get_warmer.py | Python | mit | 460 | 0 | from .base import BaseCommand
class IndicesGetWarmerCommand(BaseCommand):
    """Sublime Text command wrapping the Elasticsearch indices.get_warmer API.

    When invoked without a warmer name it first shows the warmer picker
    panel and re-runs itself with the selection.
    """
    command_name = "elasticsearch:indices-get-warmer"

    def is_enabled(self):
        # Always available.
        return True

    def run_request(self, name=None):
        if not name:
            # No warmer chosen yet: prompt, then re-enter via self.run.
            self.show_warmer_list_panel(self.run)
            return
        options = dict(
            index=self.settings.index,
            name=name
        )
        return self.client.indices.get_warmer(**options)
|
jiadaizhao/LeetCode | 1501-1600/1502-Can Make Arithmetic Progression From Sequence/1502-Can Make Arithmetic Progression From Sequence.py | Python | mit | 264 | 0 | class Solution:
def canMakeArithmeticProgression(self, arr: List[int]) -> bool:
arr.sort()
diff = arr[1] - arr[0]
for i in range(2, len(arr)):
if arr[i] - arr[i - | 1] != diff:
return False
| return True
|
ironfroggy/django-better-cache | bettercache/tests/test_views.py | Python | mit | 1,632 | 0.001225 | from unittest import TestCase
try:
from unittest import mock
except ImportError:
import mock
from bettercache.view | s import BetterView
class TestView(TestCase):
    """Unit tests for BetterView.get with caching and proxying stubbed out."""

    def setUp(self):
        # Fresh view whose cache hooks are stubbed: never bypass, start with
        # an empty cache, and make the cache setter a no-op.
        self.view = BetterView()
        self.view.should_bypass_cache = lambda x: False
        self.view.get_cache = lambda x: (None, None, )
        self.view.set_cache = lambda x, y: True
        self.request = mock.Mock()
        self.request.build_absolute_uri = lambda : '_'

    @mock.patch('bettercache.views.proxy')
    def test_miss(self, proxy):
        ''' make sure we proxy when there is no cache '''
        proxy.return_value = {}
        self.view.get(self.request)
        self.assertTrue(proxy.called)

    @mock.patch('bettercache.views.strip_wsgi')
    @mock.patch('bettercache.views.proxy')
    def test_notexpired(self, proxy, strip_wsgi):
        ''' make sure we don't send off a task if it's not expired '''
        self.view.get_cache = lambda x: ({}, False, )
        self.view.send_task = mock.Mock()
        self.view.get(self.request)
        self.assertFalse(self.view.send_task.called)
        self.assertFalse(proxy.called)

    @mock.patch('bettercache.views.strip_wsgi')
    @mock.patch('bettercache.views.proxy')
    def test_expired(self, proxy, strip_wsgi):
        ''' make sure that when it's expired the task is sent '''
        self.view.should_bypass_cache = lambda x: False
        self.view.send_task = mock.Mock()
        self.view.get_cache = lambda x: ({}, True, )
        self.view.get(self.request)
        self.assertTrue(self.view.send_task.called)
        self.assertFalse(proxy.called)
|
simras/CLAP | scripts/mk_ErrorModel.py | Python | mit | 1,940 | 0.028351 | #!/usr/bin/python
# mk_ErrorModel.py -m 0.125
# Example
# By Simon H. Rasmussen
# Bioinformatics Centre
# University of Copenhagen
#
def wLines(mutP):
    """Print, for each quality 0-41 and each base, four log-odds scores
    from item().  (Python 2 print statement - this file is Python 2.)"""
    # range of qualities 0...41
    for qual in range(42):
        for base in ["A","C","G","T"]:
            # quality base P(a|base) P(c|base) P(g|base) P(t|base)
            print qual, base, item(qual,"A",base,mutP),item(qual,"C",base,mutP),item(qual,"G",base,mutP),item(qual,"T",base,mutP)
def item(qual,base,obsBase,mutP):
    """Log2-odds score: log2(sumProb(...)/background) for the given pair.

    NOTE(review): callers pass item(qual, <candidate base>, <column base>, mutP),
    so `base` is the candidate and `obsBase` the observed column - the
    parameter names are confusing; verify against wLines before refactoring.
    """
    import math
    # Uniform background distribution over A, C, G, T.
    bg = [0.25,0.25,0.25,0.25]
    # Conversion factor: natural log -> log base 2.
    logC = 1/math.log(2)
    return logC * math.log(sumProb(qual,base,obsBase,mutP)/bg[base2I(base)])
def base2I(base):
    """Map a nucleotide letter to its index (A=0, C=1, G=2, T=3).

    Any other value yields None, matching the original fall-through.
    """
    return {"A": 0, "C": 1, "G": 2, "T": 3}.get(base)
def error(qual, f, to):
    """Phred-model probability of observing base `to` given true base `f`.

    qual is a Phred quality score; the error probability 10**(-qual/10) is
    split evenly over the three wrong bases.  (Removed an unused `quals`
    ASCII-table string that was dead code.)
    """
    qual = int(qual)
    if f == to:
        # no error
        return 1 - 10**(-qual/10.0)
    else:
        # an error: one third of the total error probability
        return 10**(-qual/10.0)/3
def sumProb(qual,f,to,mutP):
    """P(observe `to` | reference `f`): sum over true bases of
    mutation prior * sequencing-error probability."""
    # sums the probabilities
    # regularizing probability (pseudocount keeps log() in item() finite)
    pr = 0.0000001
    for a in ["A","C","G","T"]:
        pr = pr + tTOc(f,a,mutP) * error(qual,a,to)
    return pr
def tTOc(f, to, mutP):
    """Prior P(`to` | true base `f`) under the T->C conversion model.

    T converts to C with probability mutP; C stays C with probability
    1 - mutP; any base maps to itself; all other transitions are impossible.
    NOTE(review): T->T returns 1 (not 1 - mutP), so the T priors sum to
    1 + mutP - preserved as-is from the original model.
    """
    p = mutP
    if f == "T" and to == "C":
        return p
    elif f == "C" and to == "C":
        return 1 - p
    elif f == to:
        # No mutation
        return 1
    else:
        # No mutation and f and to are different: impossible transition
        return 0
if __name__ == "__main__":
    # CLI entry point: -m sets the T->C mutation probability (default 0.125).
    # (optparse is deprecated but kept: this file targets Python 2.)
    from optparse import OptionParser
    parser = OptionParser()
    parser.add_option("-m", action="store", type="float", dest="mutP", default=0.125, help="P(\"t\"|\"c\"): t to c mutation probability")
    (options, args) = parser.parse_args()
    wLines(options.mutP)
|
datakid/tvet | tafe/migrations/0058_auto__chg_field_timetable_year.py | Python | gpl-3.0 | 25,222 | 0.007692 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Changing field 'Timetable.year'
db.alter_column('tafe_timetable', 'year', self.gf('django.db.models.fields.CharField')(max_length=4))
def backwards(self, orm):
# Changing field 'Timetable.year'
db.alter_column('tafe_timetable', 'year', self.gf('django.db.models.fields.IntegerField')())
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Perm | ission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
| 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'tafe.applicant': {
'Meta': {'ordering': "['first_name', 'surname']", 'object_name': 'Applicant'},
'added': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'address': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'applied_for': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'applicants'", 'to': "orm['tafe.Course']"}),
'date_of_application': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'date_offer_accepted': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'date_offer_sent': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'disability': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'disability_description': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
'dob': ('django.db.models.fields.DateField', [], {}),
'education_level': ('django.db.models.fields.CharField', [], {'max_length': '2', 'blank': 'True'}),
'eligibility': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'experience': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
'gender': ('django.db.models.fields.CharField', [], {'default': "'F'", 'max_length': "'1'"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'island': ('django.db.models.fields.CharField', [], {'default': "'Tarawa'", 'max_length': "'10'", 'null': 'True', 'blank': 'True'}),
'last_change_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'applicant_last_change_by'", 'null': 'True', 'to': "orm['auth.User']"}),
'other_courses': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'penultimate_change_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'applicant_penultimate_change_by'", 'null': 'True', 'to': "orm['auth.User']"}),
'phone': ('django.db.models.fields.CharField', [], {'max_length': '12', 'blank': 'True'}),
'phone2': ('django.db.models.fields.CharField', [], {'max_length': '12', 'blank': 'True'}),
'ranking': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'short_listed': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '40', 'blank': 'True'}),
'student_details': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tafe.Student']", 'null': 'True', 'blank': 'True'}),
'successful': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'surname': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
'test_ap': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'test_eng': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'test_ma': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
'tafe.assessment': {
'Meta': {'object_name': 'Assessment'},
'date_due': ('django.db.models.fields.DateField', [], {}),
'date_given': ('django.db.models.fields.DateField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'slug': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'subject': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'assessments'", 'to': "orm['tafe.Subject']"})
},
'tafe.course': {
'Meta': {'object_name': 'Course'},
'aqf_level': ('django.db.models.fields.CharField', [], {'max_length': '5'}),
'course_code': ('django.db.mo |
chocoelho/twitter-activities-monitor | users/models.py | Python | mit | 1,296 | 0 | from django.contrib.auth.models import AbstractBaseUser, PermissionsMixin
from django.db import models
from django.db.models.signals import post_save
from django.utils.translation import ugettext_lazy as _
from common.models import IndexedTimeStampedModel
from tweets.models import Timeline
from .managers import UserManager
class User(AbstractBaseUser, PermissionsMixin, IndexedTimeStampedModel):
    """Custom user model: username is the login identifier, email the
    display name (both get_full_name and get_short_name return it)."""
    email = models.EmailField(max_length=255)
    username = models.CharField(max_length=15, unique=True)
    # Controls Django admin access.
    is_staff = models.BooleanField(
        default=False,
        help_text=_('Designates whether the user can log into this admin '
                    'site.'))
    # Soft-delete flag: deactivate instead of deleting accounts.
    is_active = models.BooleanField(
        default=True,
        help_text=_('Designates whether this user should be treated as '
                    'active. Unselect this instead of deleting accounts.'))

    objects = UserManager()

    USERNAME_FIELD = 'username'

    def get_full_name(self):
        return self.email

    def get_short_name(self):
        return self.email

    def __unicode__(self):
        return self.email
def assure_user_timeline_creation(sender, instance, created, **kwargs):
    # post_save hook: guarantee every User owns a Timeline (idempotent via
    # get_or_create, so repeated saves are safe).
    Timeline.objects.get_or_create(user=instance)
post_save.connect(assure_user_timeline_creation, sender=User)
|
orlenko/plei | pleiapp/admin.py | Python | bsd-2-clause | 5,888 | 0.005944 | from copy import deepcopy
from django.contrib import admin
from django.utils.translation import ugettext_lazy as _
from mezzanine.conf import settings
from mezzanine.core.admin import DisplayableAdmin, OwnableAdmin
from pleiapp.models import (FrontPageItem,
Type,
Category,
Topic,
Resource,
Faq,
Dictionary,
Tagline)
# Build the Resource admin layout from Mezzanine's DisplayableAdmin base:
# inject taxonomy/author fields, media/attachment fields, and a collapsed
# "Related" section for cross-links.
resource_fieldsets = deepcopy(DisplayableAdmin.fieldsets)
resource_fieldsets[0][1]["fields"].insert(1, "categories")
resource_fieldsets[0][1]["fields"].insert(1, "types")
resource_fieldsets[0][1]["fields"].insert(1, "topics")
resource_fieldsets[0][1]["fields"].insert(1, "author")
resource_fieldsets[0][1]["fields"].extend(["content", ])
resource_fieldsets[0][1]["fields"].extend(["toc","video_url","audio_file","link_url","attached_document",])
resource_list_display = ["title", "user", "status", "admin_link"]
# Thumbnail support: featured image field plus a preview column first.
resource_fieldsets[0][1]["fields"].insert(-2, "featured_image")
resource_list_display.insert(0, "admin_thumb")
resource_fieldsets = list(resource_fieldsets)
resource_list_filter = deepcopy(DisplayableAdmin.list_filter) + ("categories", "types", "topics", )
resource_fieldsets.insert(1, (_("Related"), {
    "classes": ("collapse-closed",),
    "fields": ("detect_automatically", "related_resources","related_faqs","related_dictionary","searchable_text")}))
class ResourceAdmin(DisplayableAdmin, OwnableAdmin):
    """Admin for Resource: Mezzanine displayable + ownable, with
    related-item auto-detection run after related objects are saved."""
    fieldsets = resource_fieldsets
    list_display = resource_list_display
    list_filter = resource_list_filter
    filter_horizontal = ("categories", "types", "topics", "related_resources", "related_dictionary", "related_faqs")
    readonly_fields = ('searchable_text',)
    class Media:
        css = {
            'all': ('/static/css/admin.css',)
        }
    def save_form(self, request, form, change):
        """
        Super class ordering is important here - user must get saved first.
        """
        OwnableAdmin.save_form(self, request, form, change)
        return DisplayableAdmin.save_form(self, request, form, change)
    def save_related(self, request, form, formsets, change):
        # After M2M data is saved, run one-shot related-item detection when
        # the "detect automatically" checkbox was ticked, then clear it.
        retval = super(ResourceAdmin, self).save_related(request, form, formsets, change)
        if form.instance.detect_automatically:
            form.instance.detect_automatically = False
            form.instance.save()
            form.instance.detect_related()
        return retval
# Faq admin layout: same pattern as the Resource fieldsets above, plus the
# question_details field.
faq_fieldsets = deepcopy(DisplayableAdmin.fieldsets)
faq_fieldsets[0][1]["fields"].insert(1, "categories")
faq_fieldsets[0][1]["fields"].insert(1, "types")
faq_fieldsets[0][1]["fields"].insert(1, "topics")
faq_fieldsets[0][1]["fields"].extend(["question_details", "content", ])
faq_fieldsets[0][1]["fields"].insert(-2, "featured_image")
faq_fieldsets = list(faq_fieldsets)
faq_fieldsets.insert(1, (_("Related"), {
    "classes": ("collapse-closed",),
    "fields": ("detect_automatically", "related_resources","related_faqs","related_dictionary","searchable_text",)}))
class FaqAdmin(DisplayableAdmin, OwnableAdmin):
    """Admin for Faq; mirrors ResourceAdmin including related-item
    auto-detection on save."""
    fieldsets = faq_fieldsets
    list_display = resource_list_display
    list_filter = resource_list_filter
    filter_horizontal = ("categories", "types", "topics", "related_resources","related_faqs","related_dictionary")
    readonly_fields = ('searchable_text',)
    def save_form(self, request, form, change):
        """
        Super class ordering is important here - user must get saved first.
        """
        OwnableAdmin.save_form(self, request, form, change)
        return DisplayableAdmin.save_form(self, request, form, change)
    def save_related(self, request, form, formsets, change):
        # One-shot related-item detection after M2M save (see ResourceAdmin).
        retval = super(FaqAdmin, self).save_related(request, form, formsets, change)
        if form.instance.detect_automatically:
            form.instance.detect_automatically = False
            form.instance.save()
            form.instance.detect_related()
        return retval
# Dictionary admin layout: no taxonomy fields, just content, featured image
# and the collapsed "Related" section.
dictionary_fieldsets = deepcopy(DisplayableAdmin.fieldsets)
dictionary_fieldsets[0][1]["fields"].extend(["content", ])
dictionary_fieldsets[0][1]["fields"].insert(-2, "featured_image")
dictionary_fieldsets = list(dictionary_fieldsets)
dictionary_fieldsets.insert(1, (_("Related"), {
    "classes": ("collapse-closed",),
    "fields": ("related_resources","related_faqs","related_dictionary","searchable_text",)}))
class DictionaryAdmin(DisplayableAdmin, OwnableAdmin):
    """Admin for Dictionary entries; no taxonomy and no auto-detection
    (note: unlike Resource/Faq there is no save_related override)."""
    fieldsets = dictionary_fieldsets
    list_display = resource_list_display
    list_filter = deepcopy(DisplayableAdmin.list_filter)
    filter_horizontal = ("related_resources","related_faqs","related_dictionary")
    readonly_fields = ('searchable_text',)
    def save_form(self, request, form, change):
        """
        Super class ordering is important here - user must get saved first.
        """
        OwnableAdmin.save_form(self, request, form, change)
        return DisplayableAdmin.save_form(self, request, form, change)
class CategoryAdmin(admin.ModelAdmin):
    """Category change list: show the visibility flag and filter by it."""
    list_display = ['__str__', 'visible']
    list_filter = ['visible']
class TypeAdmin(admin.ModelAdmin):
    """Type change list: show the visibility flag and filter by it."""
    list_display = ['__str__', 'visible']
    list_filter = ['visible']
class TopicAdmin(admin.ModelAdmin):
    """Topic change list: show the visibility flag and filter by it.
    (Reconstructed: the list_display literal had a stray '|' injected.)"""
    list_display = ['__str__', 'visible']
    list_filter = ['visible',]
class FrontPageItemAdmin(admin.ModelAdmin):
    # Change list shows a thumbnail preview plus the visibility flag.
    list_display = ['__str__', 'admin_thumb', 'visible']
    list_filter = ['visible',]
# Wire every model to its admin class (Tagline uses the stock ModelAdmin).
admin.site.register(Resource, ResourceAdmin)
admin.site.register(Faq, FaqAdmin)
admin.site.register(Dictionary, DictionaryAdmin)
admin.site.register(Category, CategoryAdmin)
admin.site.register(Type, TypeAdmin)
admin.site.register(Topic, TopicAdmin)
admin.site.register(FrontPageItem, FrontPageItemAdmin)
admin.site.register(Tagline, admin.ModelAdmin)
|
sridevikoushik31/nova | nova/db/sqlalchemy/utils.py | Python | apache-2.0 | 10,646 | 0.000376 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2013 Boris Pavlovic (boris@pavlovic.me).
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the spec | ific language governing permissions and limitatio | ns
# under the License.
from migrate.changeset import UniqueConstraint
from sqlalchemy.engine import reflection
from sqlalchemy.ext.compiler import compiles
from sqlalchemy import func
from sqlalchemy import MetaData, Table, Column, Index
from sqlalchemy.sql.expression import UpdateBase, literal_column
from sqlalchemy.sql import select
from sqlalchemy.types import NullType
from nova.db.sqlalchemy import api as db
from nova import exception
from nova.openstack.common import log as logging
from nova.openstack.common import timeutils
LOG = logging.getLogger(__name__)
def get_table(engine, name):
    """Reflect and return the named table from the live database.

    Migrations reflect the schema at runtime because the ORM models can be
    far out of sync with the data actually present.
    """
    meta = MetaData()
    meta.bind = engine
    return Table(name, meta, autoload=True)
class InsertFromSelect(UpdateBase):
    """Executable clause element for ``INSERT INTO <table> <select>``.

    Rendered to SQL by :func:`visit_insert_from_select` below, which is
    registered through the ``@compiles`` hook.
    """

    def __init__(self, table, select):
        self.table = table    # target table of the INSERT
        self.select = select  # SELECT statement supplying the rows
@compiles(InsertFromSelect)
def visit_insert_from_select(element, compiler, **kw):
    # Compile an InsertFromSelect element into "INSERT INTO <table> <select>".
    return "INSERT INTO %s %s" % (
        compiler.process(element.table, asfrom=True),
        compiler.process(element.select))
def _get_not_supported_column(col_name_col_instance, column_name):
    """Return the caller-supplied Column for a type sqlite can't reflect.

    Columns whose reflected type is NullType (e.g. BigInteger under
    sqlite) must be provided explicitly by the caller.

    :param col_name_col_instance: mapping of column name -> Column instance
    :param column_name: name of the column to look up
    :raises exception.NovaException: if the column is missing from the
        mapping, or the mapped value is not a sqlalchemy.Column
    """
    try:
        column = col_name_col_instance[column_name]
    except KeyError:
        # Narrowed from a broad "except Exception as e" (the bound
        # exception was never used); only a missing key can occur here.
        msg = _("Please specify column %s in col_name_col_instance "
                "param. It is required because column has unsupported "
                "type by sqlite).")
        raise exception.NovaException(msg % column_name)
    if not isinstance(column, Column):
        msg = _("col_name_col_instance param has wrong type of "
                "column instance for column %s It should be instance "
                "of sqlalchemy.Column.")
        raise exception.NovaException(msg % column_name)
    return column
def _drop_unique_constraint_in_sqlite(migrate_engine, table_name, uc_name,
                                      **col_name_col_instance):
    # sqlite has no "ALTER TABLE ... DROP CONSTRAINT", so rebuild the table:
    # create a "__tmp__" copy without the constraint, copy the rows across,
    # drop the original and rename the copy back.
    insp = reflection.Inspector.from_engine(migrate_engine)
    meta = MetaData(bind=migrate_engine)
    table = Table(table_name, meta, autoload=True)
    columns = []
    for column in table.columns:
        if isinstance(column.type, NullType):
            # Reflected as NullType => type unsupported by sqlite (e.g.
            # BigInteger); the caller must have supplied the real Column.
            new_column = _get_not_supported_column(col_name_col_instance,
                                                   column.name)
            columns.append(new_column)
        else:
            columns.append(column.copy())
    # Keep every constraint except the one being dropped.
    constraints = [constraint for constraint in table.constraints
                   if not constraint.name == uc_name]
    new_table = Table(table_name + "__tmp__", meta, *(columns + constraints))
    new_table.create()
    # Remember the original table's indexes, rebound to the new columns.
    indexes = []
    for index in insp.get_indexes(table_name):
        column_names = [new_table.c[c] for c in index['column_names']]
        indexes.append(Index(index["name"],
                             *column_names,
                             unique=index["unique"]))
    ins = InsertFromSelect(new_table, table.select())
    migrate_engine.execute(ins)
    table.drop()
    # Indexes are recreated only after the old table (and its identically
    # named indexes) is gone.
    [index.create(migrate_engine) for index in indexes]
    new_table.rename(table_name)
def drop_unique_constraint(migrate_engine, table_name, uc_name, *columns,
                           **col_name_col_instance):
    """Drop the unique constraint ``uc_name`` from ``table_name``.

    Works for mysql, postgresql and sqlite. mysql and postgresql support
    the "ALTER TABLE" construction directly; sqlite requires rebuilding
    the whole table:

    1) Create a new table with the same columns, indexes and constraints
       (except the one being dropped).
    2) Copy the data from the old table to the new one.
    3) Drop the old table.
    4) Rename the new table to the name of the old table.

    :param migrate_engine: sqlalchemy engine
    :param table_name: name of the table that contains the unique constraint
    :param uc_name: name of the unique constraint that will be dropped
    :param columns: columns that are in the unique constraint
    :param col_name_col_instance: pairs column_name=column_instance, where
        column_instance is an instance of Column. Required only for columns
        whose types are unsupported by sqlite (e.g. BigInteger).
    """
    if migrate_engine.name not in ("mysql", "postgresql"):
        # sqlite (and anything else without ALTER TABLE support) goes
        # through the table-rebuild path.
        _drop_unique_constraint_in_sqlite(migrate_engine, table_name, uc_name,
                                          **col_name_col_instance)
        return
    meta = MetaData(bind=migrate_engine)
    t = Table(table_name, meta, autoload=True)
    UniqueConstraint(*columns, table=t, name=uc_name).drop()
def drop_old_duplicate_entries_from_table(migrate_engine, table_name,
                                          use_soft_delete, *uc_column_names):
    """
    This method is used to drop all old rows that have the same values for
    columns in uc_columns.

    For every group of rows sharing the same values in ``uc_column_names``,
    only the row with the largest id is kept; the others are deleted (or
    soft-deleted when ``use_soft_delete`` is true).
    """
    meta = MetaData()
    meta.bind = migrate_engine
    table = Table(table_name, meta, autoload=True)
    columns_for_group_by = [table.c[name] for name in uc_column_names]
    # SELECT max(id), <uc columns> ... GROUP BY <uc columns> HAVING count > 1
    columns_for_select = [func.max(table.c.id)]
    columns_for_select.extend(list(columns_for_group_by))
    duplicated_rows_select = select(columns_for_select,
                                    group_by=columns_for_group_by,
                                    having=func.count(table.c.id) > 1)
    for row in migrate_engine.execute(duplicated_rows_select):
        # NOTE(boris-42): Do not remove row that has the biggest ID.
        delete_condition = table.c.id != row[0]
        for name in uc_column_names:
            delete_condition &= table.c[name] == row[name]
        rows_to_delete_select = select([table.c.id]).where(delete_condition)
        # NOTE(review): the inner "row" shadows the outer loop variable;
        # the outer value is not used after this point, so it is harmless.
        for row in migrate_engine.execute(rows_to_delete_select).fetchall():
            LOG.info(_("Deleted duplicated row with id: %(id)s from table: "
                       "%(table)s") % dict(id=row[0], table=table_name))
        if use_soft_delete:
            # Soft delete: mark rows deleted (deleted = id) instead of
            # physically removing them.
            delete_statement = table.update().\
                where(delete_condition).\
                values({
                    'deleted': literal_column('id'),
                    'updated_at': literal_column('updated_at'),
                    'deleted_at': timeutils.utcnow()
                })
        else:
            delete_statement = table.delete().where(delete_condition)
        migrate_engine.execute(delete_statement)
def check_shadow_table(migrate_engine, table_name):
"""
This method checks that table with ``table_name`` and corresponding shadow
table have same columns.
"""
meta = MetaData()
meta.bind = migrate_engine
table = Table(table_name, meta, autoload=True)
shadow_table = Table(db._SHADOW_TABLE_PREFIX + table_name, meta,
autoload=True)
columns = dict([(c.name, c) for c in table.columns])
shadow_columns = dict([(c.name, c) for c in shadow_table.columns])
for name, column in columns.iteritems():
if name not in shadow_columns:
raise exception.NovaException(
_("Missing column %(table)s.%(column)s in shadow table")
% {'column': name, 'table': shadow_table.name})
shadow_column = shadow_columns[name]
if not isinstance(shadow_column.type, type(colum |
chipx86/reviewboard | reviewboard/diffviewer/models/legacy_file_diff_data.py | Python | mit | 837 | 0 | """LegacyFileDiffData model definition."""
from __future__ import unicode_literals
from django.db import models
from d | jango.utils.translation import ugettext_lazy as _
from djblets.db.fields import Base64Field, JSONField
class LegacyFileDiffData(models.Model):
    """Deprecated, legacy class for base64-encoded diff data.

    This is no longer populated, and exists solely to store legacy data
    that has not been migrated to :py:class:`RawFileDiffData`.
    """

    # 40-character hash of the diff content (presumably SHA1 -- confirm);
    # doubles as the primary key.
    binary_hash = models.CharField(_('hash'), max_length=40, primary_key=True)
    # The base64-encoded diff content itself.
    binary = Base64Field(_('base64'))
    extra_data = JSONField(null=True)

    class Meta:
        # Keeps the historic table name used before RawFileDiffData existed.
        app_label = 'diffviewer'
        db_table = 'diffviewer_filediffdata'
        verbose_name = _('Legacy File Diff Data')
        verbose_name_plural = _('Legacy File Diff Data Blobs')
|
benkuhn/benkuhn.net | bknet/wsgi.py | Python | mit | 1,132 | 0.000883 | """
WSGI config for bknet project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os

# Fall back to the project's settings module; a DJANGO_SETTINGS_MODULE
# already present in the environment takes precedence.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "bknet.settings")

# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()

# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
|
vdemir/pisi_package | LXQT/addon/qterminal/actions.py | Python | gpl-3.0 | 607 | 0.00659 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Licensed under the GNU General Public License, version 3.
# See the file http://www.gnu.org/copyleft/gpl.txt
from pisi.actionsapi import cmaketools |
from pisi.actionsapi import get
from pisi.actionsapi import pisitools
def setup():
    """Configure the source tree with CMake.

    Produces a Release build installing under /usr with libraries in
    /usr/lib.
    """
    # NOTE(review): the first cache variable was garbled in the source dump
    # and has been reconstructed as -DCMAKE_BUILD_TYPE; confirm against the
    # packaging history.
    cmaketools.configure("-DCMAKE_BUILD_TYPE:STRING=Release \
                          -DCMAKE_INSTALL_PREFIX=/usr \
                          -DCMAKE_INSTALL_LIBDIR=/usr/lib")
def build():
    """Compile the configured tree (runs make)."""
    cmaketools.make()
def install():
    """Install the build into the package image and ship the docs."""
    cmaketools.rawInstall("DESTDIR=%s" % get.installDIR())
    pisitools.dodoc("AUTHORS", "COPYING")
def add(a, b):
    """Return the sum of ``a`` and ``b``."""
    # Reconstructed from a garbled dump; the temporary local was dropped.
    return a + b
def sub(a, b):
    """Return ``a`` minus ``b``."""
    return a - b
def mul(a, b):
    """Return the product of ``a`` and ``b``."""
    # Reconstructed from a garbled dump; the temporary local was dropped.
    return a * b
def div(a, b):
    """Return ``a`` divided by ``b``.

    Propagates ZeroDivisionError when ``b`` is zero.
    """
    return a / b
|
jpush/jbox | Server/jbox/plugins/discourse.py | Python | mit | 3,219 | 0.00466 | import time
from flask import abort, Flask, jsonify, request
from . import plugins
from ..models import Developer, Integration
import jpush
from jpush import common
from .github import baseurl
@plugins.route('/discourse/<string:integration_id>/<string:token>/webhook', methods=['POST'])
def send_discourse_msg(integration_id, token):
    """Webhook endpoint: forward a Discourse notification through JPush.

    Looks up the Integration by id/token, resolves its owning developer,
    and pushes the posted title/message to devices tagged with
    "<dev_key>_<channel>".

    :param integration_id: id of the Integration the webhook belongs to
    :param token: shared secret that must match the Integration's token
    :returns: JSON response; 400/404 on lookup failure, 401/504/500 on
        JPush errors, otherwise 200.
    """
    print("discourse")
    integration = Integration.query.filter_by(integration_id=integration_id, token=token).first()
    if integration is None:
        abort(400)
    # channel dev_ID
    developer = Developer.query.filter_by(id=integration.developer_id).first()
    if developer is None:
        abort(404)
    # SECURITY(review): JPush app key/secret are hard-coded; they should be
    # loaded from configuration, not committed to source control.
    _jpush = jpush.JPush(u'1c29cb5814072b5b1f8ef829', u'b46af6af73ee8f9480d4edad')
    push = _jpush.create_push()
    _jpush.set_logging("DEBUG")
    push.audience = jpush.audience(
        jpush.tag(developer.dev_key + '_' + integration.channel.channel)
    )
    # Optional link carried by the Discourse payload.
    message_url = ""
    if 'url' in request.json:
        message_url = request.json['url']
    print("the message url " + message_url)
    android_msg = jpush.android(alert=request.json['title'], extras={'title': request.json['title'],
                                                                     'message': request.json['message']})
    ios_msg = jpush.ios(alert=request.json['title'], extras={'title': request.json['title'],
                                                             'message': request.json['message']})
    print(integration.icon)
    if integration.icon is None or len(integration.icon) == 0:
        url = ''
    else:
        url = baseurl + '/static/images/' + integration.icon
    push.notification = jpush.notification(alert=request.json['title'], android=android_msg, ios=ios_msg)
    push.message = jpush.message(msg_content=request.json['message'], title=request.json['title'], content_type="tyope",
                                 extras={'dev_key': developer.dev_key, 'channel': integration.channel.channel,
                                         'datetime': int(time.time()),
                                         'icon': url,
                                         'url': message_url,
                                         'integration_name': integration.name})
    push.options = {"time_to_live": 864000, "sendno": 12345, "apns_production": True}
    push.platform = jpush.all_
    try:
        response = push.send()
        print(response)
    except common.Unauthorized:
        print("Unauthorized")
        return jsonify({'error': 'Unauthorized request'}), 401
    except common.APIConnectionException:
        print('connect error')
        return jsonify({'error': 'connect error, please try again later'}), 504
    except common.JPushFailure:
        # Bug fix: the original assigned ``common.JPushFailure.response``
        # (a class-attribute read on the exception type) to an unused local.
        print("JPushFailure")
        return jsonify({'error': 'JPush failed, please read the code and refer code document'}), 500
    except Exception:
        # Narrowed from a bare ``except:``. The original deliberately falls
        # through to the 200 response below; that behavior is preserved.
        print("Exception")
    return jsonify({}), 200
|
Azure/azure-sdk-for-python | sdk/compute/azure-mgmt-compute/azure/mgmt/compute/v2020_10_01_preview/aio/operations/_cloud_service_roles_operations.py | Python | mit | 7,860 | 0.004198 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import functools
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.tracing.decorator_async import distributed_trace_async
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
from ..._vendor import _convert_request
from ...operations._cloud_service_roles_operations import build_get_request, build_list_request
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class CloudServiceRolesOperations:
    """CloudServiceRolesOperations async operations.

    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.

    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure.mgmt.compute.v2020_10_01_preview.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """

    models = _models

    def __init__(self, client, config, serializer, deserializer) -> None:
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config

    @distributed_trace_async
    async def get(
        self,
        role_name: str,
        resource_group_name: str,
        cloud_service_name: str,
        **kwargs: Any
    ) -> "_models.CloudServiceRole":
        """Gets a role from a cloud service.

        :param role_name: Name of the role.
        :type role_name: str
        :param resource_group_name:
        :type resource_group_name: str
        :param cloud_service_name:
        :type cloud_service_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: CloudServiceRole, or the result of cls(response)
        :rtype: ~azure.mgmt.compute.v2020_10_01_preview.models.CloudServiceRole
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.CloudServiceRole"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))

        request = build_get_request(
            role_name=role_name,
            resource_group_name=resource_group_name,
            cloud_service_name=cloud_service_name,
            subscription_id=self._config.subscription_id,
            template_url=self.get.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)

        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        deserialized = self._deserialize('CloudServiceRole', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/cloudServices/{cloudServiceName}/roles/{roleName}'}  # type: ignore

    @distributed_trace
    def list(
        self,
        resource_group_name: str,
        cloud_service_name: str,
        **kwargs: Any
    ) -> AsyncIterable["_models.CloudServiceRoleListResult"]:
        """Gets a list of all roles in a cloud service. Use nextLink property in the response to get the
        next page of roles. Do this till nextLink is null to fetch all the roles.

        :param resource_group_name:
        :type resource_group_name: str
        :param cloud_service_name:
        :type cloud_service_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either CloudServiceRoleListResult or the result of
         cls(response)
        :rtype:
         ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.compute.v2020_10_01_preview.models.CloudServiceRoleListResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.CloudServiceRoleListResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))

        def prepare_request(next_link=None):
            if not next_link:
                request = build_list_request(
                    # NOTE: this keyword pair was garbled in the source dump
                    # and has been reconstructed.
                    resource_group_name=resource_group_name,
                    cloud_service_name=cloud_service_name,
                    subscription_id=self._config.subscription_id,
                    template_url=self.list.metadata['url'],
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)
            else:
                request = build_list_request(
                    resource_group_name=resource_group_name,
                    cloud_service_name=cloud_service_name,
                    subscription_id=self._config.subscription_id,
                    template_url=next_link,
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)
                request.method = "GET"
            return request

        async def extract_data(pipeline_response):
            deserialized = self._deserialize("CloudServiceRoleListResult", pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, AsyncList(list_of_elem)

        async def get_next(next_link=None):
            request = prepare_request(next_link)

            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)

            return pipeline_response

        return AsyncItemPaged(
            get_next, extract_data
        )
    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/cloudServices/{cloudServiceName}/roles'}  # type: ignore
|
tallus/pos-scale-emulator | swrapper.py | Python | gpl-3.0 | 20,895 | 0.009476 | #!/usr/bin/env python
'''Module that provides functionality to emulate pos scales with
serial interfaces'''
# Copyright Paul Munday 2013
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import ConfigParser
import os
import random
import sys
import unittest

import serial
# CLASS DEFINITIONS
class Scale():
    """has serial device, terminator byte,start byte, weight request,weight unit

    A serial device takes the following:
    Device (device) . e.g. /dev/ttyS3, /dev/pts/16
    Speed (speed) in baud. Default is 9600
    Byte Size in bits (byte_size). 5,6,7,8 default is 8, 7 is also used in
    scales and represents true ascii. 8 is the modern standard.
    Device Parity (dev_parity): Can be none, even,odd, mark or space. Typically
    the first 3. A crude form of error checking. Odd means there are always an
    odd number of ones in the byte.
    Stop Bit (stop_bits): 1,2, or 1.5. 1.5 is read as 2 in posix compliant
    systems. Normally 1. Sent at the end of a character to allow the hardware
    to dectect the end of the character and resynchronize.
    Flow Control can be none (default) or hardware or software. Pyserial uses
    xonoxoff=1 for software control rtscts or dsrdtr for the diferent types of
    hardware control.
    There is also a read read timeout value (timeout) used by pyserial that
    is passed before the flowcontrol settings. Defaults to none.
    The following are specific to the scale class.
    terminator_byte: sent by the scales to mark the end of the message.
    Values include ASCII carriage return (CR) 0x0E,
    ASCII Record Separator (RS) 0x1E.
    weight_request. Scales operate by receiving single characters as commands.
    This is the character that initiates a weight request.
    weight_unit , k, g, lb
    """

    def __init__(self, device='', speed = 9600, byte_size = '8',
                 dev_parity = 'none', stop_bits = '1', time_out = None,
                 flowcontrol = 'None', terminator_byte = '', start_byte = None,
                 weight_request='', weight_unit=''):
        # Raw configuration as supplied by the caller.
        self.device = device
        self.speed = speed
        self.byte_size = byte_size
        self.dev_parity = dev_parity
        self.stop_bits = stop_bits
        self.time_out = time_out
        self.flowcontrol = flowcontrol
        self.terminator_byte = terminator_byte
        self.start_byte = start_byte
        self.weight_request = weight_request
        self.weight_unit = weight_unit
        # define some dictionaries to address serial constants
        d_bytesize = { '5' : serial.FIVEBITS, 'five' : serial.FIVEBITS, '6' : serial.SIXBITS, 'six' : serial.SIXBITS, '7' : serial.SEVENBITS, 'seven' : serial.SEVENBITS, '8' : serial.EIGHTBITS, 'eight' : serial.EIGHTBITS}
        # versions of python < 2.7.(3) e.g.2.6.5 in Ubuntu 10.04 doen't have
        # some of the more obscure options defined
        if sys.version_info>(2,7,3):
            d_parity = { 'none' : serial.PARITY_NONE, 'odd' : serial.PARITY_ODD , 'even' : serial.PARITY_EVEN, 'mark' : serial.PARITY_MARK, 'space' : serial.PARITY_SPACE }
        else:
            d_parity = { 'none' : serial.PARITY_NONE, 'odd' : serial.PARITY_ODD , 'even' : serial.PARITY_EVEN}
        if sys.version_info>(2,7,3):
            d_stopbits = { '1' : serial.STOPBITS_ONE, 'one' : serial.STOPBITS_ONE, '2' : serial.STOPBITS_TWO, 'two' : serial.STOPBITS_TWO, '1.5' : serial.STOPBITS_ONE_POINT_FIVE, 'one_point_five' : serial.STOPBITS_ONE_POINT_FIVE }
        else:
            d_stopbits = { '1' : serial.STOPBITS_ONE, 'one' : serial.STOPBITS_ONE, '2' : serial.STOPBITS_TWO, 'two' : serial.STOPBITS_TWO}
        # set the values we use to initialize the serial port
        self.port = device
        b_size = str(byte_size)
        self.bytesize = d_bytesize[b_size.lower()]
        self.parity = d_parity[dev_parity.lower()]
        s_bit = str(stop_bits)
        self.stopbits = d_stopbits[s_bit.lower()]
        self.timeout = time_out
        self.baudrate = speed
        # this wont' work as it doesn't get passed on correctly, see commented
        # example above to h
        try:
            #Initializes serial port.
            self.serial_port = serial.Serial(self.device, self.baudrate,
                self.bytesize)
            #, self.parity) , self.stopbits, self.timeout, self.flowcontrol)
        except serial.SerialException:
            print 'could not open serial device'
        # NOTE(review): if opening failed above, self.serial_port is unset
        # and the attribute assignments below raise AttributeError.
        if flowcontrol == 'software':
            self.serial_port.xonxoff = 1
        elif flowcontrol == 'hardware':
            self.serial_port.rtscts = 1

    def send_weight(self, weight):
        '''sends weight value to pos. N.B. Does not receive weight_request.
        Note this does not attempt to check the weight value is
        correctly formatted. This should be implented in a wrapper
        function in the relevant class'''
        # none of the defined scales in the pos use a start_byte AFAIK
        if self.start_byte:
            send_string = self.start_byte + weight + self.terminator_byte
        else:
            send_string = weight + self.terminator_byte
        self.serial_port.write(send_string)

    def read_weight(self, signal):
        '''reads (weight)from the serial port on receiving
        the appropriate signal'''
        self.signal = signal
        if self.signal == self.weight_request:
            self.time_out = 10
            self.serial_port.write(self.signal)
            # Accumulate bytes until the scale's terminator byte arrives.
            read_weight = bytearray()
            while True:
                read_byte = self.serial_port.read(1)
                if read_byte == self.terminator_byte:
                    break
                else:
                    read_weight.append(read_byte)
            # NOTE(review): ord() on a multi-byte bytearray raises TypeError;
            # presumably the scale answers with a single byte -- confirm.
            if self.start_byte:
                weight = str(ord(read_weight[1:]))
            else:
                weight = str(ord(read_weight))
            return weight
        else:
            # NOTE(review): SignalException is not defined or imported in
            # this module -- confirm where it comes from.
            raise SignalException('expected: ' + self.weight_request
                + 'got: ' + str(ord(self.signal)))

    # def get_weight(self,signal):
    #     self.signal = signal
    #     read_weight = read_weight(self.signal)
    #     # insert conversion here
    #     weight = read_weight
    #     return weight

    def pos_test(self, dummy_weight):
        '''Tests to ensure we can receive the correct command from the pos,
        and that it correctly receives the weight. Used for debugging.
        You will need to manually initiate a weight request on the pos by
        adding a product (marked scale in properties) to a customers items
        and the checking it has been added correctly. Since we are just testing
        we won't be acutually using any scales but sending a dummy value.
        Note you won't receive an error if you send an incorrectly formatted
        weight, though one will show in the pos. '''
        try:
            self.serial_port.open()
        except:
            print('could not open serial port')
        read = self.serial_port.read(1)
        if read == self.weight_request:
            self.send_weight(dummy_weight)
            print( 'sent ' + dummy_weight)
        else:
            print('expected ' + self.weight_request
                + ' got: ' + str(ord(read)))
        self.serial_port.close()
class Dialog(Scale):
"""extends Scale, corresponds to built in type Dialog1.
This will be used to communicate with the POS.
Weight Request 0x05 (ASCII STX: Start of TeXt). Weighs in grams. """
de |
data-driven-science/ddsm-cloud | common/__init__.py | Python | mit | 101 | 0 | # The c | ommon module
# from common import models
# from common import tools
# from common i | mport core
|
hyiltiz/cloudmusicbox | scripts/listdir.py | Python | gpl-3.0 | 213 | 0.093897 | import os
def recurdir():
cwd=os.getcwd();
dirlist=os.listdir(cwd)
for i in | dirlist:
if(os.path.isdir(i)):
os.chdir(cwd+'/'+i);
recurdir()
os.chdir(cwd);
else:
| print(cwd+'/'+i)
recurdir() |
tekton/DocuCanvas | accounts/migrations/0010_auto__add_usertemplates__add_unique_usertemplates_user_viewName.py | Python | gpl-3.0 | 8,389 | 0.008225 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    def forwards(self, orm):
        """Apply the migration: create accounts_usertemplates plus its
        (user, viewName) unique constraint."""
        # Adding model 'UserTemplates'
        db.create_table(u'accounts_usertemplates', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('user', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'])),
            ('viewName', self.gf('django.db.models.fields.CharField')(max_length=255)),
            ('example_url', self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True)),
            ('pathToTemplate', self.gf('django.db.models.fields.CharField')(max_length=255)),
        ))
        db.send_create_signal(u'accounts', ['UserTemplates'])

        # Adding unique constraint on 'UserTemplates', fields ['user', 'viewName']
        db.create_unique(u'accounts_usertemplates', ['user_id', 'viewName'])
    def backwards(self, orm):
        """Reverse the migration: drop the unique constraint first, then
        the table itself."""
        # Removing unique constraint on 'UserTemplates', fields ['user', 'viewName']
        db.delete_unique(u'accounts_usertemplates', ['user_id', 'viewName'])

        # Deleting model 'UserTemplates'
        db.delete_table(u'accounts_usertemplates')
models = {
u'accounts.account': {
'Meta': {'object_name': 'Account'},
| 'assignable': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'avatar': ('django.db.models.fields.CharField', [], {'default': "'/static/img/pony.png'", 'max_length': '255', 'null': 'True', 'blank': 'True'}),
'facebook': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'git_account': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'github_account': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'google_plus': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'location': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'organization': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'twitter': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
},
u'accounts.googleaccount': {
'Meta': {'object_name': 'GoogleAccount'},
'account': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounts.Account']", 'null': 'True'}),
'account_label': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'credentials': ('oauth2client.django_orm.CredentialsField', [], {'null': 'True'}),
'google_account_id': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
},
u'accounts.recordpermission': {
'Meta': {'unique_together': "(('contentType', 'user', 'recordID'),)", 'object_name': 'RecordPermission'},
'canDelete': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'canUpdate': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'canView': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'contentType': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'recordID': ('django.db.models.fields.PositiveIntegerField', [], {}),
'updatableFields': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"}),
'viewableFields': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255', 'blank': 'True'})
},
u'accounts.usertemplates': {
'Meta': {'unique_together': "(('user', 'viewName'),)", 'object_name': 'UserTemplates'},
'example_url': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'pathToTemplate': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"}),
'viewName': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
|
brianzi/quantumsim | quantumsim/pauli_vectors/pauli_vector.py | Python | gpl-3.0 | 3,533 | 0 | import abc
import pytools
from quantumsim.algebra.algebra import dm_to_pv, pv_to_dm
class PauliVectorBase(metaclass=abc.ABCMeta):
"""A metaclass, that defines standard interface for Quantumsim density
matrix backend.
Every instance, that implements the interface of this class, should call
`super().__init__` in the beginning of its execution, because a lot of
sanity checks are done here.
Parameters
----------
bases : list of quantumsim.bases.PauliBasis
A descrption of the basis for the subsystems.
pv : array or None
Pauli vector, that represents the density matrix in the selected
bases. If `None`, density matrix is initialized in
:math:`\\left| 0 \\cdots 0 \\right\\rangle` state.
force : bool
By default creation of too large density matrix (more than
:math:`2^22` elements currently) is not allowed. Set this to `True`
if you know what you are doing.
"""
_size_max = 2**22
# noinspection PyUnusedLocal
@abc.abstractmethod
def __init__(self, bases, pv=None, *, force=False):
self.bases = list(bases)
if self.size > self._size_max and not force:
raise ValueError(
'Density matrix of the system is going to have {} items. It '
'is probably too much. If you know what you are doing, '
'pass `force=True` argument to the constructor.')
@classmethod
def from_pv(cls, pv, bases, *, force=False):
return cls(bases, pv, force=force)
@abc.abstractmethod
def to_pv(self):
"""Get data in a form of Numpy array"""
pass
@classmethod
def from_dm(cls, dm, bases, *, force=False):
if not hasattr(bases, '__iter__'):
n_qubits = len(dm) // bases.dim_hilbert
bases = [bases] * n_qubits
return cls(bases, dm_to_pv(dm, bases), force=force)
def to_dm(self):
return pv_to_dm(self.to_pv(), self.bases)
@property
def n_qubits(self):
return len(self.bases)
@property
def dim_hilbert(self):
return tuple((b.dim_hilbert for b in self.bases))
@property
def size(self):
return pytools.product(self.dim_hilbert) ** 2
@property
def dim_pauli(self):
return tuple([pb.dim_pauli for pb in self.bases])
@abc.abstractmethod
def apply_ptm(self, operation, *qubits):
pass
@abc.abstractmethod
def diagonal(self, *, get_data=True):
pass
@abc.abstractmethod
def trace(self):
pass
@abc.abstractmethod
def partial_trace(self, *qubits):
pass
@abc.abstractmethod
def meas_prob(self, qubit):
pass
@abc.abstr | actmethod
def renormalize(self):
pass
@abc.abstractmethod
def copy(self):
pass
def _validate_qubit(self, number, name):
if number < 0 or number >= self.n_qubits:
raise ValueError(
"`{name}` number {n} does not exist in the system, "
"it contains {n_qubits} qubits in total."
| .format(name=name, n=number, n_qubits=self.n_qubits))
# noinspection PyMethodMayBeStatic
def _validate_ptm_shape(self, ptm, target_shape, name):
if ptm.shape != target_shape:
raise ValueError(
"`{name}` shape must be {target_shape}, got {real_shape}"
.format(name=name,
target_shape=target_shape,
real_shape=ptm.shape))
|
RobAltena/deeplearning4j | libnd4j/include/graph/generated/nd4j/graph/UIHistogramType.py | Python | apache-2.0 | 176 | 0.005682 | # aut | omatically generated by the FlatBuffers compiler, do not modify
# namespace: graph
class UIHistogramType(object):
DISCRETE = 0
EQUAL_SPAC | ING = 1
CUSTOM = 2
|
razzius/PyClassLessons | instructors/projects/decoding_fun/examples/rot13.py | Python | mit | 2,202 | 0.008174 | """
A sample encryption library using rot13.
"""
# File encoding PEP, see: http://legacy.python.org/dev/peps/pep-0263/
# -*- coding: utf-8 -*-
import re
alphabet = 'abcdefghijklmnopqrstuvwxyz'
def assign_and_return_positions(alphabet, rotation):
""" Build and return a dictionary for substitution.
"""
i = 0
rotation_dict = dict()
while i < len(alphabet):
# The modulus function allows an arbitrary wrap on values
# greater than len(alphabet).
i_plus_rotation = (i + rotation) % len(alphabet)
rotation_dict[alphabet[i]] = alphabet[i_plus_rotation]
i += 1
| return rotation_dict
def apply_substitution(subst_dict, cleaned_string):
""" Apply a substitution dictionary to a string.
"""
encoded_string = ''
# Slightly confusing, t | he get function will get the value of the
# key named letter or will return letter.
for letter in cleaned_string:
letter = letter.lower()
if letter in subst_dict:
encoded_string += subst_dict.get(letter, letter)
else:
encoded_string += letter
return encoded_string
def clean_string(input_string):
""" Reduce the input string to lowercase letters some punctuation.
This is text munging. See the wikipedia article and python re docs.
"""
allowed_chars = r'[^a-z .,;:\-_0-9]'
repl_char = '' # delete... will this work with None?
return re.sub(allowed_chars,repl_char,input_string)
if __name__ == '__main__':
""" Perform rotational encryption on an input.
"""
alphabet = 'abcdefghijklmnopqrstuvwxyz'
rotation = 13
rotation_dict = assign_and_return_positions(alphabet, rotation)
input_string = raw_input("Enter a phrase to encode:")
#input_string = "12345 - Alphabet Soup; Acceptable Inputs: a-z, comma, period, colon, space, hyphen, underscore, and semicolon. Uppercase letters will be converted to lowercase."
cleaned_input = clean_string(input_string.lower())
print "Cleaned input:", cleaned_input
encoded_input = apply_substitution(rotation_dict, cleaned_input)
print "Encoded version:", encoded_input
|
omwomotieno/tunza_v3 | call/callback.py | Python | mit | 2,707 | 0.001108 | #!/usr/bin/python2.7
from django.http import HttpResponse
from AfricasTalkingGateway import AfricasTalkingGateway, AfricasTalkingGatewayException
from reminder.models import Reminder
import sys
import os
import django
sys.path.append("/home/foxtrot/Dropbox/tunza_v2/")
os.environ["DJANGO_SETTINGS_MODULE"] = "config.settings.local"
django.setup()
username = "OtisKe"
apikey = "07984423a278ead54fee35d3daf956598deb51405b27fe70f1e2dfe964be5c04"
gateway = AfricasTalkingGateway(username, apikey)
# replace this line with a list of numbers from the
# patient__patient_contact linked to reminder model
reminder_service = Reminder.objects.values_list('service_id',
'patient_id',
'service__service_name',
'service__service_url',
'patient__patient_contact', )
# replace this message with service about from service__service_about
# linked to reminder model
def voice_callback(request):
if request.method == 'POST':
is_active = request.values.get('isActive', None)
session_id = request.values.get('sessionId', None)
caller_number = request.values.get('callerNumber', None)
direction = request.values.get('direction', None)
print "is_active -> ", is_active
if is_active == str(0):
# Compose the response
duration = request.values.get('durationInSeconds', None)
currency_code = request.v | alues.get('currencyCode', None)
amount = request.values.get('amount', None)
# update session info to Redis
print duration, currency_code, amount
respond = '<?xml version="1.0" encoding="UTF-8"?>'
respond += '<Response>'
respond += '<Say playBeep="false" >Welcome to the | reminder system</Say>'
respond += '</Response>'
resp = HttpResponse(respond, 200, content_type='application/xml')
resp['Cache-Control'] = 'no-cache'
return resp
if is_active == str(1):
# Compose the response
respond = '<?xml version="1.0" encoding="UTF-8"?>'
respond += '<Response>'
respond += '<Say playBeep="false" >Welcome to mTunza.org</Say>'
respond += '</Response>'
resp = HttpResponse(respond, 200, content_type='application/xml')
resp['Cache-Control'] = 'no-cache'
return resp
else:
resp = HttpResponse('Bad Request', 400, content_type='application/xml', )
resp['Cache-Control'] = 'no-cache'
return resp
|
maxive/erp | addons/fleet/models/fleet_vehicle.py | Python | agpl-3.0 | 17,170 | 0.004601 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from dateutil.relativedelta import relativedelta
from odoo import api, fields, models, _
from odoo.osv import expression
class FleetVehicle(models.Model):
_inherit = ['mail.thread', 'mail.activity.mixin']
_name = 'fleet.vehicle'
_description = 'Information on a vehicle'
_order = 'license_plate asc, acquisition_date asc'
def _get_default_state(self):
state = self.env.ref('fleet.fleet_vehicle_state_registered', raise_if_not_found=False)
return state and state.id or False
name = fields.Char(compute="_compute_vehicle_name", store=True)
active = fields.Boolean('Active', default=True, track_visibility="onchange")
company_id = fields.Many2one('res.company', 'Company')
license_plate = fields.Char(track_visibility="onchange",
help='License plate number of the vehicle (i = plate number for a car)')
vin_sn = fields.Char('Chassis Number', help='Unique number written on the vehicle motor (VIN/SN number)', copy=False)
driver_id = fields.Many2one('res.partner', 'Driver', track_visibility="onchange", help='Driver of the vehicle', copy=False)
model_id = fields.Many2one('fleet.vehicle.model', 'Model',
track_visibility="onchange", required=True, help='Model of the vehicle')
log_fuel = fields.One2many('fleet.vehicle.log.fuel', 'vehicle_id', 'Fuel Logs')
log_services = fields.One2many('fleet.vehicle.log.services', 'vehicle_id', 'Services Logs')
log_contracts = fields.One2many('fleet.vehicle.log.contract', 'vehicle_id', 'Contracts')
cost_count = fields.Integer(compute="_compute_count_all", string="Costs")
contract_count = fields.Integer(compute="_compute_count_all", string='Contract Count')
service_count = fields.Integer(compute="_compute_count_all", string='Services')
| fuel_logs_count = fields.Integer(compute="_compute_count_all", string='Fuel Log Count')
odometer_count = fields.Integer(compute="_compute_count_all", string='Odometer')
acquisition_date = fields.Date('Immatriculation Date', required=False,
default=fields.Date.today, help='Date when the vehicle has been immatriculated')
first_contrac | t_date = fields.Date(string="First Contract Date", default=fields.Date.today)
color = fields.Char(help='Color of the vehicle')
state_id = fields.Many2one('fleet.vehicle.state', 'State',
default=_get_default_state, group_expand='_read_group_stage_ids',
track_visibility="onchange",
help='Current state of the vehicle', ondelete="set null")
location = fields.Char(help='Location of the vehicle (garage, ...)')
seats = fields.Integer('Seats Number', help='Number of seats of the vehicle')
model_year = fields.Char('Model Year',help='Year of the model')
doors = fields.Integer('Doors Number', help='Number of doors of the vehicle', default=5)
tag_ids = fields.Many2many('fleet.vehicle.tag', 'fleet_vehicle_vehicle_tag_rel', 'vehicle_tag_id', 'tag_id', 'Tags', copy=False)
odometer = fields.Float(compute='_get_odometer', inverse='_set_odometer', string='Last Odometer',
help='Odometer measure of the vehicle at the moment of this log')
odometer_unit = fields.Selection([
('kilometers', 'Kilometers'),
('miles', 'Miles')
], 'Odometer Unit', default='kilometers', help='Unit of the odometer ', required=True)
transmission = fields.Selection([('manual', 'Manual'), ('automatic', 'Automatic')], 'Transmission', help='Transmission Used by the vehicle')
fuel_type = fields.Selection([
('gasoline', 'Gasoline'),
('diesel', 'Diesel'),
('electric', 'Electric'),
('hybrid', 'Hybrid')
], 'Fuel Type', help='Fuel Used by the vehicle')
horsepower = fields.Integer()
horsepower_tax = fields.Float('Horsepower Taxation')
power = fields.Integer('Power', help='Power in kW of the vehicle')
co2 = fields.Float('CO2 Emissions', help='CO2 emissions of the vehicle')
image = fields.Binary(related='model_id.image', string="Logo")
image_medium = fields.Binary(related='model_id.image_medium', string="Logo (medium)")
image_small = fields.Binary(related='model_id.image_small', string="Logo (small)")
contract_renewal_due_soon = fields.Boolean(compute='_compute_contract_reminder', search='_search_contract_renewal_due_soon',
string='Has Contracts to renew', multi='contract_info')
contract_renewal_overdue = fields.Boolean(compute='_compute_contract_reminder', search='_search_get_overdue_contract_reminder',
string='Has Contracts Overdue', multi='contract_info')
contract_renewal_name = fields.Text(compute='_compute_contract_reminder', string='Name of contract to renew soon', multi='contract_info')
contract_renewal_total = fields.Text(compute='_compute_contract_reminder', string='Total of contracts due or overdue minus one',
multi='contract_info')
car_value = fields.Float(string="Catalog Value (VAT Incl.)", help='Value of the bought vehicle')
residual_value = fields.Float()
@api.depends('model_id.brand_id.name', 'model_id.name', 'license_plate')
def _compute_vehicle_name(self):
for record in self:
record.name = record.model_id.brand_id.name + '/' + record.model_id.name + '/' + (record.license_plate or _('No Plate'))
def _get_odometer(self):
FleetVehicalOdometer = self.env['fleet.vehicle.odometer']
for record in self:
vehicle_odometer = FleetVehicalOdometer.search([('vehicle_id', '=', record.id)], limit=1, order='value desc')
if vehicle_odometer:
record.odometer = vehicle_odometer.value
else:
record.odometer = 0
def _set_odometer(self):
for record in self:
if record.odometer:
date = fields.Date.context_today(record)
data = {'value': record.odometer, 'date': date, 'vehicle_id': record.id}
self.env['fleet.vehicle.odometer'].create(data)
def _compute_count_all(self):
Odometer = self.env['fleet.vehicle.odometer']
LogFuel = self.env['fleet.vehicle.log.fuel']
LogService = self.env['fleet.vehicle.log.services']
LogContract = self.env['fleet.vehicle.log.contract']
Cost = self.env['fleet.vehicle.cost']
for record in self:
record.odometer_count = Odometer.search_count([('vehicle_id', '=', record.id)])
record.fuel_logs_count = LogFuel.search_count([('vehicle_id', '=', record.id)])
record.service_count = LogService.search_count([('vehicle_id', '=', record.id)])
record.contract_count = LogContract.search_count([('vehicle_id', '=', record.id),('state','!=','closed')])
record.cost_count = Cost.search_count([('vehicle_id', '=', record.id), ('parent_id', '=', False)])
@api.depends('log_contracts')
def _compute_contract_reminder(self):
for record in self:
overdue = False
due_soon = False
total = 0
name = ''
for element in record.log_contracts:
if element.state in ('open', 'expired') and element.expiration_date:
current_date_str = fields.Date.context_today(record)
due_time_str = element.expiration_date
current_date = fields.Date.from_string(current_date_str)
due_time = fields.Date.from_string(due_time_str)
diff_time = (due_time - current_date).days
if diff_time < 0:
overdue = True
total += 1
if diff_time < 15 and diff_time >= 0:
due_soon = True
total += 1
if overdue or due_soon:
log_contract = self.env['fleet.vehicle.log.contract'].search([
('vehicle_id', '=', record.id),
('state', 'in', ('open', 'expired'))
], limit=1, order='expiration_date a |
ameihm0912/MozDef | cron/healthToMongo.py | Python | mpl-2.0 | 5,358 | 0.00224 | #!/usr/bin/env python
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# Copyright (c) 2014 Mozilla Corporation
#
# Contributors:
# Anthony Verez averez@mozilla.com
import logging
import requests
import sys
from datetime import datetime
from configlib import getConfig, OptionParser
from logging.handlers import SysLogHandler
from pymongo import MongoClient
import os
sys.path.append(os.path.join(os.path.dirname(os.path.abspath(__file__)), '../lib'))
from utilities.toUTC import toUTC
from elasticsearch_client import ElasticsearchClient
from query_models import SearchQuery, TermMatch
logger = logging.getLogger(sys.argv[0])
def loggerTimeStamp(self, record, datefmt=None):
return toUTC(datetime.now()).isoformat()
def initLogger():
logger.level = logging.INFO
formatter = logging.Formatter(
'%(asctime)s - %(name)s - %(levelname)s - %(message)s')
formatter.formatTime = loggerTimeStamp
if options.output == 'syslog':
logger.addHandler(
SysLogHandler(
address=(options.sysloghostname,
options.syslogport)))
else:
sh = logging.StreamHandler(sys.stderr)
sh.setFormatter(formatter)
logger.addHandler(sh)
def getFrontendStats(es):
search_query = SearchQuery(minutes=15)
search_query.add_must([
TermMatch('_type', 'mozdefhealth'),
TermMatch('category', 'mozdef'),
TermMatch('tags', 'latest'),
])
results = search_query.execute(es, indices=['events'])
return results['hits']
def writeFrontendStats(data, mongo):
# Empty everything before
mongo.healthfrontend.remove({})
for host in data:
for key in host['_source']['details'].keys():
# remove unwanted data
if '.' in key:
del host['_source']['details'][key]
mongo.healthfrontend.insert(host['_source'])
def writeEsClusterStats(data, mongo):
# Empty everything before
mongo.healthescluster.remove({})
mongo.healthescluster.insert(data)
def getEsNodesStats():
r = requests.get(options.esservers[0] + '/_nodes/stats/os,jvm,fs')
jsonobj = r.json()
results = []
for nodeid in jsonobj['nodes']:
# Skip non masters and data nodes since it won't have full stats
if ('attributes' in jsonobj['nodes'][nodeid] and
jsonobj['nodes'][nodeid]['attributes']['master'] == 'false' and
jsonobj['nodes'][nodeid]['attributes']['data'] == 'false'):
continue
results.append({
'hostname': jsonobj['nodes'][nodeid]['host'],
'disk_free': jsonobj['nodes'][nodeid]['fs']['total']['free_in_bytes'] / (1024 * 1024 * 1024),
'disk_total': jsonobj['node | s'][nodeid]['f | s']['total']['total_in_bytes'] / (1024 * 1024 * 1024),
'mem_heap_per': jsonobj['nodes'][nodeid]['jvm']['mem']['heap_used_percent'],
'cpu_usage': jsonobj['nodes'][nodeid]['os']['cpu_percent'],
'load': jsonobj['nodes'][nodeid]['os']['load_average']
})
return results
def writeEsNodesStats(data, mongo):
# Empty everything before
mongo.healthesnodes.remove({})
for nodedata in data:
mongo.healthesnodes.insert(nodedata)
def getEsHotThreads():
r = requests.get(options.esservers[0] + '/_nodes/hot_threads')
results = []
for line in r.text.split('\n'):
if 'cpu usage' in line:
results.append(line)
return results
def writeEsHotThreads(data, mongo):
# Empty everything before
mongo.healtheshotthreads.remove({})
for line in data:
mongo.healtheshotthreads.insert({'line': line})
def main():
logger.debug('starting')
logger.debug(options)
try:
es = ElasticsearchClient((list('{0}'.format(s) for s in options.esservers)))
client = MongoClient(options.mongohost, options.mongoport)
# use meteor db
mongo = client.meteor
writeFrontendStats(getFrontendStats(es), mongo)
writeEsClusterStats(es.get_cluster_health(), mongo)
writeEsNodesStats(getEsNodesStats(), mongo)
writeEsHotThreads(getEsHotThreads(), mongo)
except Exception as e:
logger.error("Exception %r sending health to mongo" % e)
def initConfig():
# output our log to stdout or syslog
options.output = getConfig('output', 'stdout', options.configfile)
# syslog hostname
options.sysloghostname = getConfig('sysloghostname', 'localhost',
options.configfile)
# syslog port
options.syslogport = getConfig('syslogport', 514, options.configfile)
# elastic search server settings
options.esservers = list(getConfig('esservers', 'http://localhost:9200',
options.configfile).split(','))
options.mongohost = getConfig('mongohost', 'localhost', options.configfile)
options.mongoport = getConfig('mongoport', 3001, options.configfile)
if __name__ == '__main__':
parser = OptionParser()
parser.add_option(
"-c",
dest='configfile',
default=sys.argv[0].replace('.py', '.conf'),
help="configuration file to use")
(options, args) = parser.parse_args()
initConfig()
initLogger()
main()
|
miguel7penteado/python-qt4 | 6/6.py | Python | gpl-3.0 | 1,622 | 0.011097 | #!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Miguel Suez Xve
"""
import sys
from PyQt4 import QtGui
"""=================================================================="""
""" CLASSE QUE DESENHA A JANELA """
"""=================================================================="""
""" Aqui | eu defino um objeto janela atraves de uma classe"""
class MinhaClasse(QtGui.QWidget):
""" Aqui estou chamando o construtor da classe pai"""
def __init__(self):
super(MinhaClasse, self).__init__()
self.initUI()
"""Desenhando a janela Principal"""
def initUI(self):
self.resize(250, 150)
self.center()
self.setWindowTitle('Centralizar')
self.show()
"""Centralizando | a Janela Principal"""
def center(self):
retangulo_janela = self.frameGeometry()
resolucao_tela = QtGui.QDesktopWidget().availableGeometry().center()
retangulo_janela.moveCenter(resolucao_tela)
self.move(retangulo_janela.topLeft())
"""=================================================================="""
"""=================================================================="""
""" Main """
"""=================================================================="""
def main():
minha_aplicacao = QtGui.QApplication(sys.argv)
objeto_jabela_principal = MinhaClasse()
sys.exit(minha_aplicacao.exec_())
if __name__ == '__main__':
main()
|
halober/ovirt-engine | packaging/setup/plugins/ovirt-engine-setup/ovirt-engine/upgrade/asynctasks.py | Python | apache-2.0 | 14,855 | 0.00074 | #
# ovirt-engine-setup -- ovirt engine setup
# Copyright (C) 2013-2014 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
""" DB Async tasks handling plugin."""
import time
import gettext
_ = lambda m: gettext.dgettext(message=m, domain='ovirt-engine-setup')
from otopi import constants as otopicons
from otopi import util
from otopi import plugin
from otopi import base
from ovirt_engine_setup import dialog
from ovirt_engine_setup.engine import constants as oenginecons
from ovirt_engine_setup.engine_common import constants as oengcommcons
from ovirt_engine_setup.engine_common import database
from ovirt_engine_setup.engine import vdcoption
from async_tasks_map import ASYNC_TASKS_MAP
@util.export
class Plugin(plugin.PluginBase):
""" DB Async tasks handling plugin."""
class _engineInMaintenance(base.Base):
def __init__(
self,
parent,
dbstatement,
):
self._parent = parent
self._origTimeout = 0
self._dbstatement = dbstatement
def _getCurrentTimeout(self):
return vdcoption.VdcOption(
statement=self._dbstatement,
).getVdcOption(
name='AsyncTaskZombieTaskLifeInMinutes',
ownConnection=True,
)
def _setEngineMode(self, maintenance, timeout=0):
mode = (
'MAINTENANCE' if maintenance
else 'ACTIVE'
)
try:
self._parent.logger.debug(
'Setting engine into {mode} mode'.format(
mode=mode,
)
)
vdcoption.VdcOption(
statement=self._dbstatement,
).updateVdcOptions(
options=(
{
'name': 'EngineMode',
'value': mode,
},
{
'name': 'AsyncTaskZombieTaskLifeInMinutes',
'value': timeout,
},
),
ownConnection=True,
)
except Exception as e:
self._parent.logger.debug(
'Cannot set engine mode',
exc_info=True,
)
raise RuntimeError(
_(
'Failed to set engine to {mode} mode.\n'
'Error: {error}\n'
'Please check that engine is in correct state '
'and try running upgrade again.'
).format(
mode=mode,
error=e,
)
)
def __enter__(self):
self._origTimeout = self._getCurrentTimeout()
self._setEngineMode(
maintenance=True,
)
self._parent.services.state(
name=oenginecons.Const.ENGINE_SERVICE_NAME,
state=True,
)
def __exit__(self, exc_type, exc_value, traceback):
self._parent.services.state(
name=oenginecons.Const.ENGINE_SERVICE_NAME,
state=False,
)
self._setEngineMode(
maintenance=False,
timeout=self._origTimeout,
)
def _clearZombieTasks(self):
rc, tasks, stderr = self.execute(
args=(
oenginecons.FileLocations.OVIRT_ENGINE_TASKCLEANER,
'-l', self.environment[otopicons.CoreEnv.LOG_FILE_NAME],
'-u', self.environment[oenginecons.EngineDBEnv.USER],
'-s', self.environment[oenginecons.EngineDBEnv.HOST],
'-p', str(self.environment[oenginecons.EngineDBEnv.PORT]),
'-d', self.environment[oenginecons.EngineDBEnv.DATABASE],
'-R',
'-A',
'-J',
'-q',
),
raiseOnError=False,
envAppend={
'DBFUNC_DB_PGPASSFILE': self.environment[
oenginecons.EngineDBEnv.PGPASS_FILE
]
},
)
if rc:
raise RuntimeError(
_(
'Failed to clear zombie tasks. Please access support '
'in attempt to resolve the problem'
)
)
def _getRunningTasks(self, dbstatement):
tasks = dbstatement.execute(
statement="""
select
async_tasks.action_type,
async_tasks.task_id,
async_tasks.started_at,
storage_pool.name
from async_tasks, storage_pool
where async_tasks.storage_pool_id = storage_pool.id
""",
ownConnection=True,
transaction=False,
)
return (
[
_(
'Task ID: {task_id:30}\n'
'Task Name: {task_name:30}\n'
'Task Description: {task_desc:30}\n'
'Started at: {started_at:30}\n'
'DC Name: {name:30}'
).format(
task_id=entry['task_id'],
task_name=ASYNC_TASKS_MAP[entry['action_type']][0],
task_desc=ASYNC_TASKS_MAP[entry['action_type']][1],
started_at=entry['started_at'],
name=entry['name'],
)
for entry in tasks
]
)
def _getCommandEntitiesTableExists(self, dbstatement):
command_entities = dbstatement.execute(
statement="""
select relname
from pg_class
where relname = 'command_entities'
""",
ownConnection=True,
transaction=False,
)
return (
[
_(
'Relname: {relname:30}'
).format(
relna | me=entry['relname'],
)
for entry in command_entities
]
)
def _getRunningCommands(self, dbstatement):
if n | ot self._getCommandEntitiesTableExists(dbstatement):
return None
commands = dbstatement.execute(
statement="""
select
command_entities.command_type,
command_entities.command_id,
command_entities.created_at,
command_entities.status
from command_entities
where command_entities.callback_enabled = 'true'
and command_entities.callback_notified = 'false'
""",
ownConnection=True,
transaction=False,
)
return (
[
_(
'Command ID: {command_id:30}\n'
'Command Type: {command_type:30}\n'
'Created at: {created_at:30}\n'
'Status: {status:30}'
).format(
command_id=entry['command_id'],
command_type=entry['command_type'],
created_at=entry['created_at'],
status=entry['status'],
)
for entry in commands
]
)
def _getCompensations(self, dbstatement |
atmark-techno/atmark-dist | user/python/Lib/test/test_strop.py | Python | gpl-2.0 | 3,659 | 0.002733 | from test_support import verbose
import strop, sys
def test(name, input, output, *args):
if verbose:
print 'string.%s%s =? %s... ' % (name, (input,) + args, output),
f = getattr(strop, name)
try:
value = apply(f, (input,) + args)
except:
value = sys.exc_type
if value != output:
if verbose:
print 'no'
print f, `input`, `output`, `value`
else:
if verbose:
print 'yes'
test('atoi', " 1 ", 1)
test('atoi', " 1x", ValueError)
test('atoi', " x1 ", ValueError)
test('atol', " 1 ", 1L)
test('atol', " 1x ", ValueError)
test('atol', " x1 ", ValueError)
test('atof', " 1 ", 1.0)
test('atof', " 1x ", ValueError)
test('atof', " x1 ", ValueError)
test('capitalize', ' hello ', ' hello ')
test('capitalize', 'hello ', 'Hello ')
test('find', 'abcdefghiabc', 0, 'abc')
test('find', 'abcdefghiabc', 9, 'abc', 1)
test('find', 'abcdefghiabc', -1, 'def', 4)
test('rfind', 'abcdefghiabc', 9, 'abc')
test('lower', 'HeLLo', 'hello')
test('upper', 'HeLLo', 'HELLO')
transtable = '\000\001\002\003\004\005\006\007\010\011\012\013\014\015\016\017\020\021\022\023\024\025\026\027\030\031\032\033\034\035\036\037 !"#$%&\'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`xyzdefghijklmnopqrstuvwxyz{|}~\177\200\201\202\203\204\205\206\207\210\211\212\213\214\215\216\217\220\221\222\223\224\225\226\227\230\231\232\233\234\235\236\237\240\241\242\243\244\245\246\247\250\251\252\253\254\255\256\257\260\261\262\263\264\265\266\267\270\271\272\273\274\275\276\277\300\301\302\303\304\305\306\307\310\311\312\313\314\315\316\317\320\321\322\323\324\325\326\327\330\331\332\333\334\335\336\337\340\341\342\343\344\345\346\347\350\351\352\353\354\355\356\357\360\361\362\363\364\365\366\367\370\371\372\373\374\375\376\377'
test('maketrans', 'abc', transtable, 'xyz')
test('maketrans', 'abc', ValueError, 'xyzq')
test('split', 'this is the split function',
['this', 'is', 'the', 'split', 'function'])
test('split', 'a|b|c|d', ['a', 'b', 'c', 'd'], '|')
test('split', 'a|b|c|d', ['a', 'b', 'c|d'], '|', 2)
test('split', 'a b c d', ['a', 'b c d'], None, 1)
test('split', 'a b c d', ['a', 'b', 'c d'], None, 2)
test('split', 'a b c d', ['a', 'b', 'c', 'd'], None, 3)
test('split', 'a b c d', ['a', 'b', 'c', 'd'], None, 4)
test('split', 'a b c d', ['a', 'b', 'c', 'd'], None, 0)
test('split', 'a b c d', ['a', 'b', 'c d'], None, 2)
# join now works with any sequence type
class Sequence:
def __init__(self): self.seq = 'wxyz'
def __len__(self): | return len(self.seq)
def __getitem__(self, i): return self.seq[i]
test('join', ['a', 'b', 'c', 'd'], 'a b c d')
test('join', ('a', 'b', 'c', 'd'), 'abcd', '')
test('join', Sequence(), 'w x y z')
# try a few long ones
print strop.join(['x' * 100] * 100, ':')
print strop.join(('x' * 100,) * 100, ':')
test('strip', ' hello ', 'hello')
test('lstrip', ' hello ', 'hello ')
test | ('rstrip', ' hello ', ' hello')
test('swapcase', 'HeLLo cOmpUteRs', 'hEllO CoMPuTErS')
test('translate', 'xyzabcdef', 'xyzxyz', transtable, 'def')
test('replace', 'one!two!three!', 'one@two!three!', '!', '@', 1)
test('replace', 'one!two!three!', 'one@two@three!', '!', '@', 2)
test('replace', 'one!two!three!', 'one@two@three@', '!', '@', 3)
test('replace', 'one!two!three!', 'one@two@three@', '!', '@', 4)
test('replace', 'one!two!three!', 'one@two@three@', '!', '@', 0)
test('replace', 'one!two!three!', 'one@two@three@', '!', '@')
test('replace', 'one!two!three!', 'one!two!three!', 'x', '@')
test('replace', 'one!two!three!', 'one!two!three!', 'x', '@', 2)
strop.whitespace
strop.lowercase
strop.uppercase
|
yuhangwang/Sigil | src/Resource_Files/plugin_launchers/python/inputcontainer.py | Python | gpl-3.0 | 2,521 | 0.008727 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# vim:ts=4:sw=4:softtabstop=4:smarttab:expandtab
# Copyright (c) 2014 Kevin B. Hendricks, John Schember, and Doug Massay
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this list of
# conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice, this list
# of conditions and the following disclaimer in the documentation and/or other materials
# provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY
# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
# SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
# TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY
# WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from __future__ import unicode_literals, division, absolute_import, print_function
import sys
import os
from quickparser import QuickXHTMLParser
from pluginhunspell import HunspellChecker
from preferences import JSONPrefs
class ContainerException(Exception):
pass
class InputContainer(object):
    """Facade over the plugin launcher's wrapper object.

    Bundles an XHTML parser, a Hunspell spell checker and a JSON-backed
    preference store for use by the plugin code.
    """

    def __init__(self, wrapper, debug=False):
        self._debug = debug
        self._w = wrapper
        self.qp = QuickXHTMLParser()
        # Fix for extraction-garbled token: attribute is "hspell".
        self.hspell = HunspellChecker(wrapper.get_hunspell_path())
        self.dictionary_dirs = wrapper.get_dictionary_dirs()
        # Preferences are stored per-plugin under the launcher's plugin dir.
        self._prefs_store = JSONPrefs(wrapper.plugin_dir, wrapper.plugin_name)

    def getPrefs(self):
        """Return the plugin's preference store."""
        return self._prefs_store

    def savePrefs(self, user_copy):
        """Replace the preference store with *user_copy* and persist it."""
        self._prefs_store = user_copy
        self._prefs_store._commit()

    def launcher_version(self):
        """Return the version reported by the plugin launcher."""
        return self._w.getversion()

    def addotherfile(self, book_href, data):
        # creates a new file not in manifest with desired ebook root relative href
        self._w.addotherfile(book_href, data)
|
avtomato/HackerRank | Python/_16_Numpy/_14_Polynomials/solution.py | Python | mit | 130 | 0 | import numpy
# HackerRank NumPy "Polynomials": read the polynomial coefficients
# (highest degree first) and a point x from stdin, then print P(x).
arr = numpy.array(list(map(float, input().split())))
x = float(input())
value = numpy.polyval(arr, x)
print(value)
| |
Fastcode/NUClearExample | nuclear/message/generator/OneOfField.py | Python | mit | 936 | 0 | #!/usr/bin/env python3
import stringcase
from generator.Field import Field
class OneOfField:
    """Represents a protobuf ``oneof`` as a synthetic message field.

    Mirrors the attribute interface of ``generator.Field`` so the code
    generator can treat a oneof like any other non-basic, non-repeated
    field.
    """

    def __init__(self, oneof_name, oneof_fields, context):
        self.name = oneof_name
        # Some booleans to describe the type
        self.one_of = True
        self.type = 'OneOf{}'.format(stringcase.pascalcase(self.name))
        self.fqn = '{}.{}'.format(context.fqn, self.type)
        self.default_value = '{}()'.format(self.type)
        self.map_type = False
        self.repeated = False
        self.pointer = False
        self.array_size = 0
        self.bytes_type = False
        self.basic = False
        # Wrap each raw protobuf field descriptor in a generator Field.
        self.oneof_fields = [Field(f, context) for f in oneof_fields]
        # Since our cpp_type is used a lot, precalculate it
        self.cpp_type = self.fqn.replace('.', '::')
        self.special_cpp_type = False

    def generate_cpp_header(self):
        """Return the C++ member declaration for this oneof field."""
        return '{} {};'.format(self.cpp_type, self.name)
|
Adam01/Cylinder-server | tests/test_file_content_procedures.py | Python | mit | 4,718 | 0 | import difflib
import shutil
__author__ = 'Adam'
import unittest
import os
import useful
class TestFileContentProcedures(unittest.TestCase):
    """Exercises file-content operations: reading/writing contents, applying
    compressed diffs, and identifying encoding, EOL style and language.

    Fixture files live under ./tests/data/ — TODO confirm they exist in CI.
    """

    @classmethod
    def setUpClass(cls):
        cls.test_dir = "./tests/data/"
        cls.text_file_name = "TextFile_UTF16_CRLF.txt"
        cls.text_file_path = os.path.join(cls.test_dir, cls.text_file_name)
        cls.text_file_encoding = "UTF-16BE"
        cls.text_file_eol = "CRLF"
        import codecs
        with codecs.open(cls.text_file_path, 'rb',
                         encoding=cls.text_file_encoding) as f:
            cls.text_file_contents = f.read()
        cls.script_file_name = "ScriptFile_UTF8_LF.py"
        cls.script_file_path = os.path.join(cls.test_dir, cls.script_file_name)
        cls.script_file_encoding = "UTF-8"
        cls.script_file_eol = "LF"
        with codecs.open(cls.script_file_path, 'rb',
                         encoding=cls.script_file_encoding) as f:
            cls.script_file_contents = f.read()
        cls.set_contents = cls.text_file_contents
        cls.set_name = "TestSetContents.txt"
        cls.set_path = os.path.join(cls.test_dir, cls.set_name)
        # diff testing
        cls.diff_target_path = os.path.join(cls.test_dir, "ScriptFile_Copy.py")
        shutil.copyfile(cls.script_file_path, cls.diff_target_path)
        cls.diff_new_path = os.path.join(cls.test_dir,
                                         "ScriptFile_Diff_Test.py")
        with open(cls.diff_target_path, "rb") as f:
            target_data = f.read().split("\n")
        with open(cls.diff_new_path, "rb") as f:
            new_data = f.read().split("\n")
        diff_data = difflib.ndiff(target_data, new_data)
        diff_data = list(diff_data)
        cls.comp_diff_data = useful.make_comp_diff(diff_data)

    @classmethod
    def tearDownClass(cls):
        # os.remove(cls.set_path)
        # os.remove(cls.diff_target_path)
        pass

    '''
    The system is required to be able to obtain the content of a file.
    This test is successful if the content is matched as is with expected data.
    '''
    def test_get_file_contents(self):
        from fsentity import FileSystemFile
        script_file = FileSystemFile(self.script_file_path)
        self.assertEquals(script_file.get_contents()[0],
                          self.script_file_contents)
        text_file = FileSystemFile(self.text_file_path)
        self.assertEquals(text_file.get_contents()[0], self.text_file_contents)

    '''
    The system must be able to set the contents of a file.
    Test is successful if changes are made that match the expected outcome.
    '''
    def test_set_file_contents(self):
        from fsentity import FileSystemDirectory
        d = FileSystemDirectory(self.test_dir)
        d.create_file(self.set_name, self.set_contents)
        import codecs
        with codecs.open(self.set_path, 'rb', encoding="utf-8") as f:
            file_data = f.read()
        # print file_data
        self.assertEquals(file_data, self.set_contents)

    '''
    The system will need to update a file's contents from a differential
    format.
    The test is successful if the resulting file contents matches the result
    of the original content with
    a supplied delta.
    '''
    def test_set_file_from_diff(self):
        from fsentity import FileSystemFile
        target_file = FileSystemFile(self.diff_target_path)
        diff_crc = FileSystemFile(self.diff_new_path).get_crc32()
        self.assertTrue(target_file.set_from_comp_diff(self.comp_diff_data,
                                                       original_crc=diff_crc))

    ''' Identify byte encoding '''
    def test_identify_encoding(self):
        from fsentity import FileSystemFile
        text_file = FileSystemFile(self.text_file_path)
        self.assertEqual(
            text_file.get_encoding().upper(),
            self.text_file_encoding
        )
        script_file = FileSystemFile(self.script_file_path)
        self.assertEqual(self.script_file_encoding,
                         script_file.get_encoding().upper())

    ''' Identify EOL format '''
    def test_identify_line_ending(self):
        from fsentity import FileSystemFile
        f = FileSystemFile(self.text_file_path)
        self.assertEqual(self.text_file_eol, f.get_line_ending()[0])
        f = FileSystemFile(self.script_file_path)
        self.assertEqual(self.script_file_eol, f.get_line_ending()[0])

    ''' ... code style? '''
    def test_identify_format(self):
        from fsentity import FileSystemFile
        lang = FileSystemFile(self.script_file_path).get_programming_language()
        self.assertEqual("Python", lang)
|
onyb/mooca | MongoDB_University/M101P_MongoDB_for_Developers/Chapter_2_CRUD/pymongo_removing_data/remove_student_data.py | Python | mit | 1,002 | 0.011976 |
# Andrew Erlichson
# MongoDB, Inc.
# M101P - Copyright 2015, All Rights Reserved
import pymongo
import datetime
import sys
# establish a connection to the database
connection = pymongo.MongoClient("mongodb://localhost")
# removes one student
# NOTE: Python 2 script — deletes every "scores" document for the student.
def remove_student(student_id):
    # get a handle to the school database
    db=connection.school
    scores = db.scores
    try:
        result = scores.delete_many({'student_id':student_id})
        print "num removed: ", result.deleted_count
    except Exception as e:
        print "Exception: ", type(e), e
def | find_student_data(student_id):
# get a handle to the school database
db=connection.school
scores = db.scores
print "Searching for student data for student with id = ", student_id
try:
docs = scores.find({'student_id':student_id})
for doc in docs:
print doc
except Exception as e:
print "Exception: ", type(e), e
# Demo run: delete student 1's scores, then show that none remain.
remove_student(1)
find_student_data(1)
|
NicovincX2/Python-3.5 | Algorithmique/Algorithme/Algorithme de la théorie des graphes/graph_functions.py | Python | gpl-3.0 | 746 | 0.002681 | # -*- coding: utf-8 -*-
import os
import random
# pour melanger les graphes...
# pour melanger les graphes...
def shuffle(G):
    """Randomise the graph in place: node order, then each adjacency list.

    Expects G = (node list, {node: adjacency list}).
    """
    nodes, adjacency = G[0], G[1]
    random.shuffle(nodes)
    for _, neighbours in adjacency.items():
        random.shuffle(neighbours)
# pour completer les graphes NON orientes...
# pour completer les graphes NON orientes...
def completer_graphe_no(G):
    """Symmetrise an undirected graph in place.

    G[0] maps node -> neighbour list; for every edge u->v the reverse
    edge v->u is appended when missing.
    """
    adjacency = G[0]
    for node in adjacency:
        for neighbour in adjacency[node]:
            if node not in adjacency[neighbour]:
                adjacency[neighbour].append(node)
# pour completer les graphes NON orientes VALUES...
# pour completer les graphes NON orientes VALUES...
def completer_graphe_nov(G):
    """Symmetrise a weighted undirected graph in place.

    G = (node list, {node: [(neighbour, weight), ...]}); for every weighted
    edge u->(v, d) the reverse edge v->(u, d) is appended when missing.
    """
    adjacency = G[1]
    for node in G[0]:
        for (neighbour, weight) in adjacency[node]:
            if (node, weight) not in adjacency[neighbour]:
                adjacency[neighbour].append((node, weight))
# pour calculer le poids d'un ensemble d'aretes (q,d,q'):
def Poids(A):
res = 0
for (x, d, y) in A:
res = res + d
return res
# Windows-only: keep the console window open until a key is pressed.
os.system("pause")
|
apyrgio/ganeti | lib/client/gnt_node.py | Python | bsd-2-clause | 40,549 | 0.00804 | #
#
# Copyright (C) 2006, 2007, 2008, 2009, 2010, 2011, 2012, 2013 Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
# TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Node related commands"""
# pylint: disable=W0401,W0613,W0614,C0103
# W0401: Wildcard import ganeti.cli
# W0613: Unused argument, since all functions follow the same API
# W0614: Unused import %s from wildcard import (since we need cli)
# C0103: Invalid name gnt-node
import itertools
import errno
from ganeti.cli import *
from ganeti import cli
from ganeti import bootstrap
from ganeti import opcodes
from ganeti import utils
from ganeti import constants
from ganeti import errors
from ganeti import netutils
from ganeti import pathutils
from ganeti import ssh
from ganeti import compat
from ganeti import confd
from ganeti.confd import client as confd_client
#: default list of field for L{ListNodes}
_LIST_DEF_FIELDS = [
  "name", "dtotal", "dfree",
  "mtotal", "mnode", "mfree",
  "pinst_cnt", "sinst_cnt",
  ]

#: Default field list for L{ListVolumes}
_LIST_VOL_DEF_FIELDS = ["node", "phys", "vg", "name", "size", "instance"]

#: default list of field for L{ListStorage}
_LIST_STOR_DEF_FIELDS = [
  constants.SF_NODE,
  constants.SF_TYPE,
  constants.SF_NAME,
  constants.SF_SIZE,
  constants.SF_USED,
  constants.SF_FREE,
  constants.SF_ALLOCATABLE,
  ]

#: default list of power commands
_LIST_POWER_COMMANDS = ["on", "off", "cycle", "status"]

#: headers (and full field list) for L{ListStorage}
_LIST_STOR_HEADERS = {
  constants.SF_NODE: "Node",
  constants.SF_TYPE: "Type",
  constants.SF_NAME: "Name",
  constants.SF_SIZE: "Size",
  constants.SF_USED: "Used",
  constants.SF_FREE: "Free",
  constants.SF_ALLOCATABLE: "Allocatable",
  }

#: User-facing storage unit types
_USER_STORAGE_TYPE = {
  constants.ST_FILE: "file",
  constants.ST_LVM_PV: "lvm-pv",
  constants.ST_LVM_VG: "lvm-vg",
  constants.ST_SHARED_FILE: "sharedfile",
  constants.ST_GLUSTER: "gluster",
  }

_STORAGE_TYPE_OPT = \
  cli_option("-t", "--storage-type",
             dest="user_storage_type",
             choices=_USER_STORAGE_TYPE.keys(),
             default=None,
             metavar="STORAGE_TYPE",
             help=("Storage type (%s)" %
                   utils.CommaJoin(_USER_STORAGE_TYPE.keys())))

#: Storage types that support the "fix consistency" repair operation
_REPAIRABLE_STORAGE_TYPES = \
  [st for st, so in constants.VALID_STORAGE_OPERATIONS.iteritems()
   if constants.SO_FIX_CONSISTENCY in so]

_MODIFIABLE_STORAGE_TYPES = constants.MODIFIABLE_STORAGE_FIELDS.keys()

#: Out-of-band commands that require confirmation before execution
_OOB_COMMAND_ASK = compat.UniqueFrozenset([
  constants.OOB_POWER_OFF,
  constants.OOB_POWER_CYCLE,
  ])

_ENV_OVERRIDE = compat.UniqueFrozenset(["list"])

NONODE_SETUP_OPT = cli_option("--no-node-setup", default=True,
                              action="store_false", dest="node_setup",
                              help=("Do not make initial SSH setup on remote"
                                    " node (needs to be done manually)"))

# Name was garbled in extraction ("IGNORE_STATU S_OPT"); restored here.
IGNORE_STATUS_OPT = cli_option("--ignore-status", default=False,
                               action="store_true", dest="ignore_status",
                               help=("Ignore the Node(s) offline status"
                                     " (potentially DANGEROUS)"))
def ConvertStorageType(user_storage_type):
  """Map a user-facing storage type name to its internal constant.

  Raises L{errors.OpPrereqError} for unknown names.

  """
  internal_type = _USER_STORAGE_TYPE.get(user_storage_type)
  if internal_type is None:
    raise errors.OpPrereqError("Unknown storage type: %s" % user_storage_type,
                               errors.ECODE_INVAL)
  return internal_type
def _TryReadFile(path):
  """Tries to read a file.
  If the file is not found, C{None} is returned.
  @type path: string
  @param path: Filename
  @rtype: None or string
  @todo: Consider adding a generic ENOENT wrapper
  """
  try:
    return utils.ReadFile(path)
  except EnvironmentError, err:
    # Tolerate only "file not found"; every other I/O error is re-raised.
    if err.errno == errno.ENOENT:
      return None
    else:
      raise
def _ReadSshKeys(keyfiles, _tostderr_fn=ToStderr):
  """Load the SSH key pairs described by C{keyfiles}.

  @type keyfiles: dict
  @param keyfiles: Maps each L{constants.SSHK_ALL} kind to a two-value
    tuple (private key file, public key file)
  @rtype: list
  @return: Three-value tuples (kind, private key, public key) for every
    kind whose two files could both be read

  """
  loaded = []
  for (kind, (priv_path, pub_path)) in keyfiles.items():
    priv = _TryReadFile(priv_path)
    pub = _TryReadFile(pub_path)
    if pub and priv:
      loaded.append((kind, priv, pub))
    elif pub or priv:
      # Only half of the pair exists; warn rather than using it silently.
      _tostderr_fn("Couldn't find a complete set of keys for kind '%s'; files"
                   " '%s' and '%s'", kind, priv_path, pub_path)
  return loaded
def _SetupSSH(options, cluster_name, node, ssh_port, cl):
"""Configures a destination node's SSH daemon.
@param options: Command line options
@type cluster_name
@param cluster_name: Cluster name
@type node: string
@param node: Destination node name
@type ssh_port: int
@param ssh_port: Destination node ssh port
@param cl: luxi client
"""
# Retrieve the list of master and master candidates
candidate_filter = ["|", ["=", "role", "M"], ["=", "role", "C"]]
result = cl.Query(constants.QR_NODE, ["uuid"], candidate_filter)
if len(result.data) < 1:
raise errors.OpPrereqError("No master or master candidate node is found.")
candidates = [uuid for ((_, uuid),) in result.data]
candidate_keys = ssh.QueryPubKeyFile(candidates)
if options.force_join:
ToStderr("The \"--force-join\" option is no longer supported and will be"
" ignored.")
host_keys = _ReadSshKeys(constants.SSH_DAEMON_KEYFILES)
(_, root_keyfiles) = \
ssh.GetAllUserFiles(constants.SSH_LOGIN_USER, mkdir=False, dircheck=False)
root_keys = _ReadSshKeys(root_keyfiles)
(_, cert_pem) = \
utils.ExtractX509Certificate(utils.ReadFile(pathutils.NODED_CERT_FILE))
data = {
constants.SSHS_CLUSTER_NAME: cluster_name,
constants.SSHS_NODE_DAEMON_CERTIFICATE: cert_pem,
constants.SSHS_SSH_HOST_KEY: host_keys,
constants.SSHS_SSH_ROOT_KEY: root_keys,
constants.SSHS_SSH_AUTHORIZED_KEYS: candidate_keys,
}
ssh.RunSshCmdWithStdin(cluster_name, node, pathutils.PREPARE_NODE_JOIN,
ssh_port, data,
debug=options.debug, verbose=options.verbose,
use_cluster_key=False, ask_key=options.ssh_key_check,
strict_host_check=options.ssh_key_check)
fetched_keys = ssh.ReadRemoteSshPubKeys(root_keyfiles, node, cluster_name,
ssh_port, options.ssh_key_check,
options.ssh_key_check)
for pub_key in fetched_keys.values():
# Unfortunately, we have to add the key with the node name rather than
# the node's UUID here, because at this point, we do not have a UUID yet.
# The entry will be corrected in |
BeenzSyed/tempest | tempest/api/volume/test_volumes_get.py | Python | apache-2.0 | 6,033 | 0.000166 | # Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.api.volume import base
from tempest.common.utils import data_utils
from tempest.test import attr
from tempest.test import services
class VolumesGetTest(base.BaseVolumeV1Test):
    """Create/get/update/delete lifecycle tests for Cinder v1 volumes."""
    _interface = "json"

    @classmethod
    def setUpClass(cls):
        super(VolumesGetTest, cls).setUpClass()
        cls.client = cls.volumes_client

    def _delete_volume(self, volume_id):
        # Delete the volume and wait until it is really gone.
        resp, _ = self.client.delete_volume(volume_id)
        self.assertEqual(202, resp.status)
        self.client.wait_for_resource_deletion(volume_id)

    def _is_true(self, val):
        # NOTE(jdg): Temporary conversion method to get cinder patch
        # merged. Then we'll make this strict again and
        # specifically check "true" or "false"
        if val in ['true', 'True', True]:
            return True
        else:
            return False

    def _volume_create_get_update_delete(self, **kwargs):
        # Create a volume, Get it's details and Delete the volume
        volume = {}
        v_name = data_utils.rand_name('Volume')
        metadata = {'Type': 'Test'}
        # Create a volume
        resp, volume = self.client.create_volume(size=1,
                                                 display_name=v_name,
                                                 metadata=metadata,
                                                 **kwargs)
        self.assertEqual(200, resp.status)
        self.assertIn('id', volume)
        self.addCleanup(self._delete_volume, volume['id'])
        self.assertIn('display_name', volume)
        self.assertEqual(volume['display_name'], v_name,
                         "The created volume name is not equal "
                         "to the requested name")
        self.assertTrue(volume['id'] is not None,
                        "Field volume id is empty or not found.")
        self.client.wait_for_volume_status(volume['id'], 'available')
        # Get Volume information
        resp, fetched_volume = self.client.get_volume(volume['id'])
        self.assertEqual(200, resp.status)
        self.assertEqual(v_name,
                         fetched_volume['display_name'],
                         'The fetched Volume is different '
                         'from the created Volume')
        self.assertEqual(volume['id'],
                         fetched_volume['id'],
                         'The fetched Volume is different '
                         'from the created Volume')
        self.assertEqual(metadata,
                         fetched_volume['metadata'],
                         'The fetched Volume is different '
                         'from the created Volume')
        # NOTE(jdg): Revert back to strict true/false checking
        # after fix for bug #1227837 merges
        boot_flag = self._is_true(fetched_volume['bootable'])
        if 'imageRef' in kwargs:
            self.assertEqual(boot_flag, True)
        if 'imageRef' not in kwargs:
            self.assertEqual(boot_flag, False)
        # Update Volume
        new_v_name = data_utils.rand_name('new-Volume')
        new_desc = 'This is the new description of volume'
        resp, update_volume = \
            self.client.update_volume(volume['id'],
                                      display_name=new_v_name,
                                      display_description=new_desc)
        # Assert response body for update_volume method
        self.assertEqual(200, resp.status)
        self.assertEqual(new_v_name, update_volume['display_name'])
        self.assertEqual(new_desc, update_volume['display_description'])
        # Assert response body for get_volume method
        resp, updated_volume = self.client.get_volume(volume['id'])
        self.assertEqual(200, resp.status)
        self.assertEqual(volume['id'], updated_volume['id'])
        self.assertEqual(new_v_name, updated_volume['display_name'])
        self.assertEqual(new_desc, updated_volume['display_description'])
        self.assertEqual(metadata, updated_volume['metadata'])
        # NOTE(jdg): Revert back to strict true/false checking
        # after fix for bug #1227837 merges
        boot_flag = self._is_true(updated_volume['bootable'])
        if 'imageRef' in kwargs:
            self.assertEqual(boot_flag, True)
        if 'imageRef' not in kwargs:
            self.assertEqual(boot_flag, False)

    @attr(type='gate')
    def test_volume_get_metadata_none(self):
        # Create a volume without passing metadata, get details, and delete
        # Create a volume without metadata
        volume = self.create_volume(metadata={})
        # GET Volume
        resp, fetched_volume = self.client.get_volume(volume['id'])
        self.assertEqual(200, resp.status)
        self.assertEqual(fetched_volume['metadata'], {})

    @attr(type='smoke')
    def test_volume_create_get_update_delete(self):
        self._volume_create_get_update_delete()

    @attr(type='smoke')
    @services('image')
    def test_volume_create_get_update_delete_from_image(self):
        self._volume_create_get_update_delete(imageRef=self.
                                              config.compute.image_ref)

    @attr(type='gate')
    def test_volume_create_get_update_delete_as_clone(self):
        origin = self.create_volume()
        self._volume_create_get_update_delete(source_volid=origin['id'])
class VolumesGetTestXML(VolumesGetTest):
    """Runs the same suite as VolumesGetTest through the XML interface."""
    _interface = "xml"
|
vismartltd/edx-platform | cms/envs/devstack.py | Python | agpl-3.0 | 3,429 | 0.004374 | """
Specific overrides to the base prod settings to make development easier.
"""
from .aws import * # pylint: disable=wildcard-import, unused-wildcard-import
# Don't use S3 in devstack, fall back to filesystem
del DEFAULT_FILE_STORAGE
MEDIA_ROOT = "/edx/var/edxapp/uploads"

DEBUG = True
USE_I18N = True
TEMPLATE_DEBUG = DEBUG

################################ LOGGERS ######################################
import logging

# Disable noisy loggers
for pkg_name in ['track.contexts', 'track.middleware', 'dd.dogapi']:
    logging.getLogger(pkg_name).setLevel(logging.CRITICAL)

################################ EMAIL ########################################
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'

################################# LMS INTEGRATION #############################
LMS_BASE = "localhost:8000"
# Garbled string repaired: the preview host is "preview." + LMS_BASE.
FEATURES['PREVIEW_LMS_BASE'] = "preview." + LMS_BASE

############################# ADVANCED COMPONENTS #############################
# Make it easier to test advanced components in local dev
FEATURES['ALLOW_ALL_ADVANCED_COMPONENTS'] = True

################################ CELERY #######################################
# By default don't use a worker, execute tasks as if they were local functions
CELERY_ALWAYS_EAGER = True

################################ DEBUG TOOLBAR ################################
INSTALLED_APPS += ('debug_toolbar', 'debug_toolbar_mongo')
MIDDLEWARE_CLASSES += ('debug_toolbar.middleware.DebugToolbarMiddleware',)
INTERNAL_IPS = ('127.0.0.1',)

DEBUG_TOOLBAR_PANELS = (
    'debug_toolbar.panels.versions.VersionsPanel',
    'debug_toolbar.panels.timer.TimerPanel',
    'debug_toolbar.panels.settings.SettingsPanel',
    'debug_toolbar.panels.headers.HeadersPanel',
    'debug_toolbar.panels.request.RequestPanel',
    'debug_toolbar.panels.sql.SQLPanel',
    'debug_toolbar.panels.signals.SignalsPanel',
    'debug_toolbar.panels.logging.LoggingPanel',
    'debug_toolbar.panels.profiling.ProfilingPanel',
)

DEBUG_TOOLBAR_CONFIG = {
    'SHOW_TOOLBAR_CALLBACK': 'cms.envs.devstack.should_show_debug_toolbar'
}


def should_show_debug_toolbar(_):
    return True  # We always want the toolbar on devstack regardless of IP, auth, etc.


# To see stacktraces for MongoDB queries, set this to True.
# Stacktraces slow down page loads drastically (for pages with lots of queries).
DEBUG_TOOLBAR_MONGO_STACKTRACES = False

################################ MILESTONES ################################
FEATURES['MILESTONES_APP'] = True

################################ ENTRANCE EXAMS ################################
FEATURES['ENTRANCE_EXAMS'] = True

################################ COURSE LICENSES ################################
FEATURES['LICENSING'] = True

################################ SEARCH INDEX ################################
FEATURES['ENABLE_COURSEWARE_INDEX'] = True
FEATURES['ENABLE_LIBRARY_INDEX'] = True
SEARCH_ENGINE = "search.elastic.ElasticSearchEngine"

###############################################################################
# See if the developer has any local overrides.
try:
    from .private import *  # pylint: disable=import-error
except ImportError:
    pass

#####################################################################
# Lastly, run any migrations, if needed.
MODULESTORE = convert_module_store_setting_if_needed(MODULESTORE)

# Dummy secret key for dev
SECRET_KEY = '85920908f28904ed733fe576320db18cabd7b6cd'
|
pankajp/pyface | pyface/workbench/perspective_item.py | Python | bsd-3-clause | 2,021 | 0.000495 | """ An item in a Perspective contents list. """
# Enthought library imports.
from traits.api import Enum, Float, HasTraits, provides, Str
# Local imports.
from i_perspective_item import IPerspectiveItem
@provides(IPerspectiveItem)
class PerspectiveItem(HasTraits):
    """ An item in a Perspective contents list. """

    # The Id of the view to display in the perspective.
    id = Str

    # The position of the view relative to the item specified in the
    # 'relative_to' trait.
    #
    # 'top' puts the view above the 'relative_to' item.
    # 'bottom' puts the view below the 'relative_to' item.
    # 'left' puts the view to the left of the 'relative_to' item.
    # 'right' puts the view to the right of the 'relative_to' item.
    # 'with' puts the view in the same region as the 'relative_to' item.
    #
    # If the position is specified as 'with' you must specify a 'relative_to'
    # item other than the editor area (i.e., you cannot position a view 'with'
    # the editor area).
    position = Enum('left', 'top', 'bottom', 'right', 'with')

    # The Id of the view to position relative to. If this is not specified
    # (or if no view exists with this Id) then the view will be placed relative
    # to the editor area.
    relative_to = Str

    # The width of the item (as a fraction of the window width).
    #
    # e.g. 0.5 == half the window width.
    #
    # Note that this is treated as a suggestion, and it may not be possible
    # for the workbench to allocate the space requested.
    width = Float(-1)

    # The height of the item (as a fraction of the window height).
    #
    # e.g. 0.5 == half the window height.
    #
    # Note that this is treated as a suggestion, and it may not be possible
    # for the workbench to allocate the space requested.
    height = Float(-1)

    # The style of the dock control created.
    style_hint = Enum('tab', 'vertical', 'horizontal', 'fixed')
#### EOF ######################################################################
|
dbrgn/django-filer | filer/templatetags/filer_admin_tags.py | Python | bsd-3-clause | 1,240 | 0.004839 | import django
from django.conf import settings
from django.template import Library
from distutils.version import LooseVersion
register = Library()
@register.inclusion_tag("admin/filer/actions.html", takes_context=True)
def filer_actions(context):
    """
    Track the number of times the action field has been rendered on the page,
    so we know which value to use.
    """
    # Idiomatic decorator form replaces the manual
    # `filer_actions = register.inclusion_tag(...)(filer_actions)` rebinding.
    context['action_index'] = context.get('action_index', -1) + 1
    return context
# Shamelessly taken from django-cms
# This will go away when django < 1.4 compatibility will be dropped
if LooseVersion(django.get_version()) < LooseVersion('1.4'):
    # Django < 1.4 shipped admin media under an extra "admin/" directory.
    ADMIN_ICON_BASE = "%sadmin/img/admin/" % settings.STATIC_URL
    ADMIN_CSS_BASE = "%sadmin/css/admin/" % settings.STATIC_URL
    ADMIN_JS_BASE = "%sadmin/js/admin/" % settings.STATIC_URL
else:
    ADMIN_ICON_BASE = "%sadmin/img/" % settings.STATIC_URL
    ADMIN_CSS_BASE = "%sadmin/css/" % settings.STATIC_URL
    ADMIN_JS_BASE = "%sadmin/js/" % settings.STATIC_URL
@register.simple_tag
def admin_icon_base():
    """Return the static URL prefix for Django admin icons."""
    return ADMIN_ICON_BASE


@register.simple_tag
def admin_css_base():
    """Return the static URL prefix for Django admin CSS."""
    return ADMIN_CSS_BASE


@register.simple_tag
def admin_js_base():
    """Return the static URL prefix for Django admin JavaScript."""
    return ADMIN_JS_BASE
|
alirizakeles/memopol-core | memopol/votes/admin.py | Python | gpl-3.0 | 286 | 0 | # -*- coding: utf-8 -*-
from django.cont | rib import admin
from memopol.votes import models
# Register all vote-related models with the default admin site so they can
# be browsed and edited through the Django admin.
admin.site.register(models.Proposal)
admin.site.register(models.Recommendation)
admin.site.register(models.RecommendationData)
admin.site.register(models.Vote)
admin.site.register(models.Score)
|
raphaelm/django-i18nfield | demoproject/demoproject/wsgi.py | Python | apache-2.0 | 399 | 0 | """
WSGI config for demoproject project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
# Default to the project's settings module unless the environment overrides it.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "demoproject.settings")
application = get_wsgi_application()
|
crisely09/horton | doc/update_lib_doc.py | Python | gpl-3.0 | 4,657 | 0.001718 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# HORTON: Helpful Open-source Research TOol for N-fermion systems.
# Copyright (C) 2011-2016 The HORTON Development Team
#
# This file is part of HORTON.
#
# HORTON is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# HORTON is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>
#
# --
import importlib, os
from glob import glob
from cStringIO import StringIO
from common import write_if_changed
def discover():
    """Scan ../horton for subpackages and list each one's modules and headers.

    Returns a dict mapping dotted package name to a list of module names
    (*.py / *.so stems plus *.h filenames). The 'test' subpackage is skipped.
    """
    # find packages
    packages = {'horton': []}
    for fn in glob('../horton/*/__init__.py'):
        subpackage = fn.split('/')[2]
        if subpackage == 'test':
            continue
        packages['horton.%s' % subpackage] = []
    # find modules
    for package, modules in packages.iteritems():
        stub = package.replace('.', '/')
        for fn in sorted(glob('../%s/*.py' % stub) + glob('../%s/*.so' % stub)):
            # Strip the 3-character extension (.py / .so) to get the stem.
            module = fn.split('/')[-1][:-3]
            if module == '__init__':
                continue
            modules.append(module)
        for fn in sorted(glob('../%s/*.h' % stub)):
            module = fn.split('/')[-1]
            modules.append(module)
    return packages
def get_first_docline(module):
    """Return the first line of a module's docstring.

    The module is imported by dotted name; a FIXME placeholder is returned
    when it has no docstring at all.
    """
    mod = importlib.import_module(module)
    doc = mod.__doc__
    if doc is not None:
        return doc.split('\n')[0]
    return 'FIXME! Write module docstring.'
def get_first_doxygenline(fn_h):
    """Return the title following the UPDATELIBDOCTITLE marker in a header.

    Raises IOError when the marker is absent so undocumented headers fail
    loudly instead of silently producing empty titles.
    """
    with open('../%s' % fn_h) as f:
        for line in f:
            if line.startswith('// UPDATELIBDOCTITLE:'):
                # Drop the 21-character marker prefix and surrounding blanks.
                return line[21:].strip()
    raise IOError('UPDATELIBDOCTITLE missing in %s' % fn_h)
def underline(line, char, f):
    """Write ``line`` to ``f`` underlined with ``char`` (reST heading)."""
    print >> f, line
    print >> f, char*len(line)
    print >> f
def write_disclaimer(f):
    """Write the 'auto-generated, do not edit' reST comment block to ``f``."""
    print >> f, '..'
    print >> f, '    This file is automatically generated. Do not make '
    print >> f, '    changes as these will be overwritten. Rather edit '
    print >> f, '    the documentation in the source code.'
    print >> f
def main():
packages = discover()
# Write new/updated rst files if needed
fns_rst = []
for package, modules in sorted(packages.iteritems()):
# write the new file to a StringIO
f1 = StringIO()
write_disclaimer(f1)
underline('``%s`` -- %s' % (package, get_first_docline(package)), '#', f1)
print >> f1
print >> f1, '.. automodule::', package
print >> f1, ' :members:'
print >> f1
print >> f1, '.. toctree::'
print >> f1, ' :maxdepth: 1'
print >> f1, ' :numbered:'
print >> f1
for module in modules:
f2 = StringIO()
write_disclaimer(f2)
if module.endswith('.h'):
#full = package + '/' + module
fn_h = package.replace('.', '/') + '/' + module
underline('``%s`` -- %s' % (fn_h, get_first_doxygenline(fn_h)), '#', f2)
print >> f2, '.. doxygenfile::', fn_h
print >> f2, ' :project: horton'
print >> f2
print >> f2
else:
full = package + '.' + module
underline('``%s`` -- %s' % (full, get_first_docline(full)), '#', f2)
print >> f2, '.. automodule::', full
print >> f2, ' :members:'
print >> f2
print >> f2
# write | if the contents have changed
rst_name = 'mod_%s_%s' % (package.replace('.', '_'), module.replace('.', '_'))
fn2_rst = 'lib/%s.rst' % rst_name
fns_rst.append(fn2_rst | )
write_if_changed(fn2_rst, f2.getvalue())
print >> f1, ' %s' % rst_name
# write if the contents have changed
fn1_rst = 'lib/pck_%s.rst' % package.replace('.', '_')
fns_rst.append(fn1_rst)
write_if_changed(fn1_rst, f1.getvalue())
# Remove other rst files
for fn_rst in glob('lib/*.rst'):
if fn_rst not in fns_rst:
print 'Removing %s' % fn_rst
os.remove(fn_rst)
# Allow use both as a script and as an importable module.
if __name__ == '__main__':
    main()
|
zhangtuoparis13/Vintageous | xsupport.py | Python | mit | 6,043 | 0.000993 | """Assorted commands.
"""
import os
import threading
import sublime
import sublime_plugin
from Vintageous.state import _init_vintageous
from Vintageous.state import State
from Vintageous.vi import settings
from Vintageous.vi import cmd_defs
from Vintageous.vi.dot_file import DotFile
from Vintageous.vi.utils import modes
from Vintageous.vi.utils import regions_transformer
class _vi_slash_on_parser_done(sublime_plugin.WindowCommand):
    """Finalizes a '/' (forward) search once the input parser completes."""

    def run(self, key=None):
        state = State(self.window.active_view())
        state.motion = cmd_defs.ViSearchForwardImpl()
        # Remember the typed pattern so 'n'/'N' can repeat the search; fall
        # back to the previous pattern when the user entered nothing.
        state.last_buffer_search = (state.motion._inp or
                                    state.last_buffer_search)
class _vi_question_mark_on_parser_done(sublime_plugin.WindowCommand):
    """Finalizes a '?' (backward) search once the input parser completes."""

    def run(self, key=None):
        state = State(self.window.active_view())
        state.motion = cmd_defs.ViSearchBackwardImpl()
        # Remember the typed pattern so 'n'/'N' can repeat the search; fall
        # back to the previous pattern when the user entered nothing.
        state.last_buffer_search = (state.motion._inp or
                                    state.last_buffer_search)
# TODO: Test me.
class VintageStateTracker(sublime_plugin.EventListener):
    """Keeps Vintageous state in sync with Sublime Text view events."""

    def on_post_save(self, view):
        # Ensure the carets are within valid bounds. For instance, this is a
        # concern when `trim_trailing_white_space_on_save` is set to true.
        state = State(view)
        view.run_command('_vi_adjust_carets', {'mode': state.mode})

    def on_query_context(self, view, key, operator, operand, match_all):
        # Delegate key-binding context queries to the Vintageous state.
        vintage_state = State(view)
        return vintage_state.context.check(key, operator, operand, match_all)

    def on_close(self, view):
        # Drop per-view settings so they are not leaked after the view closes.
        settings.destroy(view)
class ViMouseTracker(sublime_plugin.EventListener):
    """Intercepts mouse 'drag_select' so clicks cooperate with vi modes."""
    def on_text_command(self, view, command, args):
        # Only mouse selection commands are of interest here; everything
        # else passes through untouched (returning None keeps the command).
        if command == 'drag_select':
            state = State(view)
            if state.mode in (modes.VISUAL, modes.VISUAL_LINE,
                              modes.VISUAL_BLOCK):
                # Extending / by-word / additive selections are allowed as-is.
                if (args.get('extend') or (args.get('by') == 'words') or
                        args.get('additive')):
                    return
                elif not args.get('extend'):
                    # Plain click while in a visual mode: run the click, then
                    # drop back to normal mode.
                    return ('sequence', {'commands': [
                        ['drag_select', args], ['_enter_normal_mode', {
                            'mode': state.mode}]
                        ]})
            elif state.mode == modes.NORMAL:
                # TODO(guillermooo): Dragging the mouse does not seem to
                # fire a different event than simply clicking. This makes it
                # hard to update the xpos.
                if args.get('extend') or (args.get('by') == 'words'):
                    # Drag/word selection in normal mode: perform it, then
                    # switch to visual mode to match vi expectations.
                    return ('sequence', {'commands': [
                        ['drag_select', args], ['_enter_visual_mode', {
                            'mode': state.mode}]
                        ]})
# TODO: Test me.
class ViFocusRestorerEvent(sublime_plugin.EventListener):
    """Re-initialize Vintageous when focus moves between views.

    A short timer distinguishes switching views inside Sublime Text from
    switching back to Sublime Text from another application.
    """

    def __init__(self):
        self.timer = None

    def action(self):
        # Timer fired: the deactivation was an app switch, not a view switch.
        self.timer = None

    def on_activated(self, view):
        if not self.timer:
            # Switching back from another application. Ignore.
            return
        self.timer.cancel()
        # Switching to a different view; enter normal mode.
        _init_vintageous(view)

    def on_deactivated(self, view):
        self.timer = threading.Timer(0.25, self.action)
        self.timer.start()
class _vi_adjust_carets(sublime_plugin.TextCommand):
    """Nudge carets off EOL/EOF so they sit on a character in normal mode."""

    def run(self, edit, mode=None):
        normal_like = (modes.NORMAL, modes.INTERNAL_NORMAL)

        def adjust(view, sel):
            if mode not in normal_like:
                return sel
            at_eol = view.substr(sel.b) == '\n'
            at_eof = sel.b == view.size()
            if (at_eol or at_eof) and not view.line(sel.b).empty():
                # Step back onto the line's last real character.
                return sublime.Region(sel.b - 1)
            return sel

        regions_transformer(self.view, adjust)
class Sequence(sublime_plugin.TextCommand):
    """Run a list of (command, args) pairs as one text command.

    Required so that mark_undo_groups_for_gluing and friends work.
    """

    def run(self, edit, commands):
        for name, arguments in commands:
            self.view.run_command(name, arguments)
class ResetVintageous(sublime_plugin.WindowCommand):
    """Wipe per-view Vintageous state and reload the user's .vintageousrc."""

    def run(self):
        view = self.window.active_view()
        # Erasing the 'vintage' settings key forces a clean re-init below.
        view.settings().erase('vintage')
        _init_vintageous(view)
        DotFile.from_user().run()
        print("Package.Vintageous: State reset.")
        sublime.status_message("Vintageous: State reset")
class ForceExitFromCommandMode(sublime_plugin.WindowCommand):
    """A sort of a panic button.

    Drops all Vintageous state for the active view and returns Sublime
    Text to its regular, non-modal editing behaviour.
    """

    def run(self):
        v = self.window.active_view()
        v.settings().erase('vintage')
        # XXX: What happens exactly when the user presses Esc again now? Which
        # mode are we in?
        v.settings().set('command_mode', False)
        v.settings().set('inverse_caret_state', False)
        print("Vintageous: Exiting from command mode.")
        sublime.status_message("Vintageous: Exiting from command mode.")
class VintageousToggleCtrlKeys(sublime_plugin.WindowCommand):
    """Flip the 'vintageous_use_ctrl_keys' flag in the user preferences."""

    def run(self):
        prefs = sublime.load_settings('Preferences.sublime-settings')
        enabled = not prefs.get('vintageous_use_ctrl_keys', False)
        prefs.set('vintageous_use_ctrl_keys', enabled)
        sublime.save_settings('Preferences.sublime-settings')
        if enabled:
            status = 'enabled'
        else:
            status = 'disabled'
        print("Package.Vintageous: Use of Ctrl- keys {0}.".format(status))
        sublime.status_message("Vintageous: Use of Ctrl- keys {0}"
                               .format(status))
class ReloadVintageousSettings(sublime_plugin.TextCommand):
    """Re-read the user's .vintageousrc file and apply it."""
    def run(self, edit):
        DotFile.from_user().run()
class VintageousOpenConfigFile(sublime_plugin.WindowCommand):
    """Open (creating it first if necessary) $packages/User/.vintageousrc."""

    def run(self):
        path = os.path.realpath(os.path.join(sublime.packages_path(),
                                             'User/.vintageousrc'))
        # Touch with append mode: creates the file when missing and leaves
        # existing contents untouched.  This avoids the exists()/open() race
        # of the old LBYL version, where a file created between the check and
        # an open('w') would have been truncated.
        with open(path, 'a'):
            pass
        self.window.open_file(path)
|
VeryCB/flask-slack | docs/conf.py | Python | bsd-3-clause | 8,287 | 0.005792 | # -*- coding: utf-8 -*-
#
# flask_slack documentation build configuration file, created by
# sphinx-quickstart on Sun Oct 19 16:31:35 2014.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('..'))
sys.path.append(os.path.abspath('_themes'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.doctest',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
mast | er_doc = 'index'
# General information about the project.
project = u'flask_slack'
copyright = u'2014, VeryCB'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.1.5'
# The full version, including alpha/beta/rc tags.
release = '0.1.5'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'flask'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
html_theme_options = {
'index_logo': None
}
# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = ['_themes']
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'flask_slackdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index', 'flask_slack.tex', u'flask\\_slack Documentation',
u'VeryCB', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'flask_slack', u'flask_slack Documentation',
[u'VeryCB'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'flask_slack', u'flask_slack Documentation',
u'VeryCB', 'flask_slack', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'fo |
samphippen/london | render.py | Python | mit | 232 | 0.025862 | import Image
import json
# Bounding box of the rendered map area (degrees latitude/longitude).
south = 51.416
north = 51.623
west = -0.415
east = 0.179

if __name__ == "__main__":
    # output_pixel_data is a raw RGBA dump, 2668x1494 pixels.
    # 'with' ensures the data file is closed even if decoding fails.
    with open("output_pixel_data") as raw:
        x = Image.fromstring("RGBA", (2668, 1494), raw.read())
    x.save("lit-map.png", "PNG")
|
tensor-tang/Paddle | python/paddle/fluid/tests/unittests/test_unique_with_counts.py | Python | apache-2.0 | 2,925 | 0 | # Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import unittest
import numpy as np
from op_test import OpTest
import paddle.fluid.core as core
from paddle.fluid.op import Operator
class TestUniqueWithCountsOp(OpTest):
    """Checks `unique_with_counts` against hand-computed expected outputs."""

    def setUp(self):
        self.op_type = "unique_with_counts"
        self.init_config()

    def test_check_output(self):
        self.check_output()

    def init_config(self):
        # 3 appears three times; the other values once, in first-seen order.
        self.inputs = {'X': np.array([2, 3, 3, 1, 5, 3], dtype='int64'), }
        self.attrs = {'dtype': int(core.VarDesc.VarType.INT32)}
        self.outputs = {
            'Out': np.array(
                [2, 3, 1, 5], dtype='int64'),
            'Index': np.array(
                [0, 1, 1, 2, 3, 1], dtype='int32'),
            'Count': np.array(
                [1, 3, 1, 1], dtype='int32')
        }
class TestOne(TestUniqueWithCountsOp):
    """Single-element input: the value is unique with count 1."""

    def init_config(self):
        self.inputs = {'X': np.array([2], dtype='int64'), }
        self.attrs = {'dtype': int(core.VarDesc.VarType.INT32)}
        self.outputs = {
            'Out': np.array(
                [2], dtype='int64'),
            'Index': np.array(
                [0], dtype='int32'),
            'Count': np.array(
                [1], dtype='int32')
        }
class TestRandom(TestUniqueWithCountsOp):
    """Cross-checks the op against a NumPy reference on random data."""

    def init_config(self):
        input_data = np.random.randint(0, 100, (2000, ), dtype='int64')
        self.inputs = {'X': input_data}
        self.attrs = {'dtype': int(core.VarDesc.VarType.INT64)}
        # np.unique returns values sorted ascending; reorder them by first
        # occurrence to match the op's output ordering.
        np_unique, np_index = np.unique(input_data, return_index=True)
        order = np.argsort(np_index)
        target_out = np_unique[order].astype('int64')
        # Map each input element to its position in target_out via a dict
        # (O(n) instead of the old quadratic list.index() scan).
        position = {val: idx for idx, val in enumerate(target_out.tolist())}
        target_index = np.array(
            [position[val] for val in input_data.tolist()], dtype='int64')
        # Occurrence count for each unique value.
        target_count = np.bincount(
            target_index, minlength=len(target_out)).astype('int64')
        self.outputs = {
            'Out': target_out,
            'Index': target_index,
            'Count': target_count
        }
if __name__ == "__main__":
unittest.main()
|
markosa/hydromon-client | net/hydromon/client/serverclient.py | Python | gpl-2.0 | 2,086 | 0.009108 | '''
Created on May 8, 2015
@author: markos
'''
import json
import logging
import requests
from net.hydromon.config import ConfigurationUtil
import os
from requests.exceptions import ConnectionError, RequestException
import sys
import traceback
from | time import strftime
import datetime
log = logging.getLogger(__name__)
def send(valuedto):
    """POST *valuedto* as JSON to its sensor's configured endpoint.

    On any failure (request exception, unexpected error, or a non-200
    response) the value is handed to handleError() so it can be written to
    the emergency-save directory for later recovery.
    """
    payload = json.dumps(valuedto, default=serialize_valuedto, indent=2)
    endpointUrl = ConfigurationUtil.getAddValueEndpointUrl(valuedto.sensorId)
    log.debug("Sending data: " + payload + " to " + endpointUrl)
    response = None
    try:
        response = requests.post(endpointUrl, payload, timeout=20)
    except RequestException as re:
        log.fatal(re)
    except Exception:
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
        # still propagate; everything else is logged with a full traceback.
        log.fatal("Unknown error occurred!")
        exc_type, exc_value, exc_traceback = sys.exc_info()
        traceback.print_exception(exc_type, exc_value, exc_traceback)
    finally:
        if response is None:
            # The request never completed; save the data for recovery.
            handleError(valuedto)
        else:
            log.debug(response)
            if response.status_code != 200:
                log.warn("Received response code %s " % response.status_code)
                handleError(valuedto)
def serialize_valuedto(o):
    """json.dumps ``default`` hook: serialize a value DTO as a plain dict.

    The sensor id is carried in the endpoint URL, so it is stripped from the
    payload.  `pop` (instead of `del`) tolerates objects lacking the
    attribute rather than raising KeyError from inside json.dumps.
    """
    res = o.__dict__.copy()
    res.pop('sensorId', None)
    return res
def handleError(valuedto):
    """Persist *valuedto* as a timestamped JSON file for later recovery.

    :type valuedto: net.hydromon.dto.valuedto
    """
    savedir = ConfigurationUtil.getEmergencySaveDirectoryForSensor(valuedto.sensorId)
    if not os.path.exists(savedir):
        os.mkdir(savedir)
    if not os.access(savedir, os.W_OK):
        log.error("Unable to write emergency data in %s" % savedir)
        return
    # valuedto.time is milliseconds since the epoch.
    date = datetime.datetime.fromtimestamp(valuedto.time / 1e3)
    filename = savedir + "/sensordata_" + strftime("%m%d%Y_%H%M%S", date.timetuple()) + ".json"
    payload = json.dumps(valuedto, default=serialize_valuedto, indent=2)
    # 'with' guarantees the file is flushed and closed even if write() fails
    # (the old open/write/flush/close sequence leaked the handle on error).
    with open(filename, 'w') as f:
        f.write(payload)
    log.warn("Wrote recovery data file %s " % filename)
if __name__ == '__main__':
pass |
abo-abo/edx-platform | lms/djangoapps/courseware/courses.py | Python | agpl-3.0 | 11,362 | 0.001672 | from collections import defaultdict
from fs.errors import ResourceNotFoundError
import logging
import inspect
import re
from path import path
from django.http import Http404
from django.conf import settings
from .module_render import get_module
from xmodule.course_module import CourseDescriptor
from xmodule.modulestore import Location, XML_MODULESTORE_TYPE
from xmodule.modulestore.django import modulestore
from xmodule.contentstore.content import StaticContent
from xmodule.modulestore.exceptions import ItemNotFoundError, InvalidLocationError
from courseware.model_data import FieldDataCache
from static_replace import replace_static_urls
from courseware.access import has_access
import branding
log = logging.getLogger(__name__)
def get_request_for_thread():
    """Walk up the stack, return the nearest first argument named "request"."""
    frame = None
    try:
        for record in inspect.stack()[1:]:
            frame = record[0]
            varnames = frame.f_code.co_varnames
            # Match plain functions (request, ...) and methods
            # (self, request, ...).
            if (varnames[:1] == ("request",) or
                    varnames[:2] == ("self", "request")):
                return frame.f_locals["request"]
        return None
    finally:
        # Frames participate in reference cycles; drop ours explicitly so
        # they can be collected promptly.
        del frame
def get_course(course_id, depth=0):
    """
    Given a course id, return the corresponding course descriptor.

    If course_id is not valid, raises a ValueError. This is appropriate
    for internal use.

    depth: The number of levels of children for the modulestore to cache.
    None means infinite depth. Default is to fetch no children.
    """
    try:
        course_loc = CourseDescriptor.id_to_location(course_id)
        return modulestore().get_instance(course_id, course_loc, depth=depth)
    except (KeyError, ItemNotFoundError):
        raise ValueError("Course not found: {}".format(course_id))
    except InvalidLocationError:
        raise ValueError("Invalid location: {}".format(course_id))
def get_course_by_id(course_id, depth=0):
    """Return the course descriptor for *course_id*, raising 404 on failure.

    depth: The number of levels of children for the modulestore to cache.
    None means infinite depth.
    """
    try:
        location = CourseDescriptor.id_to_location(course_id)
        return modulestore().get_instance(course_id, location, depth=depth)
    except (KeyError, ItemNotFoundError):
        raise Http404("Course not found.")
    except InvalidLocationError:
        raise Http404("Invalid location")
def get_course_with_access(user, course_id, action, depth=0):
    """Look up the course and verify *user* may perform *action* on it.

    Raises a 404 if the course_id is invalid or the user lacks access.

    depth: The number of levels of children for the modulestore to cache.
    None means infinite depth.
    """
    descriptor = get_course_by_id(course_id, depth=depth)
    if has_access(user, descriptor, action):
        return descriptor
    # Deliberately return a non-specific error message to avoid
    # leaking info about access control settings
    raise Http404("Course not found.")
def get_opt_course_with_access(user, course_id, action):
    """
    Same as get_course_with_access, except that if course_id is None,
    return None without performing any access checks.
    """
    if course_id is not None:
        return get_course_with_access(user, course_id, action)
    return None
def course_image_url(course):
    """Return the URL of the course's cover image.

    XML-backed courses (and any course with a static_asset_path) are served
    a conventional /static/.../images/course_image.jpg path; other courses
    get a c4x asset URL built from the course's configured image.
    """
    if course.static_asset_path or modulestore().get_modulestore_type(course.location.course_id) == XML_MODULESTORE_TYPE:
        # Fall back to data_dir when no explicit static asset path is set.
        return '/static/' + (course.static_asset_path or getattr(course, 'data_dir', '')) + "/images/course_image.jpg"
    else:
        # Mongo-backed course: build the asset location for the image.
        loc = course.location.replace(tag='c4x', category='asset', name=course.course_image)
        _path = StaticContent.get_url_path_from_location(loc)
        return _path
def find_file(filesystem, dirs, filename):
    """
    Looks for a filename in a list of dirs on a filesystem, in the specified order.

    filesystem: an OSFS filesystem
    dirs: a list of path objects
    filename: a string

    Returns d / filename if found in dir d, else raises ResourceNotFoundError.
    """
    candidates = (path(directory) / filename for directory in dirs)
    for candidate in candidates:
        if filesystem.exists(candidate):
            return candidate
    raise ResourceNotFoundError("Could not find {0}".format(filename))
def get_course_about_section(course, section_key):
    """
    This returns the snippet of html to be rendered on the course about page,
    given the key for the section.

    Valid keys:
    - overview
    - title
    - university
    - number
    - short_description
    - description
    - key_dates (includes start, end, exams, etc)
    - video
    - course_staff_short
    - course_staff_extended
    - requirements
    - syllabus
    - textbook
    - faq
    - more_info
    - ocw_links

    Returns the rendered html (possibly ''), None when the 'about' item is
    missing from the modulestore, or raises KeyError for an unknown key.
    """
    # Many of these are stored as html files instead of some semantic
    # markup. This can change without effecting this interface when we find a
    # good format for defining so many snippets of text/html.
    # TODO: Remove number, instructors from this list
    if section_key in ['short_description', 'description', 'key_dates', 'video',
                       'course_staff_short', 'course_staff_extended',
                       'requirements', 'syllabus', 'textbook', 'faq', 'more_info',
                       'number', 'instructors', 'overview',
                       'effort', 'end_date', 'prerequisites', 'ocw_links']:
        try:
            # The current HTTP request is recovered from the call stack so
            # this helper can be called without threading it through.
            request = get_request_for_thread()
            loc = course.location.replace(category='about', name=section_key)
            # Use an empty cache
            field_data_cache = FieldDataCache([], course.id, request.user)
            about_module = get_module(
                request.user,
                request,
                loc,
                field_data_cache,
                course.id,
                not_found_ok=True,
                wrap_xmodule_display=False,
                static_asset_path=course.static_asset_path
            )
            html = ''
            if about_module is not None:
                html = about_module.render('student_view').content
            return html
        except ItemNotFoundError:
            log.warning("Missing about section {key} in course {url}".format(
                key=section_key, url=course.location.url()))
            return None
    elif section_key == "title":
        return course.display_name_with_default
    elif section_key == "university":
        return course.display_org_with_default
    elif section_key == "number":
        return course.display_number_with_default
    raise KeyError("Invalid about key " + str(section_key))
def get_course_info_section(request, course, section_key):
    """
    This returns the snippet of html to be rendered on the course info page,
    given the key for the section.

    Valid keys:
    - handouts
    - guest_handouts
    - updates
    - guest_updates
    """
    # Build the course_info location for the requested section.
    loc = Location(course.location.tag, course.location.org, course.location.course, 'course_info', section_key)
    # Use an empty cache
    field_data_cache = FieldDataCache([], course.id, request.user)
    info_module = get_module(
        request.user,
        request,
        loc,
        field_data_cache,
        course.id,
        wrap_xmodule_display=False,
        static_asset_path=course.static_asset_path
    )
    # Render to html; an absent module simply yields the empty string.
    html = ''
    if info_module is not None:
        html = info_module.render('student_view').content
    return html
# TODO: Fix this such that these are pulled in as extra course-specific tabs.
# arjun will address this by the end of October if no one does so prior to
# then.
def get_course_syllabus_section(course, section_key):
"""
This returns the snippet of html to be rendered on the syllabus page,
given t |
ymichael/cprofilev | example_for_profiling.py | Python | mit | 354 | 0 | """This is just a simple example for testing cprofilev. To see cprofilev in
action, run
python -m cprofilev example_for_profiling.py
"""
import random
import time
def product(x, y):
    """Return the product of *x* and *y*."""
    return x * y
def main():
    """Loop forever, multiplying by random factors roughly 10x per second.

    Exists purely to give cprofilev a long-running workload to profile;
    it never returns.
    """
    x = 1.
    while True:
        x = product(x, 0.5 + random.random())
        time.sleep(0.1)
if __name__ == '__main__':
main()
|
larsbergstrom/servo | tests/wpt/web-platform-tests/tools/wptrunner/wptrunner/executors/executorwebdriver.py | Python | mpl-2.0 | 18,381 | 0.001959 | import json
import os
import socket
import sys
import threading
import time
import traceback
import urlparse
import uuid
from .base import (CallbackHandler,
RefTestExecutor,
RefTestImplementation,
TestharnessExecutor,
extra_timeout,
strip_server)
from .protocol import (BaseProtocolPart,
TestharnessProtocolPart,
Protocol,
SelectorProtocolPart,
ClickProtocolPart,
SendKeysProtocolPart,
ActionSequenceProtocolPart,
TestDriverProtocolPart,
GenerateTestReportProtocolPart)
from ..testrunner import Stop
import webdriver as client
here = os.path.join(os.path.split(__file__)[0])
class WebDriverBaseProtocolPart(BaseProtocolPart):
    """Core WebDriver session operations shared by all protocol parts."""

    def setup(self):
        self.webdriver = self.parent.webdriver

    def execute_script(self, script, asynchronous=False):
        method = self.webdriver.execute_async_script if asynchronous else self.webdriver.execute_script
        return method(script)

    def set_timeout(self, timeout):
        """Set the script timeout (seconds) on the session."""
        try:
            self.webdriver.timeouts.script = timeout
        except client.WebDriverException:
            # workaround https://bugs.chromium.org/p/chromedriver/issues/detail?id=2057
            body = {"type": "script", "ms": timeout * 1000}
            self.webdriver.send_session_command("POST", "timeouts", body)

    @property
    def current_window(self):
        return self.webdriver.window_handle

    def set_window(self, handle):
        self.webdriver.window_handle = handle

    def wait(self):
        # Block until the session dies or the window goes away; script
        # timeouts are expected and simply retried.
        while True:
            try:
                self.webdriver.execute_async_script("")
            except (client.TimeoutException, client.ScriptTimeoutException):
                pass
            except (socket.timeout, client.NoSuchWindowException,
                    client.UnknownErrorException, IOError):
                break
            except Exception:
                # Fix: format_exc() takes no exception argument -- the old
                # format_exc(e) passed the exception as `limit`, which could
                # suppress the traceback output entirely.
                self.logger.error(traceback.format_exc())
                break
class WebDriverTestharnessProtocolPart(TestharnessProtocolPart):
    """Manage the testharness runner window and locate test windows."""

    def setup(self):
        self.webdriver = self.parent.webdriver
        self.runner_handle = None
        with open(os.path.join(here, "runner.js")) as f:
            self.runner_script = f.read()

    def load_runner(self, url_protocol):
        if self.runner_handle:
            self.webdriver.window_handle = self.runner_handle
        url = urlparse.urljoin(self.parent.executor.server_url(url_protocol),
                               "/testharness_runner.html")
        self.logger.debug("Loading %s" % url)
        self.webdriver.url = url
        self.runner_handle = self.webdriver.window_handle
        format_map = {"title": threading.current_thread().name.replace("'", '"')}
        self.parent.base.execute_script(self.runner_script % format_map)

    def close_old_windows(self):
        self.webdriver.actions.release()
        handles = [item for item in self.webdriver.handles if item != self.runner_handle]
        for handle in handles:
            try:
                self.webdriver.window_handle = handle
                self.webdriver.close()
            except client.NoSuchWindowException:
                # Already gone; nothing to close.
                pass
        self.webdriver.window_handle = self.runner_handle
        return self.runner_handle

    def get_test_window(self, window_id, parent, timeout=5):
        """Find the test window amongst all the open windows.

        This is assumed to be either the named window or the one after the
        parent in the list of window handles.

        :param window_id: The DOM name of the Window
        :param parent: The handle of the runner window
        :param timeout: The time in seconds to wait for the window to appear.
                        This is because in some implementations there's a race
                        between calling window.open and the window being added
                        to the list of WebDriver accessible windows.
        """
        test_window = None
        end_time = time.time() + timeout
        while time.time() < end_time:
            try:
                # Try using the JSON serialization of the WindowProxy object,
                # it's in Level 1 but nothing supports it yet
                win_s = self.webdriver.execute_script("return window['%s'];" % window_id)
                win_obj = json.loads(win_s)
                test_window = win_obj["window-fcc6-11e5-b4f8-330a88ab9d7f"]
            except Exception:
                pass

            if test_window is None:
                after = self.webdriver.handles
                if len(after) == 2:
                    test_window = next(iter(set(after) - {parent}))
                elif after[0] == parent and len(after) > 2:
                    # Hope the first one here is the test window
                    test_window = after[1]

            if test_window is not None:
                assert test_window != parent
                return test_window

            time.sleep(0.1)

        raise Exception("unable to find test window")
class WebDriverSelectorProtocolPart(SelectorProtocolPart):
    """CSS-selector element lookup backed by WebDriver."""
    def setup(self):
        self.webdriver = self.parent.webdriver
    def elements_by_selector(self, selector):
        return self.webdriver.find.css(selector)
    def elements_by_selector_and_frame(self, element_selector, frame):
        # Same lookup, scoped to the given frame.
        return self.webdriver.find.css(element_selector, frame=frame)
class WebDriverClickProtocolPart(ClickProtocolPart):
    """Perform trusted clicks on elements via WebDriver."""

    def setup(self):
        self.webdriver = self.parent.webdriver

    def element(self, element):
        # Lazy %-style logging: the repr is only rendered if INFO is enabled.
        self.logger.info("click %s", repr(element))
        return element.click()
class WebDriverSendKeysProtocolPart(SendKeysProtocolPart):
    """Send keyboard input to an element via WebDriver."""
    def setup(self):
        self.webdriver = self.parent.webdriver
    def send_keys(self, element, keys):
        try:
            return element.send_keys(keys)
        except client.UnknownErrorException as e:
            # workaround https://bugs.chromium.org/p/chromedriver/issues/detail?id=1999
            # Only the specific chromedriver failure (HTTP 500 + "unknown
            # error") is retried via the raw element command; anything else
            # is re-raised unchanged.
            if (e.http_status != 500 or
                    e.status_code != "unknown error"):
                raise
            return element.send_element_command("POST", "value", {"value": list(keys)})
class WebDriverActionSequenceProtocolPart(ActionSequenceProtocolPart):
    """Replay WebDriver Actions API sequences (pointer/key sources)."""
    def setup(self):
        self.webdriver = self.parent.webdriver
    def send_actions(self, actions):
        # `actions` is the testdriver payload: {"actions": [...]}.
        self.webdriver.actions.perform(actions['actions'])
class WebDriverTestDriverProtocolPart(TestDriverProtocolPart):
    """Forward testdriver completion messages into the page via postMessage."""

    def setup(self):
        self.webdriver = self.parent.webdriver

    def send_message(self, message_type, status, message=None):
        payload = {
            "type": "testdriver-%s" % str(message_type),
            "status": str(status)
        }
        if message:
            payload["message"] = str(message)
        # Deliver to the page as a cross-origin-safe postMessage.
        self.webdriver.execute_script("window.postMessage(%s, '*')" % json.dumps(payload))
class WebDriverGenerateTestReportProtocolPart(GenerateTestReportProtocolPart):
    """Trigger the Reporting API's generate_test_report command."""
    def setup(self):
        self.webdriver = self.parent.webdriver
    def generate_test_report(self, message):
        # Non-standard session endpoint exposed by the driver.
        json_message = {"message": message}
        self.webdriver.send_session_command("POST", "reporting/generate_test_report", json_message)
class WebDriverProtocol(Protocol):
implements = [WebDriverBaseProtocolPart,
WebDriverTestharnessProtocolPart,
WebDriverSelectorProtocolPart,
WebDriverClickProtocolPart,
WebDriverSendKeysProtocolPart,
WebDriverActionSequenceProtocolPart,
WebDriverTestDriverProtocolPart,
WebDriverGenerateTestReportProtocolPart]
def __init__(self, executor, browser, capabilities, **kwargs):
super(WebDriverProtocol, self).__init__(executor, browser)
self.capabilities = capabilities
self.url = browser.webdriver_url
self.webdriver = None
def connect(self):
"""Connect to browser via WebDriver."""
|
dongjoon-hyun/tools | telegram-shell/telegram-shell.py | Python | apache-2.0 | 1,508 | 0.002653 | #!/usr/bin/env python
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import getpass
import netrc
import subprocess
import telebot
info = netrc.netrc()
login, account, password = info.authent | icators("api.telegram.org")
token = password
bot = telebot.TeleBot(token)
def run(cmd, message):
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = p.communicate()
print out, err
bot.send_mess | age(message.chat.id, '```\n' + out + '```',
disable_web_page_preview=True,
disable_notification=True,
parse_mode='Markdown')
@bot.message_handler(func=lambda m: True)
def shell(message):
    # Handle every incoming Telegram message.
    try:
        # Only obey the account whose Telegram username matches the local
        # unix user running this process.
        if message.from_user.username == getpass.getuser():
            cmd = message.text.split()
            # Commands are whitelisted: the first word maps to cmd/<name>.sh.
            cmd[0] = 'cmd/%s.sh' % cmd[0]
            print cmd
            run(cmd, message)
        else:
            print message.from_user.username, message.text
    except:
        # NOTE(review): bare except silently swallows every error (including
        # KeyboardInterrupt); consider `except Exception` plus logging.
        print message
bot.polling()
|
richm/389-perf-test | nconns.py | Python | gpl-3.0 | 504 | 0 | import sys
import time
import ldap
url, binddn, bindpw, basedn, nconns, niters = sys.arg | v[1:]
conns = []
for ii in xrange(0, int(nconns)):
conn = ldap.initialize(url)
conns.append(conn)
for conn in conns:
conn.simple_bind(binddn, bindpw)
for ii in xrange(0, int(niters)):
for conn | in conns:
ents = conn.search_s(basedn, ldap.SCOPE_SUBTREE, "uid=scarter")
assert(len(ents) == 1)
assert(ents[0][1]['uid'][0] == 'scarter')
for conn in conns:
conn.unbind_s()
|
monuszko/django-ratings | ratings/forms.py | Python | gpl-2.0 | 1,958 | 0.001532 | from django import forms
f | rom django.core.exceptions import ValidationError
from ratings.models import Choice, Score
class ScoreForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
self.user = kwargs.pop('user', None)
self.criteria = kwargs.pop('criteria', None)
self.obj_id = kwargs.pop('obj_id', None)
super(ScoreForm, self).__init__(*args, **kwargs)
self.fields['value'] = forms.ChoiceField(choices=self._get_choic | es(
please_select='Please select:'))
def _get_choices(self, please_select=None):
choices = Choice.objects.values_list('value', 'label')
if please_select is not None:
choices = tuple([(666, please_select)] + list(choices))
return choices
def _get_choice_values(self):
return [first for first, second in self._get_choices()]
def min(self):
values = self._get_choice_values()
return min(values) if values else 0 # min, max fixed in 3.4
def max(self):
values = self._get_choice_values()
return max(values) if values else 0
def name(self):
return self.criteria.name
def clean(self):
cleaned_data = super(ScoreForm, self).clean()
value = cleaned_data['value']
rng = [unicode(v) for v in self._get_choice_values()]
if Score.objects.filter(user=self.user,
object_id=self.obj_id,
content_type=self.criteria.content_type,
criteria=self.criteria).exists():
raise ValidationError("Only one score per user allowed.")
if value not in rng:
rng = ', '.join(rng)
raise ValidationError("Values should be in range {0}".format(rng))
return cleaned_data
class Meta:
model = Score
widgets = {
'comment': forms.Textarea(attrs={'cols': 40, 'rows': 3}),
}
|
sasmita/upm | examples/python/mpu9150.py | Python | mit | 2,714 | 0.001842 | #!/usr/bin/python
# Author: Jon Trulson <jtrulson@ics.com>
# Copyright (c) 2015 Intel Corporation.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
from __future__ import print_function
import time, sys, signal, atexit
from upm import pyupm_mpu9150 as sensorObj
def main():
# Instantiate an MPU9150 on I2C bus 0
sensor = sensorObj.MPU9150()
## Exit handlers ##
# This function stops python from printing a stacktrace when you hit control-C
def SIGINTHandler(signum, frame):
raise SystemExit
# This function lets you run code on exit
def exitHandler():
print("Exiting")
sys.exit(0)
# Register exit handlers
atexit.register(exitHandler)
signal.signal(signal.SIGINT, SIGINTHandler)
sensor.init()
x = sensorObj.new_floatp()
y = sensorObj.new_floatp()
z = sensorObj.new_floatp()
while (1):
sensor.update()
sensor.getAccelerometer(x, y, z)
print("Accelerometer: AX: ", sensorObj.floatp_value(x), end=' ')
p | rint(" AY: ", sen | sorObj.floatp_value(y), end=' ')
print(" AZ: ", sensorObj.floatp_value(z))
sensor.getGyroscope(x, y, z)
print("Gyroscope: GX: ", sensorObj.floatp_value(x), end=' ')
print(" GY: ", sensorObj.floatp_value(y), end=' ')
print(" GZ: ", sensorObj.floatp_value(z))
sensor.getMagnetometer(x, y, z)
print("Magnetometer: MX: ", sensorObj.floatp_value(x), end=' ')
print(" MY: ", sensorObj.floatp_value(y), end=' ')
print(" MZ: ", sensorObj.floatp_value(z))
print("Temperature: ", sensor.getTemperature())
print()
time.sleep(.5)
if __name__ == '__main__':
main()
|
heytrav/drs-project | domain_api/migrations/0042_remove_registereddomain_domain.py | Python | mit | 406 | 0 | # -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-06-16 10:38
from __future__ import | unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('domain_api', '0041_auto_20170616_1037'),
]
operations = [
migrations.RemoveField(
model_name='registereddomain',
name='domain',
| ),
]
|
roalddevries/django-dynamicforms | dynamicforms/admin.py | Python | apache-2.0 | 3,276 | 0.002137 | from datetime import date
from django import forms
from django.http import HttpResponse
from django.contrib import admin
from models import DynamicFieldValue, DynamicField, DynamicFormFieldRelation, DynamicForm, DynamicFormData
from StringIO import StringIO
from zipfile import ZipFile
import csv
class DynamicFieldValue_Inline(admin.TabularInline):
model = DynamicFieldValue
extra = 0
class DynamicFieldAdminForm(forms.ModelForm):
class Meta:
model = DynamicField
def __init__(self, *args, **kwargs):
super(DynamicFieldAdminForm, self).__init__(*args, **kwargs)
self.fields['default'].queryset = self.instance.values.all() if self.instance else DynamicFieldValue.objects.none()
class DynamicFieldAdmin(admin.ModelAdmin):
model = DynamicField
inlines = [DynamicFieldValue_Inline]
form = DynamicFieldAdminForm
list_display = ['label', 'type', 'required', 'default', 'help_text']
list_editable = ['type', 'required', 'help_text']
list_filter = ['dynamicform__name']
class DynamicFormFieldRelation_Inline(admin.TabularInline):
model = DynamicFormFieldRelation
extra = 0
class DynamicFormAdmin(admin.ModelAdmin):
model = DynamicForm
fieldsets = (
(None, {'fields': ['name', 'slug', 'type', 'success_url', 'notification_emails']}),
('Confirmation e-mail', {'classes': ['collapse'], 'fields': ['send_confirmation', 'email_recipients', 'email_subject', 'email_content']}),
)
inlines = [DynamicFormFieldRelation_Inline]
prepopulated_fields = {'slug': ['name']}
list_display = ['name', 'slug', 'type', 'success_url']
list_editable = ['type', 'success_url']
actions = ['export_data_as_csv']
def export_form_data_as_csv(self, dynamicform, output):
writer = csv.DictWriter(output, fieldnames=dynamicform.field_names)
writer.writerow(dict((f, f) for f in dynamicform.field_names))
for row in dynamicform.data_as_dicts():
writer.writerow(row)
def export_data_as_csv(self, request, queryset):
output = StringIO()
if queryset.count() == 1:
self.export_form_data_as_csv(queryset.get(), output)
mimetype = 'text/csv'
filename = '%s.%s.csv' % (queryset.get().name, date.today())
else:
zipfile = ZipFile(output, 'w')
for dynamicform in queryset:
csv_output = StringIO()
self.export_form_data_as_csv(dynamicform, csv_output)
filename = '%s.%s.csv' % (dynamicform.name, date.today())
zipfile.writestr(filename, csv_output.getvalue())
zipfile.close()
mimetype = 'application/zip'
filename = 'dynamicforms-data.%s.zip' % date.today()
response = HttpResponse(output.getvalue(), mimetype=mimetype)
response['Content-Disposition'] = 'attachment; filename=%s' % filename
return response
class DynamicFormDataAdmin(admin.ModelAdmin):
model = DynamicFormData
list_display = ['dynamicform', 'timestamp']
list_filter = ['dynamicform__name']
admin.site.register(D | ynamicField, DynamicFieldAdmin)
admin.site.register(DynamicForm, DynamicFormAdmin)
admin.site.register(DynamicFormData, Dyn | amicFormDataAdmin)
|
eruffaldi/pyoni | src/onitool/oni2depth.py | Python | mit | 1,555 | 0.01672 | # Extracts a singla RAW depth file of contiguous data
#
# Requires to build xndec
import struct
from onifile import *
from xndec import *
if __name__ == "__main__":
import sys,os
if len(sys.argv) == 0:
print "Required: ONIfilename"
sys.exit(-1)
docolor = len(sys.argv) > 3
filesize = os.stat(sys.argv[1]).st_size
a = open(sys.argv[1],"rb")
h0 = readhead1(a)
mid = 0 # maximum identifier
prelast = None
last = None
st = None
offdict = dict()
count = 0
ob = allocoutput16(512*424)
# scan all and keep pre and last
while True:
h = readrechead(a)
if h is None:
break
prelast = last
last = h
if h["nid"] > mid:
mid = h["nid"]
if h["nid"] == 1:
if h["rt"] == RECORD_NEW_DATA:
pd = parsedata(a,h)
print pd["dataoffset"],h["ps"],h["fs"]
z = a.read(h["ps"])
count += 1
if count == 50:
code,size = doXnStreamUncompressDepth16ZWithEmbTable(z,ob)
print "decoded ",code,size,"v | s input",len(z),"output",len(ob)
o = open("x.depth","wb")
o.write(ob)
o.close()
break
if h["rt"] | == RECORD_END:
continue
a.seek(h["nextheader"],0) #h["fs"]-HEADER_SIZE+h["ps"],1)
a.close()
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.