| repo_name (string, 5–100 chars) | path (string, 4–231 chars) | language (string, 1 class) | license (string, 15 classes) | size (int64, 6–947k) | score (float64, 0–0.34) | prefix (string, 0–8.16k chars) | middle (string, 3–512 chars) | suffix (string, 0–8.17k chars) |
|---|---|---|---|---|---|---|---|---|
kvangent/PokeAlarm
|
tests/filters/__init__.py
|
Python
|
agpl-3.0
| 2,293
| 0
|
import logging
class MockManager(object):
""" Mock manager for filter unit testing. """
def get_child_logger(self, name):
return logging.getLogger('test').getChild(name)
def generic_filter_test(test):
"""Decorator used for creating a generic filter test.
Requires the argument to be a function that assigns the following
attributes when called:
filt = dict used to generate the filter,
event_key = key for the event values,
pass_vals = values that create passing events,
fail_vals = values that create failing events
"""
test(test)
def generic_test(self):
# Create the filter
filt = self.gen_filter(test.filt)
# Test passing
for val in test.pass_vals:
event = self.gen_event({test.event_key: val})
self.assertTrue(
filt.check_event(event),
"pass_val failed check in {}: \n{} passed {}"
"".format(tes
|
t.__name__, event, filt))
# Test failing
for val in test.fail_vals:
event = self.gen_event({test.event_key: val})
self.assertFalse(
filt.check_event(event),
"fail_val passed check in {}: \n{} passed {}"
|
"".format(test.__name__, event, filt))
return generic_test
def full_filter_test(test):
"""Decorator used for creating a full filter test.
Requires the argument to be a function that assigns the following
attributes when called:
filt = dict used to generate the filter,
pass_items = array of dicts that should pass,
fail_items = array of dicts that should fail
"""
test(test)
def full_test(self):
filt = self.gen_filter(test.filt)
for val in test.pass_items:
event = self.gen_event(val)
self.assertTrue(
filt.check_event(event),
"pass_val failed check in {}: \n{} passed {}"
.format(test.__name__, event, filt))
for val in test.fail_items:
event = self.gen_event(val)
self.assertFalse(
filt.check_event(event),
"fail_val passed check in {}: \n{} passed {}"
"".format(test.__name__, event, filt))
return full_test
|
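To make the decorator contract above concrete, here is a minimal, hypothetical usage sketch. `_MinLevelFilter`, `ExampleFilterTest`, and the `'level'` key are invented for illustration, and the real suite's `gen_filter`/`gen_event` factories are stubbed out; only the attribute-assignment pattern (`filt`, `event_key`, `pass_vals`, `fail_vals`) comes from the docstring of `generic_filter_test`.

```python
import unittest


class _MinLevelFilter(object):
    """Toy filter: an event passes when its level meets the configured minimum."""

    def __init__(self, settings):
        self.min_lvl = settings['min_lvl']

    def check_event(self, event):
        return event['level'] >= self.min_lvl


class ExampleFilterTest(unittest.TestCase):
    # Stand-ins for the factories the real test cases provide.
    def gen_filter(self, settings):
        return _MinLevelFilter(settings)

    def gen_event(self, values):
        return dict(values)

    @generic_filter_test  # decorator from the snippet above, assumed in scope
    def test_level(self):
        self.filt = {'min_lvl': 5}
        self.event_key = 'level'
        self.pass_vals = [5, 17, 30]
        self.fail_vals = [1, 4]
```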
beheh/fireplace
|
tests/test_carddb.py
|
Python
|
agpl-3.0
| 1,192
| 0.025168
|
from hearthstone.enums import CardType, GameTag, Rarity
import utils
CARDS = utils.fireplace.cards.db
def test_all_tags_known():
"""
Iterate through the card database and check that all specified GameTags
are known in hearthstone.enums.GameTag
"""
unknown_tags = set()
known_tags = list(GameTag)
known_rarities = list(Rarity)
# Check the db loaded correctly
assert utils.fireplace.cards.db
for card in CARDS.values():
for tag in card.tags:
# We have fake tags in fireplace.enums which are always negative
if tag not in known_tags and tag > 0:
unknown_tags.add(tag)
# Test rarities as well (cf. TB_BlingBrawl_Blade1e in 10956...)
assert card.rarity in known_rarities
assert not unknown_tags
def test_play_scripts():
for card in C
|
ARDS.values():
if card.scripts.activate:
assert card.type == CardType.HERO_POWER
elif card.scripts.play:
assert card.type
|
not in (CardType.HERO, CardType.HERO_POWER, CardType.ENCHANTMENT)
def test_card_docstrings():
for card in CARDS.values():
c = utils.fireplace.utils.get_script_definition(card.id)
name = c.__doc__
if name is not None:
if name.endswith(")"):
continue
assert name == card.name
|
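The docstring check in `test_card_docstrings` relies on a simple convention: a card script's docstring is the card's name, unless the docstring ends with `)`. A tiny illustration of that convention with a made-up card class, not one from the real card database:

```python
class FAKE_001:
    """Example Card"""  # the docstring doubles as the card name


card_name = "Example Card"  # what the card database would report
doc = FAKE_001.__doc__
assert doc is None or doc.endswith(")") or doc == card_name
```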
GutenkunstLab/SloppyCell
|
Example/Gutenkunst2007/Lee_2003/reproduction.py
|
Python
|
bsd-3-clause
| 1,316
| 0.00304
|
from SloppyCell.ReactionNetworks import *
import Nets
traj2a = Dynamics.integrate(Nets.fig2a, [0, 3*60])
traj2b = Dynamics.integrate(Nets.fig2b, [0, 3*60])
traj2c = Dynamics.integrate(Nets.fig2c, [0, 3*60])
traj2d = Dynamics.integrate(Nets.fig2d, [0, 3*60])
traj2e = Dynamics.integrate(Nets.fig2e, [0, 3*60])
Plotting.figure(2)
for traj in [traj2a, traj2b, traj2c, traj2d, traj2e]:
percent = 100*traj.get_var_traj('BCatenin')/traj.get_var_val('BCatenin', 0)
Plotting.plot(traj.get_times()/60., percent, '-k')
Plotting.axis([0, 3, 0, 105])
traj6a = Dynamics.integrate(Nets.fig6a, [0, 16*60])
traj6b = Dynamics.integrate(Nets.fig6b, [0, 16*60])
traj6
|
c = Dynamics.integrate(Nets.fig6c, [0, 16*60])
Plotting.figure(6, (5, 10))
Plotting.
|
subplot(2,1,1)
Plotting.plot(traj6a.get_times()/60., traj6a.get_var_traj('BCatenin'), '-k')
Plotting.plot(traj6b.get_times()/60., traj6b.get_var_traj('BCatenin'), '-r')
Plotting.plot(traj6c.get_times()/60., traj6c.get_var_traj('BCatenin'), '-g')
Plotting.axis([-1, 16, 34, 72])
Plotting.subplot(2,1,2)
Plotting.plot(traj6a.get_times()/60., 1000*traj6a.get_var_traj('Axin'), '-k')
Plotting.plot(traj6b.get_times()/60., 1000*traj6b.get_var_traj('Axin'), '-r')
Plotting.plot(traj6c.get_times()/60., 1000*traj6c.get_var_traj('Axin'), '-g')
Plotting.axis([-1, 16, 0, 22])
|
neno1978/pelisalacarta
|
python/main-classic/servers/googlevideo.py
|
Python
|
gpl-3.0
| 2,080
| 0.000962
|
# -*- coding: utf-8 -*-
# ------------------------------------------------------------
# pelisalacarta - XBMC Plugin
# Connector for Google Video based on flashvideodownloader.org
# http://blog.tvalacarta.info/plugin-xbmc/pelisalacarta/
# ------------------------------------------------------------
import re
from core import logger
from core import scrapertools
# Returns an array of possible video URLs from the page_url
def get_video_url(page_url, premium=False, user="", password="", video_password=""):
logger.info("(page_url='%s')" % page_url)
video_urls = []
# Lo extrae a parti
|
r de flashvideodownloader.org
if page_url.startsw
|
ith("http://"):
url = 'http://www.flashvideodownloader.org/download.php?u=' + page_url
else:
url = 'http://www.flashvideodownloader.org/download.php?u=http://video.google.com/videoplay?docid=' + page_url
logger.info("url=" + url)
data = scrapertools.cache_page(url)
# Extract the video URL
newpatron = '</script>.*?<a href="(.*?)" title="Click to Download">'
newmatches = re.compile(newpatron, re.DOTALL).findall(data)
if len(newmatches) > 0:
video_urls.append(["[googlevideo]", newmatches[0]])
for video_url in video_urls:
logger.info("%s - %s" % (video_url[0], video_url[1]))
return video_urls
# Finds this server's videos in the given text
def find_videos(data):
encontrados = set()
devuelve = []
patronvideos = 'http://video.google.com/googleplayer.swf.*?docid=([0-9]+)'
logger.info("#" + patronvideos + "#")
matches = re.compile(patronvideos, re.DOTALL).findall(data)
for match in matches:
titulo = "[googlevideo]"
if match.count("&") > 0:
primera = match.find("&")
url = match[:primera]
else:
url = match
if url not in encontrados:
logger.info(" url=" + url)
devuelve.append([titulo, url, 'googlevideo'])
encontrados.add(url)
else:
logger.info(" url duplicada=" + url)
return devuelve
|
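For reference, a standalone sketch of the docid extraction that `find_videos` performs; the embed markup and docid below are invented, and only the regular expression is taken from the code above.

```python
import re

sample_html = (
    '<embed src="http://video.google.com/googleplayer.swf?docid=1234567890&hl=en">'
)
patronvideos = 'http://video.google.com/googleplayer.swf.*?docid=([0-9]+)'
docids = re.compile(patronvideos, re.DOTALL).findall(sample_html)
print(docids)  # ['1234567890']
```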
ingadhoc/odoo-logistic
|
logistic_project/wizard/__init__.py
|
Python
|
agpl-3.0
| 269
| 0.003717
|
# -*- coding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in module root
# directory
###########
|
########
|
###########################################################
|
sohail-aspose/Aspose_Tasks_Cloud
|
SDKs/Aspose.Tasks_Cloud_SDK_for_Python/asposetaskscloud/TasksApi.py
|
Python
|
mit
| 143,735
| 0.013247
|
#!/usr/bin/env python
import sys
import os
import urllib
import json
import re
from models import *
from ApiClient import ApiException
class TasksApi(object):
def __init__(self, apiClient):
self.apiClient = apiClient
def DeleteProjectAssignment(self, name, assignmentUid, **kwargs):
"""Deletes a project assignment with all references to it.
Args:
name (str): The name of the file. (required)
assignmentUid (int): assignment Uid (required)
storage (str): The document storage. (optional)
folder (str): The document folder. (optional)
fileName (str): The name of the project document to save changes to. If this parameter is omitted then the changes will be saved to the source project document. (optional)
Returns: SaaSposeResponse
"""
allParams = dict.fromkeys(['name', 'assignmentUid', 'storage', 'folder', 'fileName'])
params = locals()
for (key, val) in params['kwargs'].iteritems():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method DeleteProjectAssignment" % key)
params[key] = val
for (key, val) in params.iteritems():
if key in allParams:
allParams[key] = val
resourcePath = '/tasks/{name}/assignments/{assignmentUid}/?appSid={appSid}&storage={storage}&folder={folder}&fileName={fileName}'
resourcePath = resourcePath.replace('&amp;','&').replace("/?","?").replace("toFormat={toFormat}","format={format}").replace("{path}","{Path}")
if 'name' in allParams and allParams['name'] is not None:
resourcePath = resourcePath.replace("{" + "name" + "}" , str(allParams['name']))
else:
resourcePath = re.sub("[&?]name.*?(?=&|\\?|$)", "", resourcePath)
if 'assignmentUid' in allParams and allParams['assignmentUid'] is not None:
resourcePath = resourcePath.replace("{" + "assignmentUid" + "}" , str(allParams['assignmentUid']))
else:
resourcePath = re.sub("[&?]assignmentUid.*?(?=&|\\?|$)", "", resourcePath)
if 'storage' in allParams and allParams['storage'] is not None:
resourcePath = resourcePath.replace("{" + "storage" + "}" , str(allParams['storage']))
else:
resourcePath = re.sub("[&?]storage.*?(?=&|\\?|$)", "", resourcePath)
if 'folder' in allParams and allParams['folder'] is not None:
resourcePath = resourcePath.replace("{" + "folder" + "}" , str(allParams['folder']))
else:
resourcePath = re.sub("[&?]folder.*?(?=&|\\?|$)", "", resourcePath)
if 'fileName' in allParams and allParams['fileName'] is not None
|
:
resourcePath = resourcePath.replace("{" + "fileName" + "}" , str(allParams['fil
|
eName']))
else:
resourcePath = re.sub("[&?]fileName.*?(?=&|\\?|$)", "", resourcePath)
method = 'DELETE'
queryParams = {}
headerParams = {}
formParams = {}
files = { }
bodyParam = None
headerParams['Accept'] = 'application/xml,application/json'
headerParams['Content-Type'] = 'application/json'
postData = (formParams if formParams else bodyParam)
response = self.apiClient.callAPI(resourcePath, method, queryParams, postData, headerParams, files=files)
try:
if response.status_code in [200,201,202]:
responseObject = self.apiClient.pre_deserialize(response.content, 'SaaSposeResponse', response.headers['content-type'])
return responseObject
else:
raise ApiException(response.status_code,response.content)
except Exception:
raise ApiException(response.status_code,response.content)
def GetProjectAssignment(self, name, assignmentUid, **kwargs):
"""Read project assignment.
Args:
name (str): The name of the file. (required)
assignmentUid (int): Assignment Uid (required)
storage (str): The document storage. (optional)
folder (str): The document folder. (optional)
Returns: AssignmentResponse
"""
allParams = dict.fromkeys(['name', 'assignmentUid', 'storage', 'folder'])
params = locals()
for (key, val) in params['kwargs'].iteritems():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method GetProjectAssignment" % key)
params[key] = val
for (key, val) in params.iteritems():
if key in allParams:
allParams[key] = val
resourcePath = '/tasks/{name}/assignments/{assignmentUid}/?appSid={appSid}&storage={storage}&folder={folder}'
resourcePath = resourcePath.replace('&amp;','&').replace("/?","?").replace("toFormat={toFormat}","format={format}").replace("{path}","{Path}")
if 'name' in allParams and allParams['name'] is not None:
resourcePath = resourcePath.replace("{" + "name" + "}" , str(allParams['name']))
else:
resourcePath = re.sub("[&?]name.*?(?=&|\\?|$)", "", resourcePath)
if 'assignmentUid' in allParams and allParams['assignmentUid'] is not None:
resourcePath = resourcePath.replace("{" + "assignmentUid" + "}" , str(allParams['assignmentUid']))
else:
resourcePath = re.sub("[&?]assignmentUid.*?(?=&|\\?|$)", "", resourcePath)
if 'storage' in allParams and allParams['storage'] is not None:
resourcePath = resourcePath.replace("{" + "storage" + "}" , str(allParams['storage']))
else:
resourcePath = re.sub("[&?]storage.*?(?=&|\\?|$)", "", resourcePath)
if 'folder' in allParams and allParams['folder'] is not None:
resourcePath = resourcePath.replace("{" + "folder" + "}" , str(allParams['folder']))
else:
resourcePath = re.sub("[&?]folder.*?(?=&|\\?|$)", "", resourcePath)
method = 'GET'
queryParams = {}
headerParams = {}
formParams = {}
files = { }
bodyParam = None
headerParams['Accept'] = 'application/xml,application/json'
headerParams['Content-Type'] = 'application/json'
postData = (formParams if formParams else bodyParam)
response = self.apiClient.callAPI(resourcePath, method, queryParams, postData, headerParams, files=files)
try:
if response.status_code in [200,201,202]:
responseObject = self.apiClient.pre_deserialize(response.content, 'AssignmentResponse', response.headers['content-type'])
return responseObject
else:
raise ApiException(response.status_code,response.content)
except Exception:
raise ApiException(response.status_code,response.content)
def GetProjectAssignments(self, name, **kwargs):
"""Read project assignment items.
Args:
name (str): The name of the file. (required)
storage (str): The document storage. (optional)
folder (str): The document folder. (optional)
Returns: AssignmentItemsResponse
"""
allParams = dict.fromkeys(['name', 'storage', 'folder'])
params = locals()
for (key, val) in params['kwargs'].iteritems():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method GetProjectAssignments" % key)
params[key] = val
for (key, val) in params.iteritems():
if key in allParams:
allParams[key] = val
resourcePath = '/tasks/{name}/assignments/?appSid={appSid}&storage={storage}&folder={folder}'
resourcePath = resourcePath.replace('&amp;','&').
|
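The repeated replace/`re.sub` blocks above implement a small URI-template idiom: substitute each supplied parameter into its `{placeholder}` and strip the query fragment of any parameter that was left unset. A condensed sketch of that idiom follows; the template and values are hypothetical, not a real Aspose endpoint.

```python
import re


def build_resource_path(template, params):
    """Fill supplied placeholders; drop query parameters that stay None."""
    path = template
    for key, val in params.items():
        if val is not None:
            path = path.replace('{' + key + '}', str(val))
        else:
            path = re.sub("[&?]%s.*?(?=&|\\?|$)" % key, "", path)
    return path


template = '/tasks/{name}/assignments/{assignmentUid}?storage={storage}&folder={folder}'
print(build_resource_path(template, {
    'name': 'plan.mpp', 'assignmentUid': 7, 'storage': None, 'folder': None}))
# -> /tasks/plan.mpp/assignments/7
```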
HarrisonHDU/myerp
|
apps/sims/urls.py
|
Python
|
mit
| 526
| 0.001901
|
__author__ = 'Administrator'
from django.conf.urls import patterns
from django.contrib.auth.v
|
iews import login, logout_then_login
urlpatterns = patterns('',
(r'^$', 'apps.sims.views.index_view'),
(r'index/$', 'apps.sims.views.index_view'),
(r'login/$', lo
|
gin, {'template_name': 'sims/login.html'}),
(r'logout/$', logout_then_login),
(r'stuinfo/$', 'apps.sims.views.student_info_list'),
(r'save/$', 'apps.sims.views.save_student'),
(r'delete/$', 'apps.sims.views.delete_student'),
)
|
mauzeh/formation-flight
|
runs/singlehub/z/run.py
|
Python
|
mit
| 1,259
| 0.007943
|
#!/usr/bin/env python
"""Simulation bootstrapper"""
from formation_flight.formation import handlers as formation_handlers
from formation_flight.aircraft import handlers as aircraft_handlers
from formation_flight.aircraft import generators
from formation_flight.hub import builders
from formation_flight.hub import allocators
from lib import sim, debug, sink
from lib.debug import print_line as p
from formation_flight import statistics
import config
import os
import numpy as np
config.sink_dir = '%s/sink' % os.path.dirname(__file__)
def init():
sink.init(config.sink_dir)
def execute():
init()
for z in np.linspace(0, 1, 250):
config.Z = z
single_run()
def si
|
ngle_run():
sim.init()
aircraft_handlers.init()
formation_handlers.init()
statistics.init()
# Construct flight list
planes = generators.get_via_stdin()
# Find hubs
config.hubs = builders.build_hubs(planes, config.count_hubs, config.Z)
# Allocate hubs to flights
allocators.allocate(planes, config.hubs)
|
for flight in planes:
sim.events.append(sim.Event('aircraft-init', flight, 0))
sim.run()
sink.push(statistics.vars)
debug.print_dictionary(statistics.vars)
|
matrix-org/synapse
|
tests/util/test_file_consumer.py
|
Python
|
apache-2.0
| 5,022
| 0.000398
|
# Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import threading
from io import StringIO
from unittest.mock import NonCallableMock
from twisted.internet import defer, reactor
from synapse.util.file_consumer import BackgroundFileConsumer
from tests import unittest
class FileConsumerTests(unittest.TestCase):
@defer.inlineCallbacks
def test_pull_consumer(self):
string_file = StringIO()
consumer = BackgroundFileConsumer(string_file, reactor=reactor)
try:
producer = DummyPullProducer()
yield producer.register_with_consumer(consumer)
yield producer.write_and_wait("Foo")
self.assertEqual(string_file.getvalue(), "Foo")
yield producer.write_and_wait("Bar")
self.assertEqual(string_file.getvalue(), "FooBar")
finally:
consumer.unregisterProducer()
yield consumer.wait()
self.assertTrue(string_file.closed)
@defer.inlineCallbacks
def test_push_consumer(self):
string_file = BlockingStringWrite()
consumer = BackgroundFileConsumer(string_file, reactor=reactor)
try:
producer = NonCallableMock(spec_set=[])
consumer.registerProducer(producer, True)
consumer.write("Foo")
yield string_file.wait_for_n_writes(1)
self.assertEqual(string_file.buffer, "Foo")
consumer.write("Bar")
yield string_file.wait_for_n_writes(2)
self.assertEqual(string_file.buffer, "FooBar")
finally:
consumer.unregisterProducer()
yield consumer.wait()
self.assertTrue(string_file.closed)
@defer.inlineCallbacks
def test_push_producer_feedback(self):
string_file = BlockingStringWrite()
consumer = BackgroundFileConsumer(string_file, reactor=reactor)
try:
producer = NonCallableMock(spec_set=["pauseProducing", "resumeProducing"])
resume_deferred = defer.Deferred()
producer.resumeProducing.side_effect = lambda: resume_deferred.callback(
None
)
consumer.registerProducer(producer, True)
number_writes = 0
with string_file.write_lock:
for _ in range(consumer._PAUSE_ON_QUEUE_SIZE):
consumer.write("Foo")
number_writes += 1
producer.pauseProducing.assert_called_once()
yield string_file.wait_for_n_writes(number_writes)
yield
|
resume_deferred
producer.resumeProducing.assert_called_once()
finally:
consumer.unregisterProducer()
yield consumer.wait()
self.assertTrue(string_file.closed)
class DummyPullProducer:
def __init__(self):
self.consumer = None
self.deferred = defer.Deferred()
def resumeProducing(self):
d = self.deferred
self.deferred = defer.Defe
|
rred()
d.callback(None)
def write_and_wait(self, bytes):
d = self.deferred
self.consumer.write(bytes)
return d
def register_with_consumer(self, consumer):
d = self.deferred
self.consumer = consumer
self.consumer.registerProducer(self, False)
return d
class BlockingStringWrite:
def __init__(self):
self.buffer = ""
self.closed = False
self.write_lock = threading.Lock()
self._notify_write_deferred = None
self._number_of_writes = 0
def write(self, bytes):
with self.write_lock:
self.buffer += bytes
self._number_of_writes += 1
reactor.callFromThread(self._notify_write)
def close(self):
self.closed = True
def _notify_write(self):
"Called by write to indicate a write happened"
with self.write_lock:
if not self._notify_write_deferred:
return
d = self._notify_write_deferred
self._notify_write_deferred = None
d.callback(None)
@defer.inlineCallbacks
def wait_for_n_writes(self, n):
"Wait for n writes to have happened"
while True:
with self.write_lock:
if n <= self._number_of_writes:
return
if not self._notify_write_deferred:
self._notify_write_deferred = defer.Deferred()
d = self._notify_write_deferred
yield d
|
jarvisji/ScrapyCrawler
|
CrawlWorker/spiders/serverfault.py
|
Python
|
apache-2.0
| 396
| 0.002525
|
__author__ = 'Ting'
from CrawlWorker.spiders.stackoverflow import StackOverflowSpider
class ServerFaultSpider(StackOverflowSpider):
name = 'ServerFaultSpider'
allowed_domains = ['serverfault.com']
def __init__(self, op=None, **kwargs):
StackOverflowSpider.__init__(self, op, **kwargs)
def get_feed_start_urls(self):
return ['http:
|
//serverfault.com/questions']
|
|
redbox-mint/redbox
|
config/src/main/config/home/harvest/workflows/simpleworkflow-rules.py
|
Python
|
gpl-2.0
| 356
| 0.030899
|
import sys
import os
from com.googlecode.fascinator.common import FascinatorHome
sys.path.append(
|
os.path.join(FascinatorHome.getPath(),"harvest", "workflows"))
from
|
baserules import BaseIndexData
class IndexData(BaseIndexData):
def __activate__(self, context):
BaseIndexData.__activate__(self,context)
|
JamesClough/networkx
|
networkx/algorithms/tests/test_mis.py
|
Python
|
bsd-3-clause
| 3,517
| 0.007108
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# $Id: test_maximal_independent_set.py 577 2011-03-01 06:07:53Z lleeoo $
"""
Tests for maximal (not maximum) independent sets.
"""
# Copyright (C) 2004-2016 by
# Leo Lopes <leo.lopes@monash.edu>
# Aric Hagberg <hagberg@lanl.gov>
# Dan Schult <dschult@colgate.edu>
# Pieter Swart <swart@lanl.gov>
# All rights reserved.
# BSD license.
__author__ = """Leo Lopes (leo.lopes@monash.edu)"""
from nose.tools import *
import networkx as nx
import random
class TestMaximalIndependantSet(object):
def setup(self):
self.florentine = nx.Graph()
self.florentine.add_edge('Acciaiuoli','Medici')
self.florentine.add_edge('Castellani','Peruzzi')
self.florentine.add_edge('Castellani','Strozzi')
self.florentine.add_edge('Castellani','Barbadori')
self.florentine.add_edge('Medici','Barbadori')
self.florentine.add_edge('Medici','Ridolfi')
self.florentine.add_edge('Medici','Tornabuoni')
self.florentine.add_edge('Medici','Albizzi')
self.florentine.add_edge('Medici','Salviati')
self.florentine.add_edge('Salviati','Pazzi')
self.florentine.add_edge('Peruzzi','Strozzi')
self.florentine.add_edge('Peruzzi','Bischeri')
self.florentine.add_edge('Strozzi','Ridolfi')
self.florentine.add_edge('Strozzi','Bischeri')
self.florentine.add_edge('Ridolfi','Tornabuoni')
self.florentine.add_edge('Tornabuoni','Guadagni')
self.florentine.add_edge('Albizzi','Ginori')
self.florentine.add_edge('Albizzi','Guadagni')
self.florentine.add_edge('Bischeri','Guadagni')
self.florentine.add_edge('Guadagni','Lamberteschi')
def test_K5(self):
"""Maximal independent set: K5"""
G = nx.complete_graph(5)
for node in G:
assert_equal(nx.maximal_independent_set(G, [node]), [node])
def test_K55(self):
"""Maximal independent set: K55"""
G = nx.complete_graph(55)
for node in G:
assert_equal(nx.maximal_independent_set(G, [node]), [node])
def test_exception(self):
"""Bad input should raise exception."""
G = self.florentine
assert_raises(nx.NetworkXUnfeasible,
nx.maximal_independent_set, G, ["Smith"])
assert_raises(nx.NetworkXUnfeasible,
nx.maximal_independent_set, G, ["Salviati", "Pazzi"])
def test_florentine_family(self):
G = self.florentine
indep = nx.maximal_independent_set(G, ["Medici", "Bischeri"])
asse
|
rt_equal(sorted(indep),
sorted(["Medici", "Bischeri", "Castellani", "Pazzi",
"Ginori", "Lamberteschi"]))
def test_bipartite(self):
G = nx.complete_bipartite_graph(12, 34)
indep = nx.maximal_independent_set(G, [4, 5, 9, 10])
assert_equal(sorted(indep), list(range(12)
|
))
def test_random_graphs(self):
"""Generate 50 random graphs of different types and sizes and
make sure that all sets are independent and maximal."""
for i in range(0, 50, 10):
G = nx.random_graphs.erdos_renyi_graph(i*10+1, random.random())
IS = nx.maximal_independent_set(G)
assert_false(list(G.subgraph(IS).edges()))
neighbors_of_MIS = set.union(*(set(G.neighbors(v)) for v in IS))
for v in set(G.nodes()).difference(IS):
assert_true(v in neighbors_of_MIS)
|
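The property those tests verify can be restated in two lines: an independent set has no internal edges, and a maximal one cannot be extended because every remaining node already has a neighbour inside the set. A small self-check along those lines, assuming networkx is installed (the graph choice is arbitrary):

```python
import networkx as nx

G = nx.complete_bipartite_graph(3, 4)
mis = set(nx.maximal_independent_set(G, [0]))
assert not list(G.subgraph(mis).edges())            # independent: no internal edges
assert all(set(G[v]) & mis for v in set(G) - mis)   # maximal: cannot be extended
```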
mcmaxwell/idea_digital_agency
|
idea/feincms/module/page/extensions/excerpt.py
|
Python
|
mit
| 754
| 0
|
"""
Add an excerpt field to the page.
"""
from __future__ import absolute_import, unicode_literals
from django.db import models
from django.utils.translation import ugettext_laz
|
y as _
from feincms import extensions
class Extension(extensions.Extension):
def handle_model(self):
self.model.add_to_class(
'excerpt',
models.TextField(
_('excerpt'),
blank=True,
help_text=_(
'Add a brief excerpt summarizing the content'
' of this page.')))
def handle_modeladmin(self, modeladmin):
modeladmi
|
n.add_extension_options(_('Excerpt'), {
'fields': ('excerpt',),
'classes': ('collapse',),
})
|
Gebesa-Dev/Addons-gebesa
|
account_prepayment_return/models/__init__.py
|
Python
|
agpl-3.0
| 151
| 0
|
# -*- coding: utf-8 -*-
# © <YEAR(S)> <AUTHOR(S)>
# License AGPL-3.0
|
or later (http://www.gnu.org/licenses/agpl.
|
html).
from . import account_payment
|
rendermotion/RMPY
|
Core/data_save_load.py
|
Python
|
lgpl-3.0
| 3,429
| 0.001458
|
from RMPY.representations import curve
from RMPY.creators import skinCluster
import pymel.core as pm
from RMPY.core import config
import os
def save_curve(*args):
"""
:param args: the scene objects to save; if nothing is provided it will try to save the selection.
:return:
"""
if args:
scene_curves = args
else:
scene_curves = pm.ls(selection=True)
saved_curves_list = []
for each in scene_curves:
try:
if pm.objExists(each):
curve_node = curve.Curve.by_name(each)
curve_node.save()
saved_curves_list.append(each)
else:
print "the curve {} doesn't exists".format(each)
except RuntimeWarning('{} not saved'.format):
pass
print 'the following curves were saved: {}'.format(saved_curves_list)
def load_curves(*args):
"""
:param args: the scene objects to load; if nothing is provided it will try to load the selection.
:return:
"""
if args:
scene_curves = args
else:
scene_curves = pm.ls(selection=True)
for each in scene_curves:
try:
if pm.objExists(each):
curve_node = curve.Curve.by_name(each)
curve_node.load()
curve_node.set_repr_dict()
else:
print "the curve {} doesn't exists".format(each)
except RuntimeWarning('{} not loaded'.format):
pass
def save_skin_cluster(*args):
if args:
scene_objects = args
else:
scene_objects = pm.ls(selection=True)
saved_skin_cluster_list = []
for each_node in scene_objects:
try:
|
skin_cluster01 = skinCluster.SkinCluster.by_node(each_node)
if skin_cluster01:
|
skin_cluster01.save('{}'.format(each_node))
saved_skin_cluster_list.append(each_node)
else:
print "object {} does'nt have a skincluster".format(each_node)
except RuntimeWarning('{} not saved'.format(each_node)):
pass
print 'the following skin clusters were saved: {}'.format(saved_skin_cluster_list)
def load_skin_cluster(*args):
if args:
scene_objects = args
else:
scene_objects = pm.ls(selection=True)
for each_node in scene_objects:
try:
skin_cluster01 = skinCluster.SkinCluster()
skin_cluster01.load('{}'.format(each_node))
skin_cluster01.apply_weights_dictionary(geometry=pm.ls(each_node)[0])
except RuntimeWarning('{} not loaded'.format(each_node)):
pass
def export_maya_file(**kwargs):
file_name = kwargs.pop('file_name', 'reference_points')
full_path = '{}/mayaFiles'.format(config.file_path)
pm.exportSelected('{}/{}.ma'.format(full_path, file_name))
def import_maya_file(file_name):
full_path = '{}/mayaFiles'.format(config.file_path)
pm.importFile('{}/{}.ma'.format(full_path, file_name))
def import_all_available_maya_files():
for each in available_maya_files():
import_maya_file(each)
def available_maya_files():
full_path = '{}/mayaFiles'.format(config.file_path)
available_files = []
for each in os.listdir(full_path):
if '.ma' in each:
available_files.append(each)
return available_files
if __name__ == '__main__':
load_skin_cluster()
|
tfiedor/perun
|
perun/collect/trace/systemtap/engine.py
|
Python
|
gpl-3.0
| 30,849
| 0.003695
|
""" The SystemTap engine implementation.
"""
import time
import os
from subprocess import PIPE, STDOUT, DEVNULL, TimeoutExpired
import perun.collect.trace.systemtap.parse_compact as parse_compact
import perun.collect.trace.collect_engine as engine
import perun.collect.trace.systemtap.script_compact as stap_script_compact
from perun.collect.trace.watchdog import WATCH_DOG
from perun.collect.trace.threads import PeriodicThread, NonBlockingTee, TimeoutThread
from perun.collect.trace.values import FileSize, OutputHandling, check, RecordType, \
LOG_WAIT, HARD_TIMEOUT, CLEANUP_TIMEOUT, CLEANUP_REFRESH, HEARTBEAT_INTERVAL, \
STAP_MODULE_REGEX, PS_FORMAT, STAP_PHASES
import perun.utils as utils
import perun.utils.metrics as metrics
from perun.utils.helpers import SuppressedExceptions
from perun.logic.locks import LockType, ResourceLock, get_active_locks_for
from perun.utils.exceptions import SystemTapStartupException, SystemTapScriptCompilationException
class SystemTapEngine(engine.CollectEngine):
""" The SystemTap engine class, derived from the base CollectEngine.
:ivar str script: a full path to the systemtap script file
:ivar str log: a full path to the systemtap log file
:ivar str data: a full path to the file containing the raw performance data
:ivar str capture: a full path to the file containing the captured stdout and stderr of the
profiled command
:ivar ResourceLock lock_binary: the binary lock object
:ivar ResourceLock lock_stap: the SystemTap process lock object
:ivar ResourceLock lock_module: the SystemTap module lock object
:ivar Subprocess.Popen stap_compile: the script compilation subprocess object
:ivar Subprocess.Popen stap_collect: the SystemTap collection subprocess object
:ivar str stap_module: the name of the compiled SystemTap module
:ivar str stapio: the stapio process PID
:ivar Subprocess.Popen profiled_command: the profiled command subprocess object
"""
name = 'stap'
def __init__(self, config):
""" Creates the engine object according to the supplied configuration.
:param Configuration config: the configuration object
"""
super().__init__(config)
self.script = self._assemble_file_name('script', '
|
.stp')
self.log = self._assemble_file_name('log', '.txt')
self.data = self._assemble_file_name('data', '.txt')
self.capture = self._assemble_file_name('capture', '.txt')
# Syst
|
emTap specific dependencies
self.__dependencies = ['stap', 'lsmod', 'rmmod']
# Locks
binary_name = os.path.basename(self.executable.cmd)
self.lock_binary = ResourceLock(LockType.Binary, binary_name, self.pid, self.locks_dir)
self.lock_stap = ResourceLock(
LockType.SystemTap, 'process_{}'.format(binary_name), self.pid, self.locks_dir
)
self.lock_module = None
self.stap_compile = None
self.stap_collect = None
self.stap_module = None
self.stapio = None
self.profiled_command = None
# Create the collection files
super()._create_collect_files([self.script, self.log, self.data, self.capture])
# Lock the binary immediately
self.lock_binary.lock()
def check_dependencies(self):
""" Check that the SystemTap related dependencies are available.
"""
check(self.__dependencies)
def available_usdt(self, **_):
"""Extract USDT probe locations from the supplied binary files and libraries.
:return dict: the names of the available USDT locations per binary file
"""
# Extract the USDT probe locations from the binary
# note: stap -l returns code '1' if there are no USDT probes
return {
target: list(_parse_usdt_name(_extract_usdt_probes(target)))
for target in self.targets
}
def assemble_collect_program(self, **kwargs):
""" Assemble the SystemTap collection script according to the specified probes.
:param kwargs: the configuration parameters
"""
stap_script_compact.assemble_system_tap_script(self.script, **kwargs)
def collect(self, config, **_):
"""Collects performance data using the SystemTap wrapper, assembled script and the
executable.
:param Configuration config: the configuration object
"""
# Check that the lock for binary is still valid and log resources with corresponding locks
self.lock_binary.check_validity()
WATCH_DOG.log_resources(*_check_used_resources(self.locks_dir))
# Open the log file for collection
with open(self.log, 'w') as logfile:
# Assemble the SystemTap command and log it
stap_cmd = ('sudo stap -g --suppress-time-limits -s5 -v {} -o {}'
.format(self.script, self.data))
compile_cmd = stap_cmd
if config.stap_cache_off:
compile_cmd += ' --poison-cache'
WATCH_DOG.log_variable('stap_cmd', stap_cmd)
# Compile the script, extract the module name from the compilation log and lock it
self._compile_systemtap_script(compile_cmd, logfile)
self._lock_kernel_module(self.log)
# Run the SystemTap collection
self._run_systemtap_collection(stap_cmd, logfile, config)
def transform(self, **kwargs):
""" Transforms the raw performance data into the perun resources
:param kwargs: the configuration parameters
:return iterable: a generator object that produces the resources
"""
return parse_compact.trace_to_profile(self.data, **kwargs)
def cleanup(self, config, **_):
""" Cleans up the SystemTap resources that are still being used.
Specifically, terminates any still running processes - compilation, collection
or the profiled executable - and any related spawned child processes.
Unloads the kernel module if it is still loaded and unlocks all the resource locks.
:param config: the configuration parameters
"""
WATCH_DOG.info('Releasing and cleaning up the SystemTap-related resources')
# Terminate perun related processes that are still running
self._cleanup_processes()
# Unload the SystemTap kernel module if still loaded and unlock it
# The kernel module should already be unloaded since terminating the SystemTap collection
# process automatically unloads the module
self._cleanup_kernel_module()
# Zip and delete (both optional) the temporary collect files
self._finalize_collect_files(
['script', 'log', 'data', 'capture'], config.keep_temps, config.zip_temps
)
def _compile_systemtap_script(self, command, logfile):
""" Compiles the SystemTap script without actually running it.
This step allows the trace collector to identify the resulting kernel module, check if
the module is not already being used and to lock it.
:param str command: the 'stap' compilation command to run
:param TextIO logfile: the handle of the opened SystemTap log file
"""
WATCH_DOG.info('Attempting to compile the SystemTap script into a kernel module. '
'This may take a while depending on the number of probe points.')
# Lock the SystemTap process we're about to start
# No need to check the lock validity more than once since the SystemTap lock is tied
# to the binary file which was already checked
self.lock_stap.lock()
# Run the compilation process
# Fetch the password so that the preexec_fn doesn't halt
utils.run_safely_external_command('sudo sleep 0')
# Run only the first 4 phases of the stap command, before actually running the collection
with utils.nonblocking_subprocess(
command + ' -p 4', {'stderr': logfile, 'stdout': PIPE, 'preexec_fn': os.setpgrp},
self._terminate_process, {'proc_name': 'stap_compile'}
) as compilati
|
LoveKano/hs_django_blog
|
blog/apps.py
|
Python
|
mit
| 156
| 0
|
# -*- coding: utf-8 -*-
fro
|
m __future__ import unicode_literals
from django.apps import AppConfig
class BlogConfig(AppConfig):
|
name = 'blog'
|
tomkralidis/geonode
|
geonode/monitoring/migrations/0003_monitoring_resources.py
|
Python
|
gpl-3.0
| 572
| 0.001748
|
# -*- coding: utf-8 -*-
from django.db import migrations, models
class
|
Migration(migrations.Migration):
dependencies = [
('monitoring', '0002_monitoring_update'),
]
operations = [
migrations.RemoveField(
model_name='requestevent',
name='resources',
),
migrations.AddField(
model_name='requestevent',
name='resources',
field=models.ManyToManyField(hel
|
p_text='List of resources affected', to='monitoring.MonitoredResource', null=True, blank=True),
),
]
|
ohjay/ohjay.github.io
|
cs61a/sp17/mt1_review/code.py
|
Python
|
mit
| 754
| 0.007958
|
"""
Code for MT1 review worksheet.
Direct all complaints to Owen Jow (owenjow@berkeley).
"""
# Part 1: Control
x = (0 and 1 and 2) + (0 or 1 or 2)
((-4 or 0) and 4) / (-2 or (0 and 2))
if x <= 1:
|
print('hello')
elif x <= 2:
print(' world')
if x <= 3:
print(' my name is inigo montoya')
else:
print(' from the other side')
# Part 2: HOF / Lambdas
def f(v, x):
def g(y, z):
return y(x, z)(z, x)
return g
u = lambda y, x: y * 4
v = lambda x, y: x * 3 + y
f(u, 1)(lambda x, y: lambda y, x: y * 3 + v(x, y), 2)
# Part 4: Environment Diagrams
def f(x, h):
def g(y, i):
f = i[:]
h = [f, lambda: g(5, h)]
return h
return g(4, h)
x, y
|
= 6, 7
f = f(3, [lambda: x * y])
g = f[-1]()[0][0][0]()
|
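A brief worked pass over the "Part 1: Control" block above, applying Python's `and`/`or` short-circuit rules; this is just a reading of the code, not the worksheet's answer key.

```python
x = (0 and 1 and 2) + (0 or 1 or 2)    # (0) + (1), so x == 1
((-4 or 0) and 4) / (-2 or (0 and 2))  # (-4 and 4) / (-2) == 4 / -2 == -2.0
# With x == 1, the first conditional prints 'hello' and the separate
# `if x <= 3` block then prints ' my name is inigo montoya'.
```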
Azwok/Thremo
|
magnification_map.py
|
Python
|
gpl-2.0
| 249
| 0.004016
|
import numpy as np
import unittest
import pycuda.driver as drv
import pycuda.compiler
impor
|
t pycuda.autoinit
import pycuda.gpuarray as gpuarray
import pycuda.cumath as cumath
from pycuda.compiler import Source
|
Module
__author__ = 'AlistairMcDougall'
|
jinglining/flink
|
flink-python/pyflink/fn_execution/tests/test_process_mode_boot.py
|
Python
|
apache-2.0
| 6,732
| 0.002525
|
################################################################################
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
################################################################################
import os
import shutil
import socket
import subprocess
import sys
import tempfile
import time
import unittest
import grpc
from apache_beam.portability.api.beam_provision_api_pb2 import (ProvisionInfo,
GetProvisionInfoResponse)
from apache_beam.portability.api.beam_provision_api_pb2_grpc import (
ProvisionServiceServicer, add_ProvisionServiceServicer_to_server)
from concurrent import futures
from google.protobuf import json_format
from pyflink.java_gateway import get_gateway
from pyflink.pyflink_gateway_server import on_windows
from pyflink.testing.test_case_utils import PyFlinkTestCase
class PythonBootTests(PyFlinkTestCase):
def setUp(self):
provision_info = json_format.Parse('{"retrievalToken": "test_token"}', ProvisionInfo())
response = GetProvisionInfoResponse(info=provision_info)
def get_unused_port():
sock = socket.socket()
sock.bind(('', 0))
port = sock.getsockname()[1]
sock.close()
return port
class ProvisionService(ProvisionServiceServicer):
def GetProvisionInfo(self, request, context):
return response
def start_test_provision_server():
server = grpc.server(futures.ThreadPoolExecutor(max_workers=1))
add_ProvisionServiceServicer_to_server(ProvisionService(), server)
port = get_unused_port()
server.add_insecure_port('[::]:' + str(port))
server.start()
retur
|
n server, port
self.provision_server, self.provision_port = start_test_provision_server()
self.env = dict(os.environ)
self.env["python"] = sys.executable
self.env["FLINK_BOOT_TESTING"] = "1"
self.env["BOOT_LOG_DIR"] = os.path.join(self.env["FLIN
|
K_HOME"], "log")
self.tmp_dir = tempfile.mkdtemp(str(time.time()), dir=self.tempdir)
# assume that this file is in flink-python source code directory.
flink_python_source_root = os.path.dirname(
os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))))
runner_script = "pyflink-udf-runner.bat" if on_windows() else \
"pyflink-udf-runner.sh"
self.runner_path = os.path.join(
flink_python_source_root, "bin", runner_script)
def run_boot_py(self):
args = [self.runner_path, "--id", "1",
"--logging_endpoint", "localhost:0000",
"--artifact_endpoint", "whatever",
"--provision_endpoint", "localhost:%d" % self.provision_port,
"--control_endpoint", "localhost:0000",
"--semi_persist_dir", self.tmp_dir]
return subprocess.call(args, env=self.env)
def test_python_boot(self):
exit_code = self.run_boot_py()
self.assertTrue(exit_code == 0, "the boot.py exited with non-zero code.")
@unittest.skipIf(on_windows(), "'subprocess.check_output' in Windows always return empty "
"string, skip this test.")
def test_param_validation(self):
args = [self.runner_path]
exit_message = subprocess.check_output(args, env=self.env).decode("utf-8")
self.assertIn("No id provided.", exit_message)
args = [self.runner_path, "--id", "1"]
exit_message = subprocess.check_output(args, env=self.env).decode("utf-8")
self.assertIn("No logging endpoint provided.", exit_message)
args = [self.runner_path, "--id", "1",
"--logging_endpoint", "localhost:0000"]
exit_message = subprocess.check_output(args, env=self.env).decode("utf-8")
self.assertIn("No provision endpoint provided.", exit_message)
args = [self.runner_path, "--id", "1",
"--logging_endpoint", "localhost:0000",
"--provision_endpoint", "localhost:%d" % self.provision_port]
exit_message = subprocess.check_output(args, env=self.env).decode("utf-8")
self.assertIn("No control endpoint provided.", exit_message)
def test_set_working_directory(self):
JProcessPythonEnvironmentManager = \
get_gateway().jvm.org.apache.flink.python.env.ProcessPythonEnvironmentManager
output_file = os.path.join(self.tmp_dir, "output.txt")
pyflink_dir = os.path.join(self.tmp_dir, "pyflink")
os.mkdir(pyflink_dir)
# just create an empty file
open(os.path.join(pyflink_dir, "__init__.py"), 'a').close()
fn_execution_dir = os.path.join(pyflink_dir, "fn_execution")
os.mkdir(fn_execution_dir)
open(os.path.join(fn_execution_dir, "__init__.py"), 'a').close()
with open(os.path.join(fn_execution_dir, "boot.py"), "w") as f:
f.write("import os\nwith open(r'%s', 'w') as f:\n f.write(os.getcwd())" %
output_file)
# test that the name of the working directory variable of the udf runner is consistent with
# ProcessPythonEnvironmentManager.
self.env[JProcessPythonEnvironmentManager.PYTHON_WORKING_DIR] = self.tmp_dir
self.env["python"] = sys.executable
args = [self.runner_path]
subprocess.check_output(args, env=self.env)
process_cwd = None
if os.path.exists(output_file):
with open(output_file, 'r') as f:
process_cwd = f.read()
self.assertEqual(os.path.realpath(self.tmp_dir),
process_cwd,
"setting working directory variable is not work!")
def tearDown(self):
self.provision_server.stop(0)
try:
if self.tmp_dir is not None:
shutil.rmtree(self.tmp_dir)
except:
pass
|
jcherqui/searx
|
searx/engines/gigablast.py
|
Python
|
agpl-3.0
| 3,104
| 0.000966
|
"""
Gigablast (Web)
@website https://gigablast.com
@provide-api yes (https://gigablast.com/api.html)
@using-api yes
@results XML
@stable yes
@parse url, title, content
"""
import random
from json import loads
from time import time
from lxml.html import fromstring
from searx.url_utils import urlencode
# engine dependent config
categories = ['general']
paging = True
number_of_results = 10
language_support = True
safesearch = True
# search-url
base_url = 'https://gigablast.com/'
search_string = 'search?{query}'\
'&n={number_of_results}'\
'&c=main'\
'&s={offset}'\
'&format=json'\
'&qh=0'\
'&qlang={lang}'\
'&ff={safesearch}'\
'&rxiec={rxieu}'\
'&ulse={ulse}'\
'&rand={rxikd}'\
'&dbez={dbez}'
# specific xpath variables
results_xpath = '//response//result'
url_xpath = './/url'
title_xpath = './/title'
content_xpath = './/sum'
supported_languages_url = 'https://gigablast.com/search?&rxikd=1'
# do search-request
def request(query, params):
offset = (params['pageno'] - 1) * number_of_results
if params['language'] == 'all':
language = 'xx'
else:
language = params['language'].replace('-', '_').lower()
if language.split('-')[0] != 'zh':
|
language = language.split('-')[0]
if params['safesearch'] >= 1:
safesearch = 1
else:
safesearch = 0
# rxieu is some kind of hash from the search query, but accepts random atm
search_path = search_string.format(query=urlencode({'q': query}),
offset=offset,
number_of_results=number_of_results,
|
rxikd=int(time() * 1000),
rxieu=random.randint(1000000000, 9999999999),
ulse=random.randint(100000000, 999999999),
lang=language,
safesearch=safesearch,
dbez=random.randint(100000000, 999999999))
params['url'] = base_url + search_path
return params
# get response from search-request
def response(resp):
results = []
# parse results
response_json = loads(resp.text)
for result in response_json['results']:
# append result
results.append({'url': result['url'],
'title': result['title'],
'content': result['sum']})
# return results
return results
# get supported languages from their site
def _fetch_supported_languages(resp):
supported_languages = []
dom = fromstring(resp.text)
links = dom.xpath('//span[@id="menu2"]/a')
for link in links:
href = link.xpath('./@href')[0].split('lang%3A')
if len(href) == 2:
code = href[1].split('_')
if len(code) == 2:
code = code[0] + '-' + code[1].upper()
else:
code = code[0]
supported_languages.append(code)
return supported_languages
|
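A hypothetical call into the engine's `request` hook, assuming the module above is importable as `gigablast` (the query, page number, and language are made up); searx passes in the `params` dict and the engine only needs to fill `params['url']`.

```python
import gigablast  # the engine module shown above, assumed importable

params = {'pageno': 2, 'language': 'en', 'safesearch': 1}
params = gigablast.request('free software', params)
print(params['url'])  # the assembled gigablast.com search URL for page 2
```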
mjysci/ud-dlnd-side-proj
|
p2_miniflow_mnist/miniflow_old.py
|
Python
|
gpl-3.0
| 8,379
| 0.001193
|
import numpy as np
class Node(object):
"""
Base class for nodes in the network.
Arguments:
`inbound_nodes`: A list of nodes with edges into this node.
"""
def __init__(self, inbound_nodes=[]):
"""
Node's constructor (runs when the object is instantiated). Sets
properties that all nodes need.
"""
# A list of nodes with edges into this node.
self.inbound_nodes = inbound_nodes
# The eventual value of this node. Set by running
# the forward() method.
self.value = None
# A list of nodes that this node outputs to.
self.outbound_nodes = []
# New property! Keys are the inputs to this node and
# their values are the partials of this node with
# respect to that input.
self.gradients = {}
# Sets this node as an outbound node for all of
# this node's inputs.
for node in inbound_nodes:
node.outbound_nodes.append(self)
def forward(self):
"""
Every node that uses this class as a base class will
need to define its own `forward` method.
"""
raise NotImplementedError
def backward(self):
"""
Every node that uses this class as a base class will
need to define its own `backward` method.
"""
raise NotImplementedError
class Input(Node):
"""
A generic input into the network.
"""
def __init__(self):
# The base class constructor has to run to set all
# the properties here.
#
# The most important property on an Input is value.
# self.value is set during `topological_sort` later.
Node.__init__(self)
def forward(self):
# Do nothing because nothing is calculated.
pass
def backward(self):
# An Input node has no inputs so the gradient (derivative)
# is zero.
# The key, `self`, is reference to this object.
self.gradients = {self: 0}
# Weights and bias may be inputs, so you need to sum
# the gradient from output gradients.
for n in self.outbound_nodes:
self.gradients[self] += n.gradients[self]
class Linear(Node):
"""
Represents a node that performs a linear transform.
"""
def __init__(self, X, W, b):
# The base class (Node) constructor. Weights and bias
# are treated like inbound nodes.
Node.__init__(self, [X, W, b])
def forward(self):
"""
Performs the math behind a linear transform.
"""
X = self.inbound_nodes[0].value
W = self.inbound_nodes[1].value
b = self.inbound_nodes[2].value
self.value = np.dot(X, W) + b
def backward(self):
"""
Calculates the gradient based on the output values.
"""
# Initialize a partial for each of the inbound_nodes.
self.gradients = {n: np.zeros_like(n.value) for n in self.inbound_nodes}
# Cycle through the outputs. The gradient will change depending
# on each output, so the gradients are summed over all outputs.
for n in self.outbound_nodes:
# Get the partial of the cost with respect to this node.
grad_cost = n.gradients[self]
# Set the partial of the loss with respect to this node's inputs.
self.gradients[self.inbound_nodes[0]] += np.dot(grad_cost, self.inbound_nodes[1].value.T)
# Set the partial of the loss with respect to this node's weights.
self.gradients[self.inbound_nodes[1]] += np.dot(self.inbound_nodes[0].value.T, grad_cost)
# Set the partial of the loss with respect to this node's bias.
self.gradients[self.inb
|
ound_nodes[2]] += np.sum(grad_cost, axis=0, keepdims=False)
class Sigmoid(Node):
"""
Represents a node that performs the sigmoid activation function.
"""
def __init__(self, node):
|
# The base class constructor.
Node.__init__(self, [node])
def _sigmoid(self, x):
"""
This method is separate from `forward` because it
will be used with `backward` as well.
`x`: A numpy array-like object.
"""
return 1. / (1. + np.exp(-x))
def forward(self):
"""
Perform the sigmoid function and set the value.
"""
input_value = self.inbound_nodes[0].value
self.value = self._sigmoid(input_value)
def backward(self):
"""
Calculates the gradient using the derivative of
the sigmoid function.
"""
# Initialize the gradients to 0.
self.gradients = {n: np.zeros_like(n.value) for n in self.inbound_nodes}
# Sum the partial with respect to the input over all the outputs.
for n in self.outbound_nodes:
grad_cost = n.gradients[self]
sigmoid = self.value
self.gradients[self.inbound_nodes[0]] += sigmoid * (1 - sigmoid) * grad_cost
class MSE(Node):
def __init__(self, y, a):
"""
The mean squared error cost function.
Should be used as the last node for a network.
"""
# Call the base class' constructor.
Node.__init__(self, [y, a])
def forward(self):
"""
Calculates the mean squared error.
"""
# NOTE: We reshape these to avoid possible matrix/vector broadcast
# errors.
#
# For example, if we subtract an array of shape (3,) from an array of shape
# (3,1) we get an array of shape(3,3) as the result when we want
# an array of shape (3,1) instead.
#
# Making both arrays (3,1) ensures the result is (3,1) and does
# an elementwise subtraction as expected.
y = self.inbound_nodes[0].value.reshape(-1, 1)
a = self.inbound_nodes[1].value.reshape(-1, 1)
self.m = self.inbound_nodes[0].value.shape[0]
# Save the computed output for backward.
self.diff = y - a
self.value = np.mean(self.diff**2)
def backward(self):
"""
Calculates the gradient of the cost.
"""
self.gradients[self.inbound_nodes[0]] = (2 / self.m) * self.diff
self.gradients[self.inbound_nodes[1]] = (-2 / self.m) * self.diff
def topological_sort(feed_dict):
"""
Sort the nodes in topological order using Kahn's Algorithm.
`feed_dict`: A dictionary where the key is an `Input` Node and the value is the respective value fed to that Node.
Returns a list of sorted nodes.
"""
input_nodes = [n for n in feed_dict.keys()]
G = {}
nodes = [n for n in input_nodes]
while len(nodes) > 0:
n = nodes.pop(0)
if n not in G:
G[n] = {'in': set(), 'out': set()}
for m in n.outbound_nodes:
if m not in G:
G[m] = {'in': set(), 'out': set()}
G[n]['out'].add(m)
G[m]['in'].add(n)
nodes.append(m)
L = []
S = set(input_nodes)
while len(S) > 0:
n = S.pop()
if isinstance(n, Input):
n.value = feed_dict[n]
L.append(n)
for m in n.outbound_nodes:
G[n]['out'].remove(m)
G[m]['in'].remove(n)
# if no other incoming edges add to S
if len(G[m]['in']) == 0:
S.add(m)
return L
def forward_and_backward(graph):
"""
Performs a forward pass and a backward pass through a list of sorted Nodes.
Arguments:
`graph`: The result of calling `topological_sort`.
"""
# Forward pass
for n in graph:
n.forward()
# Backward pass
# see: https://docs.python.org/2.3/whatsnew/section-slices.html
for n in graph[::-1]:
n.backward()
def sgd_update(trainables, learning_rate=1e-2):
"""
Updates the value of each trainable with SGD.
Arguments:
`trainables`: A list of `Input` Nodes representing weights/biases.
`learning_rate`: The learning rate.
"""
# TODO: update all the `trainables` with SGD
# You can access and assign the value of a trainable with `value
|
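The row is cut off in the middle of `sgd_update`; a minimal sketch of the standard SGD step that the TODO asks for (not the original author's code) would be:

```python
def sgd_update(trainables, learning_rate=1e-2):
    for t in trainables:
        # Each trainable Input node holds its own partial in t.gradients[t],
        # populated by Input.backward above.
        t.value -= learning_rate * t.gradients[t]
```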
ritashugisha/ASUEvents
|
ASUEvents/managers/migrations/0012_auto_20150422_2019.py
|
Python
|
mit
| 511
| 0.001957
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration
|
):
dependencies = [
('managers', '0011_auto_20150422_2018'),
]
operations = [
migrations.AlterField(
model_name='managerprofile',
name='picture',
field=models.ImageField(default=b'/static/assets/admin/layout/img/avatar.jpg'
|
, upload_to=b'profiles'),
preserve_default=True,
),
]
|
murat1985/bagpipe-bgp
|
bagpipe/bgp/bgp_daemon.py
|
Python
|
apache-2.0
| 11,381
| 0.000088
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# encoding: utf-8
# Copyright 2014 Orange
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the
|
License for the specific language governing permissions and
# limitations under the License.
impor
|
t os.path
import sys
from logging import Logger
import logging.config
# import logging_tree
import traceback
from daemon import runner
import signal
from ConfigParser import SafeConfigParser, NoSectionError
from optparse import OptionParser
from bagpipe.bgp.common import utils
from bagpipe.bgp.common.looking_glass import LookingGlass, \
LookingGlassLogHandler
from bagpipe.bgp.engine.bgp_manager import Manager
from bagpipe.bgp.rest_api import RESTAPI
from bagpipe.bgp.vpn import VPNManager
def findDataplaneDrivers(dpConfigs, bgpConfig, isCleaningUp=False):
drivers = dict()
for vpnType in dpConfigs.iterkeys():
dpConfig = dpConfigs[vpnType]
if 'dataplane_driver' not in dpConfig:
logging.error(
"no dataplane_driver set for %s (%s)", vpnType, dpConfig)
driverName = dpConfig["dataplane_driver"]
logging.debug(
"Creating dataplane driver for %s, with %s", vpnType, driverName)
# FIXME: this is a hack, dataplane drivers should have a better way to
# access any item in the BGP dataplaneConfig
if 'dataplane_local_address' not in dpConfig:
dpConfig['dataplane_local_address'] = bgpConfig['local_address']
for tentativeClassName in (driverName,
'bagpipe.%s' % driverName,
'bagpipe.bgp.%s' % driverName,
'bagpipe.bgp.vpn.%s.%s' % (
vpnType, driverName),
):
try:
if '.' not in tentativeClassName:
logging.debug(
"Not trying to import '%s'", tentativeClassName)
continue
driverClass = utils.import_class(tentativeClassName)
try:
logging.info("Found driver for %s, initiating...", vpnType)
# skip the init step if called for cleanup
driver = driverClass(dpConfig, not isCleaningUp)
drivers[vpnType] = driver
logging.info(
"Successfully initiated dataplane driver for %s with"
" %s", vpnType, tentativeClassName)
except ImportError as e:
logging.debug(
"Could not initiate dataplane driver for %s with"
" %s: %s", vpnType, tentativeClassName, e)
except Exception as e:
logging.error(
"Found class, but error while instantiating dataplane"
" driver for %s with %s: %s", vpnType,
tentativeClassName, e)
logging.error(traceback.format_exc())
break
break
except SyntaxError as e:
logging.error(
"Found class, but syntax error while instantiating "
"dataplane driver for %s with %s: %s", vpnType,
tentativeClassName, e)
break
except Exception as e:
logging.debug(
"Could not initiate dataplane driver for %s with %s (%s)",
vpnType, tentativeClassName, e)
return drivers
class BgpDaemon(LookingGlass):
def __init__(self, catchAllLGLogHandler, **kwargs):
self.stdin_path = '/dev/null'
self.stdout_path = '/dev/null'
self.stderr_path = '/dev/null'
self.pidfile_path = '/var/run/bagpipe-bgp/bagpipe-bgp.pid'
self.pidfile_timeout = 5
logging.info("BGP manager configuration : %s", kwargs["bgpConfig"])
self.bgpConfig = kwargs["bgpConfig"]
logging.info("BGP dataplane dataplaneDriver configuration : %s",
kwargs["dataplaneConfig"])
self.dataplaneConfig = kwargs["dataplaneConfig"]
logging.info("BGP API configuration : %s", kwargs["apiConfig"])
self.apiConfig = kwargs["apiConfig"]
self.catchAllLGLogHandler = catchAllLGLogHandler
def run(self):
logging.info("Starting BGP component...")
logging.debug("Creating dataplane drivers")
drivers = findDataplaneDrivers(self.dataplaneConfig, self.bgpConfig)
for vpnType in self.dataplaneConfig.iterkeys():
if vpnType not in drivers:
logging.error(
"Could not initiate any dataplane driver for %s", vpnType)
return
logging.debug("Creating BGP manager")
self.bgpManager = Manager(self.bgpConfig)
logging.debug("Creating VPN manager")
self.vpnManager = VPNManager(self.bgpManager, drivers)
# BGP component REST API
logging.debug("Creating REST API")
bgpapi = RESTAPI(
self.apiConfig, self, self.vpnManager, self.catchAllLGLogHandler)
bgpapi.run()
def stop(self, signum, frame):
logging.info("Received signal %(signum)r, stopping...", vars())
self.vpnManager.stop()
self.bgpManager.stop()
# would need to stop main thread ?
logging.info("All threads now stopped...")
exception = SystemExit("Terminated on signal %(signum)r" % vars())
raise exception
def getLookingGlassLocalInfo(self, pathPrefix):
return {
"dataplane": self.dataplaneConfig,
"bgp": self.bgpConfig
}
def _loadConfig(configFile):
parser = SafeConfigParser()
if (len(parser.read(configFile)) == 0):
logging.error("Configuration file not found (%s)", configFile)
exit()
bgpConfig = parser.items("BGP")
dataplaneConfig = dict()
for vpnType in ['ipvpn', 'evpn']:
try:
dataplaneConfig[vpnType] = dict(
parser.items("DATAPLANE_DRIVER_%s" % vpnType.upper()))
except NoSectionError:
if vpnType == "ipvpn": # backward compat for ipvpn
dataplaneConfig['ipvpn'] = dict(
parser.items("DATAPLANE_DRIVER"))
logging.warning("Config file is obsolete, should have a "
"DATAPLANE_DRIVER_IPVPN section instead of"
" DATAPLANE_DRIVER")
else:
logging.error(
"Config file should have a DATAPLANE_DRIVER_EVPN section")
apiConfig = parser.items("API")
# TODO: add a default API config
config = {"bgpConfig": dict(bgpConfig),
"dataplaneConfig": dataplaneConfig,
"apiConfig": dict(apiConfig)
}
return config
def daemon_main():
usage = "usage: %prog [options] (see --help)"
parser = OptionParser(usage)
parser.add_option("--config-file", dest="configFile",
help="Set BGP component configuration file path",
default="/etc/bagpipe-bgp/bgp.conf")
parser.add_option("--log-file", dest="logFile",
help="Set logging configuration file path",
default="/etc/bagpipe-bgp/log.conf")
parser.add_option("--no-daemon", dest="daemon", action="store_false",
help="Do not daemonize", default=True)
(options, _) = parser.parse_args()
action = sys.argv[1]
assert(action == "start" or action == "stop")
if not os.path.isfile(options.logFile):
logging.basicConfig()
print
|
google-research/understanding-transfer-learning
|
third_party/chexpert_data/chexpert.py
|
Python
|
apache-2.0
| 7,524
| 0.000665
|
import os
import logging
import numpy as np
from torch.utils.data import Dataset
import cv2
from PIL import Image
import subprocess
import torchvision.transforms as tfs
np.random.seed(0)
def TransCommon(image):
image = cv2.equalizeHist(image)
image = cv2.GaussianBlur(image, (3, 3), 0)
return image
def TransAug(image):
img_aug = tfs.Compose([
tfs.RandomAffine(degrees=(-15, 15), translate=(0.05, 0.05),
scale=(0.95, 1.05), fillcolor=128)
])
image = img_aug(image)
return image
def GetTransforms(image, target=None, type='common'):
    # target is not supported for now
if target is not None:
raise Exception(
            'Target is not supported now!')
# get type
if type.strip() == 'Common':
image = TransCommon(image)
return image
elif type.strip() == 'None':
return image
elif type.strip() == 'Aug':
image = TransAug(image)
return image
else:
raise Exception(
            'Unknown transforms_type : {}'.format(type))
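# Illustrative usage sketch (an assumption, not part of the original pipeline):
# 'Common' expects a grayscale numpy array, 'Aug' and 'None' operate on a PIL
# image, and any other type string raises.
if __name__ == '__main__':
    _arr = np.zeros((32, 32), dtype=np.uint8)
    _equalized = GetTransforms(_arr, type='Common')
    _pil = Image.fromarray(_arr)
    _jittered = GetTransforms(_pil, type='Aug')
    _unchanged = GetTransforms(_pil, type='None')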
class ImageDataset(Dataset):
def __init__(self, data_path, label_path, cfg, mode='train', subsample_size=-1, subsample_seed=1234):
self.cfg = cfg
self._label_header = None
self.data_path = data_path
self._image_paths = []
self._labels = []
self._mode = mode
self.dict = [{'1.0': '1', '': '0', '0.0': '0', '-1.0': '0'},
{'1.0': '1', '': '0', '0.0': '0', '-1.0': '1'}, ]
print(f'ImageDataset constructed with data_path = {self.data_path}')
with open(label_path) as f:
header = f.readline().strip('\n').split(',')
self._label_header = [
header[7],
header[10],
header[11],
header[13],
header[15]]
for line in f:
labels = []
fields = line.strip('\n').split(',')
image_path = os.path.join(self.data_path, os.path.expanduser(fields[0]))
flg_enhance = False
for index, value in enumerate(fields[5:]):
if index == 5 or index == 8:
labels.append(self.dict[1].get(value))
if self.dict[1].get(
value) == '1' and \
self.cfg.enhance_index.count(index) > 0:
flg_enhance = True
elif index == 2 or index == 6 or index == 10:
labels.append(self.dict[0].get(value))
if self.dict[0].get(
value) == '1' and \
self.cfg.enhance_index.count(index) > 0:
flg_enhance = True
# labels = ([self.dict.get(n, n) for n in fields[5:]])
labels = list(map(int, labels))
self._image_paths.append(image_path)
self._labels.append(labels)
if flg_enhance and self._mode == 'train':
for i in range(self.cfg.enhance_times):
self._image_paths.append(image_path)
self._labels.append(labels)
self._num_image = len(self._image_paths)
# NOTE(2020.04.30) we started using explicit config of data index, so disabling this dynamic subsampling
# features to avoid confusion.
assert subsample_size == -1
# if subsample_size > 0:
# if subsample_size > self._num_image:
# raise AssertionError(f'subsample_size ({subsample_size}) should be less than {self._num_image}')
# rng = np.random.RandomState(seed=subsample_seed)
# idx = rng.choice(self._num_image, size=subsample_size, replace=False)
|
# self._image_paths = [self._image_paths[i] for i in idx]
# self._labels = [self._labels[i] for i in idx]
# self._num_image = len(self._labels)
if cfg.cache_bitmap:
self._bitmap_cache = self._build_bitmap_cache()
else:
self._bitmap_cache = None
def __len__(self):
|
return self._num_image
def _border_pad(self, image):
h, w, c = image.shape
if self.cfg.border_pad == 'zero':
image = np.pad(
image,
((0, self.cfg.long_side - h),
(0, self.cfg.long_side - w), (0, 0)),
mode='constant', constant_values=0.0
)
elif self.cfg.border_pad == 'pixel_mean':
image = np.pad(
image,
((0, self.cfg.long_side - h),
(0, self.cfg.long_side - w), (0, 0)),
mode='constant', constant_values=self.cfg.pixel_mean
)
else:
image = np.pad(
image,
((0, self.cfg.long_side - h),
(0, self.cfg.long_side - w), (0, 0)),
mode=self.cfg.border_pad
)
return image
def _fix_ratio(self, image):
h, w, c = image.shape
if h >= w:
ratio = h * 1.0 / w
h_ = self.cfg.long_side
w_ = round(h_ / ratio)
else:
ratio = w * 1.0 / h
w_ = self.cfg.long_side
h_ = round(w_ / ratio)
image = cv2.resize(image, dsize=(w_, h_),
interpolation=cv2.INTER_LINEAR)
image = self._border_pad(image)
return image
def _build_bitmap_cache(self):
print('Pre-loading all images...(might take a while)')
return [self._load_image(idx) for idx in range(self._num_image)]
def _load_image(self, idx):
image = cv2.imread(self._image_paths[idx], 0)
image = Image.fromarray(image)
return image
def __getitem__(self, idx):
if self._bitmap_cache is not None:
image = self._bitmap_cache[idx]
else:
image = self._load_image(idx)
if self._mode == 'train':
image = GetTransforms(image, type=self.cfg.use_transforms_type)
image = np.array(image)
if self.cfg.use_equalizeHist:
image = cv2.equalizeHist(image)
image = cv2.cvtColor(image, cv2.COLOR_GRAY2RGB).astype(np.float32)
if self.cfg.fix_ratio:
image = self._fix_ratio(image)
else:
image = cv2.resize(image, dsize=(self.cfg.width, self.cfg.height),
interpolation=cv2.INTER_LINEAR)
if self.cfg.gaussian_blur > 0:
image = cv2.GaussianBlur(image, (self.cfg.gaussian_blur,
self.cfg.gaussian_blur), 0)
# normalization
image -= self.cfg.pixel_mean
# vgg and resnet do not use pixel_std, densenet and inception use.
if self.cfg.use_pixel_std:
image /= self.cfg.pixel_std
# normal image tensor : H x W x C
# torch image tensor : C X H X W
image = image.transpose((2, 0, 1))
labels = np.array(self._labels[idx]).astype(np.float32)
path = self._image_paths[idx]
if self._mode == 'train' or self._mode == 'dev':
return (image, labels)
elif self._mode == 'test':
return (image, path)
elif self._mode == 'heatmap':
return (image, path, labels)
else:
raise Exception('Unknown mode : {}'.format(self._mode))
|
choldrim/jumpserver
|
apps/perms/apps.py
|
Python
|
gpl-2.0
| 126
| 0
|
from __future__ import unicode_literals
from django.apps import AppConfig
|
class Per
|
msConfig(AppConfig):
name = 'perms'
|
marcelosandoval/tekton
|
backend/appengine/routes/campapel/show.py
|
Python
|
mit
| 2,187
| 0.015546
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from config.template_middleware import TemplateResponse
from gaecookie.decorator import no_csrf
from gaepermission.decorator import login_not_required
from routes.campapel.home import returnIndex
from tekton import router
from tekton.gae.middleware.redirect import RedirectResponse
from gaeforms.ndb.form import ModelForm
from gaegraph.model import Node
from google.appengine.ext import ndb
from tekton.gae.middleware.redirect import RedirectResponse
from routes.campapel.modelo import CamPapel, CamPapelForm
@login_not_required
@no_csrf
def form(_resp):
contexto={'salvar_path':router.to_path(salvar)}
return TemplateResponse(contexto,'campapel/form.html')
@login_not_required
def salvar(**prop):
camPapelF=CamPapelForm(**prop)
erros=camPapelF.validate()
if erros:
contexto={'salvar_pat
|
h':router.to_path(salvar),
'erros':erros,
'camPapel':prop}
return TemplateResponse(contexto,'campapel/form.html')
else:
camPapel=camPapelF.fill_model()
camPapel.put()
return RedirectResponse(returnIndex())
@login_not_required
@no_csrf
def editar_form(camPapel_id):
camPapel_id=int(camPapel_id)
camPapel=CamPapel.get_by_id(camPapel_id)
conte
|
xto={'salvar_path':router.to_path(editar,camPapel_id),'camPapel':camPapel}
return TemplateResponse(contexto,template_path='campapel/form.html')
@login_not_required
def editar(camPapel_id,**prop):
camPapel_id=int(camPapel_id)
camPapel=CamPapel.get_by_id(camPapel_id)
camPapelF=CamPapelForm(**prop)
erros=camPapelF.validate()
if erros:
contexto={'salvar_path':router.to_path(editar),
'erros':erros,
'camPapel':camPapelF}
return TemplateResponse(contexto,'campapel/form.html')
else:
camPapelF.fill_model(camPapel)
camPapel.put()
return RedirectResponse(router.to_path(returnIndex()))
@login_not_required
def deletar(camPapel_id):
chave=ndb.Key(CamPapel,int(camPapel_id))
chave.delete()
return RedirectResponse(router.to_path(returnIndex()))
|
ric2b/Vivaldi-browser
|
chromium/content/test/gpu/determine_gold_inexact_parameters.py
|
Python
|
bsd-3-clause
| 4,018
| 0.007218
|
#!/usr/bin/env vpython3
# Copyright 2020 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from __future__ import print_function
import argparse
import logging
import sys
import gold_inexact_matching.base_parameter_optimizer as base_optimizer
import gold_inexact_matching.binary_search_parameter_optimizer\
as binary_optimizer
import gold_inexact_matching.brute_force_parameter_optimizer as brute_optimizer
import gold_inexact_matching.local_minima_parameter_optimizer\
as local_optimizer
from gold_inexact_matching import optimizer_set
# Script to find suitable values for Skia Gold inexact matching.
#
# Inexact matching in Skia Gold has three tunable parameters:
# 1. The max number of differing pixels.
# 2. The max delta for any single pixel.
# 3. The threshold for a Sobel filter.
#
# Ideally, we use the following hierarchy of comparison approaches:
# 1. Exact matching.
# 2. Exact matching after a Sobel filter is applied.
# 3. Fuzzy matching after a Sobel filter is applied.
#
# However, there may be cases where only using a Sobel filter requires masking a
# very large amount of the image compared to Sobel + very conservative fuzzy
# matching.
#
# Even if such cases are not hit, the process of determining good values for the
# parameters is quite tedious since it requires downloading images from Gold and
# manually running multiple calls to `goldctl match`.
#
# This script attem
|
pts to remedy both issues by handling all of the trial and
# error and suggesting potential parameter values for the user to choose from.
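# Illustrative sketch only (an assumption, not part of this script): a rough model
# of the fuzzy comparison that the first two tunable parameters control, given a
# flat list of per-pixel deltas between the produced image and the golden image.
def _fuzzy_match_sketch(pixel_deltas, max_different_pixels, max_per_pixel_delta):
  # A diff passes fuzzy matching when few enough pixels differ and no single
  # pixel differs by more than the allowed delta.
  differing = [d for d in pixel_deltas if d > 0]
  return (len(differing) <= max_different_pixels
          and all(d <= max_per_pixel_delta for d in differing))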
def CreateArgumentParser():
parser = argparse.Argument
|
Parser(
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
script_parser = parser.add_argument_group('Script Arguments')
script_parser.add_argument('-v',
'--verbose',
dest='verbose_count',
default=0,
action='count',
                             help='Verbose level (multiple times for more)')
subparsers = parser.add_subparsers(help='Optimization algorithm')
binary_parser = subparsers.add_parser(
'binary_search',
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
help='Perform a binary search to optimize a single parameter. The best '
'option if you only want to tune one parameter.')
binary_parser.set_defaults(
clazz=binary_optimizer.BinarySearchParameterOptimizer)
binary_optimizer.BinarySearchParameterOptimizer.AddArguments(binary_parser)
local_parser = subparsers.add_parser(
'local_minima',
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
help='Perform a BFS to find local minima using weights for each '
'parameter. Slower than binary searching, but supports an arbitrary '
'number of parameters.')
local_parser.set_defaults(clazz=local_optimizer.LocalMinimaParameterOptimizer)
local_optimizer.LocalMinimaParameterOptimizer.AddArguments(local_parser)
brute_parser = subparsers.add_parser(
'brute_force',
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
help='Brute force all possible combinations. VERY, VERY slow, but can '
'potentially find better values than local_minima.')
brute_parser.set_defaults(clazz=brute_optimizer.BruteForceParameterOptimizer)
brute_optimizer.BruteForceParameterOptimizer.AddArguments(brute_parser)
return parser
def SetLoggingVerbosity(args):
logger = logging.getLogger()
if args.verbose_count == 0:
logger.setLevel(logging.WARNING)
elif args.verbose_count == 1:
logger.setLevel(logging.INFO)
else:
logger.setLevel(logging.DEBUG)
def main():
parser = CreateArgumentParser()
args = parser.parse_args()
SetLoggingVerbosity(args)
optimizer = optimizer_set.OptimizerSet(args, args.clazz)
optimizer.RunOptimization()
return 0
if __name__ == '__main__':
sys.exit(main())
|
srcLurker/home-assistant
|
homeassistant/components/camera/__init__.py
|
Python
|
mit
| 5,960
| 0
|
# pylint: disable=too-many-lines
"""
Component to interface with cameras.
For more details about this component, please refer to the documentation at
https://home-assistant.io/components/camera/
"""
import asyncio
import logging
from aiohttp import web
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.helpers.config_validation import PLATFORM_SCHEMA # noqa
from homeassistant.components.http import HomeAssistantView
DOMAIN = 'camera'
DEPENDENCIES = ['http']
SCAN_INTERVAL = 30
ENTITY_ID_FORMAT = DOMAIN + '.{}'
STATE_RECORDING = 'recording'
STATE_STREAMING = 'streaming'
STATE_IDLE = 'idle'
ENTITY_IMAGE_URL = '/api/camera_proxy/{0}?token={1}'
@asyncio.coroutine
def async_setup(hass, config):
"""Setup the camera component."""
component = EntityComponent(
logging.getLogger(__name__), DOMAIN, hass, SCAN_INTERVAL)
hass.http.register_view(CameraImageView(hass, component.entities))
hass.http.register_view(CameraMjpegStream(hass, component.entities))
yield from component.async_setup(config)
return True
class Camera(Entity):
"""The base class for camera entities."""
def __init__(self):
"""Initialize a camera."""
self.is_streaming = False
@property
def access_token(self):
"""Access token for this camera."""
return str(id(self))
@property
def should_poll(self):
"""No need to poll cameras."""
return False
@property
def entity_picture(self):
"""Return a link to the camera feed as entity picture."""
return ENTITY_IMAGE_URL.format(self.entity_id, self.access_token)
@property
def is_recording(self):
"""Return true if the device is recording."""
return False
@property
def brand(self):
"""Camera brand."""
return None
@property
def model(self):
"""Camera model."""
return None
def
|
camera_image(self):
"""Return bytes of camera image."""
raise NotImplementedError()
@asyncio.coroutine
def async_camera_image(self):
"""Return bytes of camera image.
This method must be run in the event
|
loop.
"""
image = yield from self.hass.loop.run_in_executor(
None, self.camera_image)
return image
@asyncio.coroutine
def handle_async_mjpeg_stream(self, request):
"""Generate an HTTP MJPEG stream from camera images.
This method must be run in the event loop.
"""
response = web.StreamResponse()
response.content_type = ('multipart/x-mixed-replace; '
'boundary=--jpegboundary')
yield from response.prepare(request)
def write(img_bytes):
"""Write image to stream."""
response.write(bytes(
'--jpegboundary\r\n'
'Content-Type: image/jpeg\r\n'
'Content-Length: {}\r\n\r\n'.format(
len(img_bytes)), 'utf-8') + img_bytes + b'\r\n')
last_image = None
try:
while True:
img_bytes = yield from self.async_camera_image()
if not img_bytes:
break
if img_bytes is not None and img_bytes != last_image:
write(img_bytes)
# Chrome seems to always ignore first picture,
# print it twice.
if last_image is None:
write(img_bytes)
last_image = img_bytes
yield from response.drain()
yield from asyncio.sleep(.5)
finally:
yield from response.write_eof()
@property
def state(self):
"""Camera state."""
if self.is_recording:
return STATE_RECORDING
elif self.is_streaming:
return STATE_STREAMING
else:
return STATE_IDLE
@property
def state_attributes(self):
"""Camera state attributes."""
attr = {
'access_token': self.access_token,
}
if self.model:
attr['model_name'] = self.model
if self.brand:
attr['brand'] = self.brand
return attr
class CameraView(HomeAssistantView):
"""Base CameraView."""
requires_auth = False
def __init__(self, hass, entities):
"""Initialize a basic camera view."""
super().__init__(hass)
self.entities = entities
@asyncio.coroutine
def get(self, request, entity_id):
"""Start a get request."""
camera = self.entities.get(entity_id)
if camera is None:
return web.Response(status=404)
authenticated = (request.authenticated or
request.GET.get('token') == camera.access_token)
if not authenticated:
return web.Response(status=401)
response = yield from self.handle(request, camera)
return response
@asyncio.coroutine
    def handle(self, request, camera):
        """Handle the camera request."""
raise NotImplementedError()
class CameraImageView(CameraView):
"""Camera view to serve an image."""
url = "/api/camera_proxy/{entity_id}"
name = "api:camera:image"
@asyncio.coroutine
def handle(self, request, camera):
"""Serve camera image."""
image = yield from camera.async_camera_image()
if image is None:
return web.Response(status=500)
return web.Response(body=image)
class CameraMjpegStream(CameraView):
"""Camera View to serve an MJPEG stream."""
url = "/api/camera_proxy_stream/{entity_id}"
name = "api:camera:stream"
@asyncio.coroutine
def handle(self, request, camera):
"""Serve camera image."""
yield from camera.handle_async_mjpeg_stream(request)
|
neuroo/equip
|
tests/test_visitors.py
|
Python
|
apache-2.0
| 2,730
| 0.005128
|
import pytest
from testutils import get_co, get_bytecode
from equip import BytecodeObject, BlockVisitor
from equip.bytecode import MethodDeclaration, TypeDeclaration, ModuleDeclaration
from equip.bytecode.utils import show_bytecode
import equip.utils.log as logutils
from equip.utils.log import logger
logutils.enableLogger(to_file='./equip.log')
from equip.analysis import ControlFlow, BasicBlock
SIMPLE_PROGRAM = """
import random
import sys
a = lambda x, y: x + (y if foo == 'bar' else x)
def some_value(i):
if (i % 2) == 0:
print "even",
elif foobar:
print "whatever"
else:
print "odd",
for n in range(2, 10):
for x in range(2, n):
if n % x == 0:
print n, 'equals', x, '*', n/x
break
print "foobar"
else:
# loop fell through without finding a factor
print n, 'is a prime number'
print "number: %d" % i
return i - 1
def ask_ok(prompt, retries=4, complaint='Yes or no, please!'):
while True:
ok = raw_input(prompt)
if ok in ('y', 'ye', 'yes'):
return True
if ok in ('n', 'no', 'nop', 'nope'):
return False
print False
retries = retries - 1
if retries < 0:
raise IOError('refusenik user')
print "Never reached"
print complaint
if foobar:
print "whatever"
def with_stmt(something):
with open('output.txt', 'w') as f:
f.write('Hi there!')
def exception_tests():
try:
fd = open('something')
except SomeException, ex:
print "SomeException"
except Exception, ex:
print "Last Exception"
finally:
print "Finally"
def while_loop(data, start):
while start < len(data):
print start
start += 1
if start > 10:
return -1
def main():
for i in range(1, random.randint()):
print some_value(i)
print "Call stats:"
items = sys.callstats().items()
items = [(value, key) for key, value in items]
items.sort()
items.reverse()
for value,key in items:
print "%30s: %30s"%(key, value)
def return_Stmts(i):
if i == 1:
return 1
elif i ==
|
2:
return 2
print "This is something else"
if __name__ == '__mai
|
n__':
main()
"""
def test_block_visitor():
co_simple = get_co(SIMPLE_PROGRAM)
assert co_simple is not None
bytecode_object = BytecodeObject('<string>')
bytecode_object.parse_code(co_simple)
class BlockPrinterVisitor(BlockVisitor):
def __init__(self):
BlockVisitor.__init__(self)
def new_control_flow(self):
logger.debug("Received new CFG: %s", self.control_flow)
def visit(self, block):
logger.debug("Visiting block: %s", block)
logger.debug('\n' + show_bytecode(block.bytecode))
visitor = BlockPrinterVisitor()
bytecode_object.accept(visitor)
|
ecolitan/fatics
|
src/command/game_command.py
|
Python
|
agpl-3.0
| 6,711
| 0.002086
|
# -*- coding: utf-8 -*-
# Copyright (C) 2010 Wil Mahan <wmahan+fatics@gmail.com>
#
# This file is part of FatICS.
#
# FatICS is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# FatICS is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITN
|
ESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with FatICS. If not, see <http://www.gnu.org/licenses/>.
#
import re
import offer
import game
from command_parser import BadCommandError
from command import ics_command, Command
from game_constants import *
from db import db
class GameMixin(object):
def _get_played_game(self, conn):
g = conn.user.ses
|
sion.game
if not g or g.gtype != game.PLAYED:
g = None
conn.write(_("You are not playing a game.\n"))
return g
def _game_param(self, param, conn):
""" Find a game from a command argument, currently being
played, examined, or observed, prioritized in that order. """
if param is not None:
g = game.from_name_or_number(param, conn)
else:
if conn.user.session.game:
g = conn.user.session.game
elif conn.user.session.observed:
g = conn.user.session.observed.primary()
else:
conn.write(_("You are not playing, examining, or observing a game.\n"))
g = None
return g
@ics_command('abort', 'n')
class Abort(Command, GameMixin):
def run(self, args, conn):
g = self._get_played_game(conn)
if not g:
return
'''if len(conn.user.session.games) > 1:
conn.write(_('Please use "simabort" for simuls.\n'))
return'''
g = conn.user.session.game
if g.variant.pos.ply < 2:
g.result('Game aborted on move 1 by %s' % conn.user.name, '*')
else:
offer.Abort(g, conn.user)
@ics_command('adjourn', '')
class Adjourn(Command, GameMixin):
def run(self, args, conn):
g = self._get_played_game(conn)
if not g:
return
g = conn.user.session.game
#if g.variant.pos.ply < 5:
offer.Adjourn(g, conn.user)
@ics_command('draw', 'o')
class Draw(Command, GameMixin):
def run(self, args, conn):
if args[0] is None:
g = self._get_played_game(conn)
if not g:
return
offer.Draw(g, conn.user)
else:
conn.write('TODO: DRAW PARAM\n')
@ics_command('resign', 'o')
class Resign(Command, GameMixin):
def run(self, args, conn):
if args[0] is not None:
conn.write('TODO: RESIGN PLAYER\n')
return
g = self._get_played_game(conn)
if g:
g.resign(conn.user)
@ics_command('eco', 'oo')
class Eco(Command, GameMixin):
eco_pat = re.compile(r'[a-z][0-9][0-9][a-z]?')
nic_pat = re.compile(r'[a-z][a-z]\.[0-9][0-9]')
def run(self, args, conn):
g = None
if args[1] is not None:
assert(args[0] is not None)
rows = []
if args[0] == 'e':
if not self.eco_pat.match(args[1]):
conn.write(_("You haven't specified a valid ECO code.\n"))
else:
rows = db.look_up_eco(args[1])
elif args[0] == 'n':
if not self.nic_pat.match(args[1]):
conn.write(_("You haven't specified a valid NIC code.\n"))
else:
rows = db.look_up_nic(args[1])
else:
raise BadCommandError()
for row in rows:
if row['eco'] is None:
row['eco'] = 'A00'
if row['nic'] is None:
row['nic'] = '-----'
if row['long_'] is None:
row['long_'] = 'Unknown / not matched'
assert(row['fen'] is not None)
conn.write('\n')
conn.write(' ECO: %s\n' % row['eco'])
conn.write(' NIC: %s\n' % row['nic'])
conn.write(' LONG: %s\n' % row['long_'])
conn.write(' FEN: %s\n' % row['fen'])
else:
g = self._game_param(args[0], conn)
if g:
(ply, eco, long) = g.get_eco()
(nicply, nic) = g.get_nic()
conn.write(_('Eco for game %d (%s vs. %s):\n') % (g.number, g.white_name, g.black_name))
conn.write(_(' ECO[%3d]: %s\n') % (ply, eco))
conn.write(_(' NIC[%3d]: %s\n') % (nicply, nic))
conn.write(_('LONG[%3d]: %s\n') % (ply, long))
@ics_command('moves', 'n')
class Moves(Command, GameMixin):
def run(self, args, conn):
g = self._game_param(args[0], conn)
if g:
g.write_moves(conn)
@ics_command('moretime', 'd')
class Moretime(Command, GameMixin):
def run(self, args, conn):
g = self._get_played_game(conn)
if g:
secs = args[0]
if secs < 1 or secs > 36000:
conn.write(_('Invalid number of seconds.\n'))
else:
g.moretime(secs, conn.user)
@ics_command('flag', '')
class Flag(Command):
def run(self, args, conn):
if not conn.user.session.game:
conn.write(_("You are not playing a game.\n"))
return
g = conn.user.session.game
if not g.clock.check_flag(g, opp(g.get_user_side(conn.user))):
conn.write(_('Your opponent is not out of time.\n'))
@ics_command('refresh', 'n')
class Refresh(Command, GameMixin):
def run(self, args, conn):
g = self._game_param(args[0], conn)
if g:
g.send_board(conn.user, isolated=True)
@ics_command('time', 'n')
class Time(Command, GameMixin):
def run(self, args, conn):
g = self._game_param(args[0], conn)
if g:
(white_clock, black_clock) = g.clock.as_str()
g.send_info_str(conn.user)
conn.write(_('White Clock : %s\n') % white_clock)
conn.write(_('Black Clock : %s\n') % black_clock)
@ics_command('ginfo', 'n')
class Ginfo(Command, GameMixin):
def run(self, args, conn):
g = self._game_param(args[0], conn)
if g:
g.ginfo(conn)
# vim: expandtab tabstop=4 softtabstop=4 shiftwidth=4 smarttab autoindent
|
motmot/fastimage
|
motmot/FastImage/util.py
|
Python
|
bsd-3-clause
| 2,489
| 0.011651
|
import glob, os, sys
def get_build_info(ipp_static=True, # static build requires static IPP libs
ipp_arch=None,
ipp_root=None,
):
"""get options to build Python extensions built with Intel IPP
ipp_static - True to build using static IPP library (requires IPP license)
ipp_arch - Architecture of IPP to use (None uses default, 'intel64' and 'ia32' are options)
ipp_root - Root location of the IPP installation (e.g. /opt/intel/compilers_and_libraries/linux/ipp)
"""
if ipp_arch is None:
if sys.platform == 'darwin':
ipp_arch = 'intel64'
elif sys.platform.startswith('linux'):
machine = os.uname()[4]
if machine == 'x86_64':
ipp_arch = 'intel64'
elif machine
|
in ['i386','i686']:
ipp_arch = 'ia32'
else:
raise ValueError("unexpected linux architecture: %s"%machine)
elif sys.platform == 'win32':
ipp_arch = 'ia32'
else:
|
raise NotImplementedError("auto-architecture detection not implemented on this platform")
vals = {}
if sys.platform.startswith('linux'):
libdirname = 'lib/%s_lin' % ipp_arch
else:
libdirname = 'lib'
incdirname = 'include'
ipp_define_macros = []
ipp_extra_link_args = []
ipp_extra_objects = []
if sys.platform.startswith('win'):
ipp_define_macros = [('FASTIMAGE_IPP_ARCH','\\"%s\\"'%ipp_arch)]
else:
ipp_define_macros = [('FASTIMAGE_IPP_ARCH','"%s"'%ipp_arch)]
if ipp_static:
ipp_define_macros.append( ('FASTIMAGE_STATICIPP','1') )
ipp_include_dirs = [os.path.join(ipp_root,incdirname)]
# like LDFLAGS in sample Makefile.osx
if ipp_static:
ipp_library_dirs = []
ipp_libraries = []
libdir = os.path.join(ipp_root,libdirname)
        ipp_extra_objects = [os.path.join(libdir,'lib'+lib+'.a') for lib in ('ippi','ipps','ippcv','ippcc','ippcore')]
else:
ipp_library_dirs = [ os.path.join(ipp_root,libdirname) ]
ipp_libraries = ['ippi','ipps','ippcv','ippcc','ippcore']
ipp_extra_objects = []
vals['extra_link_args'] = ipp_extra_link_args
vals['ipp_library_dirs'] = ipp_library_dirs
vals['ipp_libraries'] = ipp_libraries
vals['ipp_define_macros'] = ipp_define_macros
vals['ipp_include_dirs'] = ipp_include_dirs
vals['ipp_extra_objects'] = ipp_extra_objects
return vals
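# Example usage -- a minimal sketch only; the ipp_root below is an assumption and
# should be pointed at your own IPP installation.
if __name__ == '__main__':
    info = get_build_info(ipp_static=False,
                          ipp_root='/opt/intel/compilers_and_libraries/linux/ipp')
    for key in sorted(info):
        print('%s: %r' % (key, info[key]))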
|
yujikato/DIRAC
|
src/DIRAC/WorkloadManagementSystem/JobWrapper/test/Test_Watchdog.py
|
Python
|
gpl-3.0
| 958
| 0.018789
|
""" unit test for Watchdog.py
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# impor
|
ts
import os
from mock import MagicMock
# sut
from DIRAC.WorkloadManagementSystem.JobWrapper.Watchdog import Watchdog
mock_exeThread = MagicMock()
mock_spObject = MagicMock()
def test_calibrate():
pid = os.getpid()
wd = Watchdog(pid, mock_exeThread, mock_spObject, 5000)
|
res = wd.calibrate()
assert res['OK'] is True
def test__performChecks():
pid = os.getpid()
wd = Watchdog(pid, mock_exeThread, mock_spObject, 5000)
res = wd.calibrate()
assert res['OK'] is True
res = wd._performChecks()
assert res['OK'] is True
def test__performChecksFull():
pid = os.getpid()
wd = Watchdog(pid, mock_exeThread, mock_spObject, 5000)
wd.testCPULimit = 1
wd.testMemoryLimit = 1
res = wd.calibrate()
assert res['OK'] is True
res = wd._performChecks()
assert res['OK'] is True
|
sgala/gajim
|
src/common/xmpp/simplexml.py
|
Python
|
gpl-3.0
| 18,852
| 0.039518
|
## simplexml.py based on Mattew Allum's xmlstream.py
##
## Copyright (C) 2003-2005 Alexey "Snake" Nezhdanov
##
## This program is free software; you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation; either version 2, or (at your option)
## any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
# $Id: simplexml.py,v 1.27 2005/04/30 07:20:27 snakeru Exp $
"""Simplexml module provides xmpppy library with all needed tools to handle XML nodes and XML streams.
I'm personally using it in many other separate projects. It is designed to be as standalone as possible."""
import xml.parsers.expat
def XMLescape(txt):
"""Returns provided string with symbols & < > " replaced by their respective XML entities."""
# replace also FORM FEED and ESC, because they are not valid XML chars
return txt.replace("&", "&").replace("<", "<").replace(">", ">").replace('"', """).replace(u'\x0C', "").replace(u'\x1B', "")
ENCODING='utf-8'
def ustr(what):
"""Converts object "what" to unicode string using it's own __str__ method if accessible or unicode method otherwise."""
if isinstance(what, unicode): return what
try: r=what.__str__()
except AttributeError: r=str(what)
if not isinstance(r, unicode):
|
return unicode(r,ENCODING)
return r
class Node(object):
|
""" Node class describes syntax of separate XML Node. It have a constructor that permits node creation
from set of "namespace name", attributes and payload of text strings and other nodes.
It does not natively support building node from text string and uses NodeBuilder class for that purpose.
After creation node can be mangled in many ways so it can be completely changed.
Also node can be serialised into string in one of two modes: default (where the textual representation
of node describes it exactly) and "fancy" - with whitespace added to make indentation and thus make
result more readable by human.
		Node class has an attribute FORCE_NODE_RECREATION that defaults to False, thus enabling fast node
		replication from some other node. The drawback of the fast way is that the new node shares some
		info with the "original" node, so that changing the one node may influence the other. Though it is
rarely needed (in xmpppy it is never needed at all since I'm usually never using original node after
replication (and using replication only to move upwards on the classes tree).
"""
FORCE_NODE_RECREATION=0
def __init__(self, tag=None, attrs={}, payload=[], parent=None, nsp=None, node_built=False, node=None):
""" Takes "tag" argument as the name of node (prepended by namespace, if needed and separated from it
by a space), attrs dictionary as the set of arguments, payload list as the set of textual strings
and child nodes that this node carries within itself and "parent" argument that is another node
that this one will be the child of. Also the __init__ can be provided with "node" argument that is
		either a text string containing exactly one node or another Node instance to begin with. If both
		"node" and other arguments are provided, the node is initially created as a replica of the provided
		"node" and then modified to be compliant with the other arguments."""
if node:
if self.FORCE_NODE_RECREATION and isinstance(node, Node):
node=str(node)
if not isinstance(node, Node):
node=NodeBuilder(node,self)
node_built = True
else:
self.name,self.namespace,self.attrs,self.data,self.kids,self.parent,self.nsd = node.name,node.namespace,{},[],[],node.parent,{}
for key in node.attrs.keys(): self.attrs[key]=node.attrs[key]
for data in node.data: self.data.append(data)
for kid in node.kids: self.kids.append(kid)
for k,v in node.nsd.items(): self.nsd[k] = v
else: self.name,self.namespace,self.attrs,self.data,self.kids,self.parent,self.nsd = 'tag','',{},[],[],None,{}
if parent:
self.parent = parent
self.nsp_cache = {}
if nsp:
for k,v in nsp.items(): self.nsp_cache[k] = v
for attr,val in attrs.items():
if attr == 'xmlns':
self.nsd[u''] = val
elif attr.startswith('xmlns:'):
self.nsd[attr[6:]] = val
self.attrs[attr]=attrs[attr]
if tag:
if node_built:
pfx,self.name = (['']+tag.split(':'))[-2:]
self.namespace = self.lookup_nsp(pfx)
else:
if ' ' in tag:
self.namespace,self.name = tag.split()
else:
self.name = tag
if isinstance(payload, basestring): payload=[payload]
for i in payload:
if isinstance(i, Node): self.addChild(node=i)
else: self.data.append(ustr(i))
def lookup_nsp(self,pfx=''):
ns = self.nsd.get(pfx,None)
if ns is None:
ns = self.nsp_cache.get(pfx,None)
if ns is None:
if self.parent:
ns = self.parent.lookup_nsp(pfx)
self.nsp_cache[pfx] = ns
else:
return 'http://www.gajim.org/xmlns/undeclared'
return ns
def __str__(self,fancy=0):
""" Method used to dump node into textual representation.
if "fancy" argument is set to True produces indented output for readability."""
s = (fancy-1) * 2 * ' ' + "<" + self.name
if self.namespace:
if not self.parent or self.parent.namespace!=self.namespace:
if 'xmlns' not in self.attrs:
s = s + ' xmlns="%s"'%self.namespace
for key in self.attrs.keys():
val = ustr(self.attrs[key])
s = s + ' %s="%s"' % ( key, XMLescape(val) )
s = s + ">"
cnt = 0
if self.kids:
if fancy: s = s + "\n"
for a in self.kids:
if not fancy and (len(self.data)-1)>=cnt: s=s+XMLescape(self.data[cnt])
elif (len(self.data)-1)>=cnt: s=s+XMLescape(self.data[cnt].strip())
s = s + a.__str__(fancy and fancy+1)
cnt=cnt+1
if not fancy and (len(self.data)-1) >= cnt: s = s + XMLescape(self.data[cnt])
elif (len(self.data)-1) >= cnt: s = s + XMLescape(self.data[cnt].strip())
if not self.kids and s.endswith('>'):
s=s[:-1]+' />'
if fancy: s = s + "\n"
else:
if fancy and not self.data: s = s + (fancy-1) * 2 * ' '
s = s + "</" + self.name + ">"
if fancy: s = s + "\n"
return s
def addChild(self, name=None, attrs={}, payload=[], namespace=None, node=None):
""" If "node" argument is provided, adds it as child node. Else creates new node from
the other arguments' values and adds it as well."""
if node:
newnode=node
node.parent = self
else: newnode=Node(tag=name, parent=self, attrs=attrs, payload=payload)
if namespace:
newnode.setNamespace(namespace)
self.kids.append(newnode)
return newnode
def addData(self, data):
""" Adds some CDATA to node. """
self.data.append(ustr(data))
def clearData(self):
""" Removes all CDATA from the node. """
self.data=[]
def delAttr(self, key):
""" Deletes an attribute "key" """
del self.attrs[key]
def delChild(self, node, attrs={}):
""" Deletes the "node" from the node's childs list, if "node" is an instance.
Else deletes the first node that have specified name and (optionally) attributes. """
if not isinstance(node, Node): node=self.getTag(node,attrs)
self.kids.remove(node)
return node
def getAttrs(self):
""" Returns all node's attributes as dictionary. """
return self.attrs
def getAttr(self, key):
""" Returns value of specified attribute. """
try: return self.attrs[key]
except: return None
def getChildren(self):
""" Returns all node's child nodes as list. """
return self.kids
def getData(self):
""" Returns all node CDATA as string (concatenated). """
return ''.join(self.data)
def getName(self):
""" Returns the name of node """
return self.name
def getNamespace(self):
""" Returns the namespace of node """
return self.namespace
def getParent(self):
""" Returns the parent of node (if present). """
return self.parent
def getPayload(self):
""" Return the payload of node i.e. list of child nodes and CDATA entries.
F.e. for "<node>text1<nodea/><nodeb/> text2</node>" will be returned list:
['text1', <nodea instance>, <nodeb in
|
wadobo/congressus
|
congressus/tickets/urls.py
|
Python
|
agpl-3.0
| 1,208
| 0.005795
|
from django.urls import path
from tickets import views
urlpatterns = [
path('', views.last_event, name='last_event'),
path('event/<str:ev>/', views.event, name='event'),
path('event/<str:ev>/<str:space>/<str:session>/register/', views.register, name='register'),
path('ticket/<str:order>/payment/', views.payment, name='payment'),
|
path('ticket/<str:order>/thanks/', views.thanks, name='thanks'),
path('ticket/confirm/', views.confirm, name='confirm'),
path('ticket/confirm/paypal/', views.confirm_paypal, name='confirm_paypal'),
path('ticket/<str:order>/confirm/stripe/', views.confirm_stripe, name='confirm_stripe'),
path('ticket/template/<int:id>/preview/', views.template_preview, name='template_p
|
review'),
path('ticket/email-confirm/<int:id>/preview/', views.email_confirm_preview, name='email_confirm_preview'),
path('<str:ev>/', views.multipurchase, name='multipurchase'),
path('seats/<int:session>/<int:layout>/', views.ajax_layout, name='ajax_layout'),
path('seats/view/<int:map>/', views.seats, name='seats'),
path('seats/auto/', views.autoseats, name='autoseats'),
path('seats/bystr/', views.seats_by_str, name='seats_by_str'),
]
|
amitgroup/parts-net
|
scripts/train_and_test5.py
|
Python
|
bsd-3-clause
| 12,277
| 0.013928
|
from __future__ import division, print_function, absolute_import
#from pnet.vzlog import default as vz
import numpy as np
import amitgroup as ag
import itertools as itr
import sys
import os
#import gv
import pnet
import time
def test(ims, labels, net):
yhat = net.classify(ims)
return yhat == labels
if pnet.parallel.main(__name__):
print("1")
import argparse
parser = argparse.ArgumentParser()
#parser.add_argument('seed', metavar='<seed>', type=int, help='Random seed')
parser.add_argument('param', metavar='<param>', type=float)
args0 = parser.parse_args()
param = args0.param
#for i in xrange(1, 7):
# print(make_support(i, 4).astype(np.uint8))
#params = randomize_layers_parameters(args0.seed)
#print(params)
unsup_training_times = []
sup_training_times = []
testing_times = []
error_rates = []
all_num_parts = []
maxdepth = 7
print("2")
# Switch which experiment here
#from pnet.mnist_danny import parse_background_random as loadf
from pnet.mnist_danny import parse_background_images as loadf
print("Loading...")
mnist_data = loadf()
print("Done.")
for training_seed in xrange(1):
layers = [
#pnet.IntensityThresholdLayer(),
pnet.EdgeLayer(k=5, radius=1, spread='orthogonal', minimum_contrast=0.05),#, pre_blurring=1.0),
#pnet.IntensityThresholdLayer(),
#pnet.IntensityThresholdLayer(),
pnet.PartsLayer(250, (7, 7), settings=dict(outer_frame=1,
em_seed=training_seed,
threshold=2,
samples_per_image=60,
max_samples=200000,
train_limit=10000,
min_prob=0.00005,
)),
pnet.PoolingLayer(shape=(4, 4), strides=(4, 4)),
]
if 0:
layers += [
pnet.RandomForestPartsLayer(256, (5, 5), settings=dict(outer_frame=1,
em_seed=training_seed,
threshold=2,
samples_per_image=40,
max_samples=200000,
train_limit=10000,
|
min_prob=0.0005,
trees=10,
max_depth=3,
)),
pnet.PoolingLayer(shape=(4, 4), strides=(4, 4)),
]
elif 0:
layers += [
pnet.GaussianPartsLayer(100, (5, 5), settings=dict(
em_seed=t
|
raining_seed,
samples_per_image=40,
max_samples=200000,
train_limit=100000,
)),
pnet.PoolingLayer(shape=(4, 4), strides=(4, 4)),
]
elif 0:
layers += [
pnet.PartsLayer(1000, (5, 5), settings=dict(outer_frame=1,
em_seed=training_seed,
threshold=2,
samples_per_image=40,
max_samples=200000,
train_limit=100000,
min_prob=0.0005,
)),
pnet.PoolingLayer(shape=(4, 4), strides=(4, 4)),
]
elif 0:
layers += [
pnet.BinaryTreePartsLayer(maxdepth, (5, 5), settings=dict(outer_frame=1,
em_seed=training_seed,
threshold=2,
samples_per_image=40,
max_samples=200000,
train_limit=10000,
min_prob=0.005,
#keypoint_suppress_radius=1,
min_samples_per_part=50,
split_criterion='IG',
split_entropy=0.2,
min_information_gain=0.01,
)),
pnet.PoolingLayer(shape=(4, 4), strides=(4, 4)),
]
elif 0:
layers += [
pnet.BinaryTreePartsLayer(maxdepth, (5, 5), settings=dict(outer_frame=1,
em_seed=training_seed,
threshold=2,
samples_per_image=40,
max_samples=200000,
train_limit=10000,
min_prob=0.0005,
#keypoint_suppress_radius=1,
min_samples_per_part=50,
split_criterion=split_criterion,
split_entropy=split_entropy,
min_information_gain=split_entropy,
)),
pnet.PoolingLayer(shape=(4, 4), strides=(4, 4)),
]
[
pnet.BinaryTreePartsLayer(10, (1, 1), settings=dict(outer_frame=0,
em_seed=training_seed+1,
threshold=1,
samples_per_image=200,
max_samples=1000000,
train_limit=10000,
#min_information_gain=0.05,
split_entropy=0.05,
min_prob=0.0005
)),
pnet.PoolingLayer(shape=(1, 1), strides=(1, 1)),
]
layers += [
pnet.MixtureClassificationLayer(n_components=1, min_prob=1e-5),
#pnet.SVMClassificationLayer(C=None),
]
net = pnet.PartsNet(layers)
TRAIN_SAMPLES = 10000
#TRAIN_SAMPLES = 1200
print(training_seed)
digits = range(10)
#ims = ag.io.load_mnist('training', selection=slice(0 + 3000 * training_seed, TRAIN_SAMPLES + 3000 * training_seed), return_labels=False)
ims = mnist_data['training_image'][0 + 1000 * training_seed : TRAIN_SAMPLES + 1000 * training_seed]
ims_label = mnist_data['training_label'][0 + 1000 * training_seed : TRAIN_SAMPLES + 1000 * training_seed]
validati
|
adelina-t/compute-hyperv
|
hyperv/tests/unit/test_eventhandler.py
|
Python
|
apache-2.0
| 7,212
| 0
|
# Copyright 2015 Cloudbase Solutions Srl
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import eventlet
import mock
from nova import exception
from hyperv.nova import constants
from hyperv.nova import eventhandler
from hyperv.nova import utilsfactory
from hyperv.tests.unit import test_base
class EventHandlerTestCase(test_base.HyperVBaseTestCase):
_FAKE_POLLING_INTERVAL = 3
_FAKE_EVENT_CHECK_TIMEFRAME = 15
@mock.patch.object(utilsfactory, 'get_vmutils')
def setUp(self, mock_get_vmutils):
super(EventHandlerTestCase, self).setUp()
self._state_change_callback = mock.Mock()
self._running_state_callback = mock.Mock()
self.flags(
power_state_check_timeframe=self._FAKE_EVENT_CHECK_TIMEFRAME,
group='hyperv')
self.flags(
power_state_event_polling_interval=self._FAKE_POLLING_INTERVAL,
group='hyperv')
self._event_handler = eventhandler.InstanceEventHandler(
self._state_change_callback)
self._event_handler._serial_console_ops = mock.Mock()
@mock.patch.object(eventhandler, 'wmi', create=True)
@mock.patch.object(eventhandler.InstanceEventHandler, '_dispatch_event')
@mock.patch.object(eventlet, 'sleep')
def _test_poll_events(self, mock_sleep, mock_dispatch,
mock_wmi, event_found=True):
fake_listener = mock.Mock()
mock_wmi.x_wmi_timed_out = Exception
fake_listener.side_effect = (mock.sentinel.event if event_found
else mock_wmi.x_wmi_timed_out,
KeyboardInterrupt)
self._event_handler._listener = fake_listener
# This is supposed to run as a daemon, so we'll just cause an exception
# in order to be able to test the method.
self.assertRaises(KeyboardInterrupt,
self._event_handler._poll_events)
if event_found:
mock_dispatch.assert_called_once_with(mock.sentinel.event)
else:
mock_sleep.assert_called_once_with(self._FAKE_POLLING_INTERVAL)
def test_poll_having_events(self):
# Test case in which events were found in the checked interval
self._test_poll_events()
def test_poll_no_event_found(self):
self._test_poll_events(event_found=False)
@mock.patch.object(eventhandler.InstanceEventHandler,
'_get_instance_uuid')
@mock.patch.object(eventhandler.InstanceEventHandler, '_emit_event')
def _test_dispatch_event(self, mock_emit_event, mock_get_uuid,
missing_uuid=False):
mock_get_uuid.return_value = (
mock.sentinel.instance_uuid if not missing_uuid else None)
self._event_handler._vmutils.get_vm_power_state.return_value = (
mock.sentinel.power_state)
event = mock.Mock()
event.ElementName = mock.sentinel.instance_name
event.EnabledState = mock.sentinel.enabled_state
self._event_handler._dispatch_event(event)
if not missing_uuid:
mock_emit_event.assert_called_once_with(
mock.sentinel.instance_name,
mock.sentinel.instance_uuid,
mock.sentinel.power_state)
else:
self.assertFalse(mock_emit_event.called)
def test_dispatch_event_new_final_state(self):
self._test_dispatch_event()
def test_dispatch_event_missing_uuid(self):
self._test_dispatch_event(missing_uuid=True)
@mock.patch.object(eventhandler.InstanceEventHandler, '_get_virt_event')
@mock.patch.object(eventlet, 'spawn_n')
def test_emit_event(self, mock_spawn, mock_get_event):
self._event_handler._emit_event(mock.sentinel.instance_name,
mock.sentinel.instance_uuid,
mock.sentinel.instance_state)
virt_event = mock_get_event.return_value
mock_spawn.assert_has_calls(
[mock.call(self._state_change_callback, virt_event),
mock.call(self._event_handler._handle_serial_console_workers,
mock.sentinel.instance_name,
mock.sentinel.instance_state)])
def test_handle_serial_console_instance_running(self):
self._event_handler._handle_serial_console_workers(
mock.sentinel.instance_name,
constants.HYPERV_VM_STATE_ENABLED)
serialops = self._event_handler._serial_console_ops
serialops.start_console_handler.assert_called_once_with(
mock.sentinel.instance_name)
def test_handle_serial_console_instance_stopped(self):
self._event_handler._handle_serial_console_workers(
mock.sentinel.instance_name,
constants.HYPERV_VM_STATE_DISABLED)
serialops = self._event_handler._serial_console_ops
serialops.stop_console_handler.assert_called_once_with(
mock.sentinel.instance_name)
def _test_get_instance_uuid(self, instance_found=True,
missing_uuid=False):
if instance_found:
side_effect = (mock.sentinel.instance_uuid
if not missing_uuid else None, )
else:
side_effect = exception.NotFound
mock_get_uuid = self._event_handler._vmutils.get_instance_uuid
mock_get_uuid.side_effect = side_effect
instance_uuid = self._event_handler._get_instance_uuid(
mock.sentinel.instance_name)
expected_uuid = (mock.sentinel.instance_uuid
|
if instance_found and not missing_uuid else None)
self.assertEqual(expected_uuid, instance_uuid)
def test_get_nova_created_instance_uuid(self):
self._test_get_instance_uuid()
|
def test_get_deleted_instance_uuid(self):
self._test_get_instance_uuid(instance_found=False)
def test_get_instance_uuid_missing_notes(self):
self._test_get_instance_uuid(missing_uuid=True)
@mock.patch('nova.virt.event.LifecycleEvent')
def test_get_virt_event(self, mock_lifecycle_event):
instance_state = constants.HYPERV_VM_STATE_ENABLED
expected_transition = self._event_handler._TRANSITION_MAP[
instance_state]
virt_event = self._event_handler._get_virt_event(
mock.sentinel.instance_uuid, instance_state)
self.assertEqual(mock_lifecycle_event.return_value,
virt_event)
mock_lifecycle_event.assert_called_once_with(
uuid=mock.sentinel.instance_uuid,
transition=expected_transition)
|
clreinki/GalaxyHarvester
|
udPassword.py
|
Python
|
agpl-3.0
| 2,630
| 0.001901
|
#!/usr/bin/python
"""
Copyright 2010 Paul Willworth <ioscode@gmail.com>
This file is part of Galaxy Harvester.
Galaxy Harvester is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Galaxy Harvester is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with Galaxy Harvester. If not, see <http://www.gnu.org/licenses/>.
"""
import os
import cgi
import Cookie
import hashlib
import MySQLdb
import dbSession
import dbShared
# Get current url
try:
url = os.environ['SCRIPT_NAME
|
']
except KeyError:
|
url = ''
form = cgi.FieldStorage()
# Get Cookies
errorstr = ''
cookies = Cookie.SimpleCookie()
try:
cookies.load(os.environ['HTTP_COOKIE'])
except KeyError:
errorstr = 'no cookies\n'
if errorstr == '':
try:
currentUser = cookies['userID'].value
except KeyError:
currentUser = ''
try:
loginResult = cookies['loginAttempt'].value
except KeyError:
loginResult = 'success'
try:
sid = cookies['gh_sid'].value
except KeyError:
sid = form.getfirst('gh_sid', '')
else:
currentUser = ''
loginResult = 'success'
sid = form.getfirst('gh_sid', '')
userpass = form.getfirst("userpass")
# escape input to prevent sql injection
sid = dbShared.dbInsertSafe(sid)
userpass = dbShared.dbInsertSafe(userpass)
# Get a session
logged_state = 0
linkappend = ''
sess = dbSession.getSession(sid, 2592000)
if (sess != ''):
logged_state = 1
currentUser = sess
linkappend = 'gh_sid=' + sid
# Check for errors
errstr='';
if (len(userpass) < 6):
errstr = errstr + "Your password must be at least 6 characters. \r\n"
if (logged_state == 0):
errstr = errstr + "You must be logged in to update your password. \r\n"
if (errstr != ''):
result = "Your Password could not be updated because of the following errors:\r\n" + errstr
else:
crypt_pass = hashlib.sha1(currentUser + userpass).hexdigest()
conn = dbShared.ghConn()
cursor = conn.cursor()
cursor.execute("UPDATE tUsers SET userPassword='" + crypt_pass + "' WHERE userID='" + currentUser + "';")
cursor.close()
conn.close()
result = "Password Updated"
print "Content-Type: text/html\n"
print result
|
schets/LILAC
|
src/scripts/python/process_data.py
|
Python
|
bsd-3-clause
| 2,106
| 0.012346
|
import numpy as np
import scipy.io as sio
import pylab as pl
import itertools as it
import io
def ret_tru(inval):
    return True
def get_next_ind(in_file, sep_ln="&", predicate = ret_tru):
lvals = []
for line in in_file:
line = line.strip()
if line == sep_ln:
break
if predicate(line):
lvals.append(line)
return lvals
def split_list(inl, predicate):
curl = []
tl = []
for v in inl:
if(predicate(v)):
if len(tl) > 0:
curl.append(tl)
tl = []
else:
tl.append(v)
return curl
dat_file = open("data_out.out")
#do processing for each value in the output
ltest = get_next_ind(dat_file, "&", lambda x:(x.startswith("Func") or x.startswith("Ablation") or x.startswith("Time") or x.startswith("&&")))
while(len(ltest) != 0):
ltest = split_list(ltest, lambda x:x == "&&")
if(len(ltest) == 0):
break
fncstack = []
timelist = ""
abl_val=[]
#create numpy arrays
for simval in ltest:
if len(simval)==1:
abl_val = np.fromstring(''.join(it.dropwhile(lambda x : not x.isdigit(), simval[0])), sep=' ')
continue
timestr = simval[0]
fncstr = simval[1]
timestr = it.dropwhile(lambda x: not x.isdigit(), timestr)
timestr = ''.join(timestr)
timelist = timelist + " " + timestr
fncstr = it.dropwhile(lambda x: not x.isdigit(), fncstr)
fncstr = ''.join(fncstr)
fncstack.append(np.fromstring(fncstr, sep=' '))
print len(fncstack)
neur_mat = np.vstack(fncstack).transpose();
time_vec = np.fromstring(timelist, sep=' ')
#write to .mat file
#create file name
fbase = "abl_study_"
for abl in abl_val:
fbase = fbase + str(int(abl)) + "_"
fbase = fbase[:-1
|
]
print fbase
sio.
|
savemat(fbase, {'abl_vals':np.array(abl_val), 'neur_series':neur_mat, 'time_vec':time_vec})
ltest = get_next_ind(dat_file, "&", lambda x:(x.startswith("Ablation") or x.startswith("Func") or x.startswith("Time") or x.startswith("&&")))
|
brucetsao/python-devicecloud
|
devicecloud/test/unit/__init__.py
|
Python
|
mpl-2.0
| 312
| 0
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.
|
org/MPL/2
|
.0/.
#
# Copyright (c) 2015 Etherios, Inc. All rights reserved.
# Etherios, Inc. is a Division of Digi International.
|
sounak98/coala-bears
|
tests/c_languages/CPPLintBearTest.py
|
Python
|
agpl-3.0
| 843
| 0
|
from bears.c_languages.CPPLintBear import CPPLintBear
from tests.LocalBearTestHelper import verify_local_bear
test_file = """
int main() {
return 0;
}
"""
CPPLintBearTest = verify_local_bear(CPPLintBear,
|
valid_files=(),
invalid_files=(test_file,),
tempfile_kwargs={'suffix': '.cpp'})
CPPLintBea
|
rIgnoreConfigTest = verify_local_bear(
CPPLintBear,
valid_files=(test_file,),
invalid_files=(),
settings={'cpplint_ignore': 'legal'},
tempfile_kwargs={'suffix': '.cpp'})
CPPLintBearLineLengthConfigTest = verify_local_bear(
CPPLintBear,
valid_files=(),
invalid_files=(test_file,),
settings={'cpplint_ignore': 'legal',
'max_line_length': '13'},
tempfile_kwargs={'suffix': '.cpp'})
|
Azure/azure-sdk-for-python
|
sdk/network/azure-mgmt-network/azure/mgmt/network/v2016_09_01/operations/_virtual_network_peerings_operations.py
|
Python
|
mit
| 22,805
| 0.004955
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from .. import models as _models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class VirtualNetworkPeeringsOperations(object):
"""VirtualNetworkPeeringsOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2016_09_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def _delete_initial(
self,
resource_group_name, # type: str
virtual_network_name, # type: str
virtual_network_peering_name, # type: str
**kwargs # type: Any
):
# type: (...) -> None
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-09-01"
# Construct URL
url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkName': self._serialize.url("virtual_network_name", virtual_network_name, 'str'),
'virtualNetworkPeeringName': self._serialize.url("virtual_network_peering_name", virtual_network_peering_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
            return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/virtualNetworkPeerings/{virtualNetworkPeeringName}'} # type: ignore
def begin_delete(
self,
resource_group_name, # type: str
virtual_network_name, # type: str
virtual_network_peering_name, # type: str
**kwargs # type: Any
):
        # type: (...) -> LROPoller[None]
"""Deletes the specified virtual network peering.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_name: The name of the virtual network.
:type virtual_network_name: str
:param virtual_network_peering_name: The name of the virtual network peering.
:type virtual_network_peering_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._delete_initial(
resource_group_name=resource_group_name,
virtual_network_name=virtual_network_name,
virtual_network_peering_name=virtual_network_peering_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkName': self._serialize.url("virtual_network_name", virtual_network_name, 'str'),
'virtualNetworkPeeringName': self._serialize.url("virtual_network_peering_name", virtual_network_peering_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/virtualNetworkPeerings/{virtualNetworkPeeringName}'} # type: ignore
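    # Illustrative call pattern for the poller returned by begin_delete above
    # (the client variable and resource names are assumptions, not part of this module):
    #
    #   poller = network_client.virtual_network_peerings.begin_delete(
    #       resource_group_name='my-rg',
    #       virtual_network_name='my-vnet',
    #       virtual_network_peering_name='my-peering')
    #   poller.result()  # block until the long-running delete completes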
def get(
self,
resource_
|
padmec-reservoir/ELLIPTIc | docs/source/conf.py | Python | mit | 5,213 | 0
# -*- coding: utf-8 -*-
#
# ELLIPTIc documentation build configuration file, created by
# sphinx-quickstart on Sat Mar 25 15:56:12 2017.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
sys.path.insert(0, os.path.abspath('../../'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = ['sphinx.ext.autodoc',
'sphinx.ext.intersphinx',
'sphinx.ext.napoleon',
'sphinx.ext.viewcode',
'sphinx.ext.mathjax',
'sphinx_autodoc_typehints']
napoleon_include_init_with_doc = False
napoleon_include_special_with_doc = True
autodoc_mock_imports = []
nitpicky = True
intersphinx_mapping = {
'python': ('https://docs.python.org/3', None),
'anytree': ('http://anytree.readthedocs.io/en/latest/', None)
}
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'elliptic'
copyright = u'2018, Universidade Federal de Pernambuco'
author = u'Guilherme Caminha'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = u'1.0.1'
# The full version, including alpha/beta/rc tags.
release = u'1.0.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = 'en'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = []
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# -- Options for HTMLHelp output ------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'ellipticdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'elliptic.tex', u'ELLIPTIc Documentation',
u'Guilherme Caminha', 'manual'),
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'elliptic', u'ELLIPTIc Documentation',
[author], 1)
]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
    (master_doc, 'elliptic', u'ELLIPTIc Documentation',
author, 'elliptic', 'The Extensible LIbrary for Physical simulaTIons.',
'Miscellaneous'),
]
|
mayankjohri/LetsExplorePython | Section 2 - Advance Python/Chapter S2.06 - Web Development/code/flask/flask_login/sample_2/app.py | Python | gpl-3.0 | 2,215 | 0
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Fri Mar 30 09:53:39 2018
@author: mayank
"""
from forms import SignupForm
from flask import Flask, request, render_template
from flask_login import LoginManager, login_user, login_required, logout_user
app = Flask(__name__)
app.secret_key = 'gMALVWEuxBSxQ44bomDOsWniejrPbhDV'
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///db/database.sqlite'
login_manager = LoginManager()
login_manager.init_app(app)
@app.route('/')
def index():
return "Welcome to Home Page"
@app.route('/signup', methods=['GET', 'POST'])
def signup():
form = SignupForm()
if request.method == 'GET':
return render_template('signup.html', form=form)
elif request.method == 'POST':
if form.validate_on_submit():
if User.query.filter_by(email=form.email.data).first():
return "!!! Email address already exists !!!"
newuser = User(form.email.data, form.password.data)
db.session.add(newuser)
db.session.flush()
db.session.commit()
login_user(newuser)
return "User created!!!"
else:
return "Form didn't validate"
@login_manager.user_loader
def load_user(email):
return User.query.filter_by(email=email).first()
@app.route('/login', methods=['GET', 'POST'])
def login():
form = SignupForm()
if request.method == 'GET':
return render_template('login.html', form=form)
elif request.method == 'POST' and form.validate_on_submit():
user = User.query.filter_by(email=form.email.data).first()
if user and user.password == form.password.data:
login_user(user)
return "User logged in"
return "<h1>Wrong username or password</h1>"
return "form not validated or invalid request"
@app.route("/logout")
@login_required
def logout():
logout_user()
return "Logged out"
@app.route('/protected')
@login_required
def protected():
return "protected area"
def init_db():
db.init_app(app)
db.app = app
db.create_all()
if __name__ == '__main__':
from models import db, User
init_db()
app.run(port=5000, host='localhost')
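The app above imports db and User from a models module that is not shown here. A minimal sketch of what that module could look like, assuming Flask-SQLAlchemy and flask_login's UserMixin (module name, columns and sizes are assumptions, not the original source):
# models.py -- assumed companion module (sketch only)
from flask_sqlalchemy import SQLAlchemy
from flask_login import UserMixin

db = SQLAlchemy()


class User(db.Model, UserMixin):
    id = db.Column(db.Integer, primary_key=True)
    email = db.Column(db.String(120), unique=True, nullable=False)
    password = db.Column(db.String(120), nullable=False)

    def __init__(self, email, password):
        self.email = email
        self.password = password

    def get_id(self):
        # load_user() in app.py looks users up by email, so use it as the login id.
        return self.email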
|
DearBytes/Remote-Integrity-Tool | dear/remote_integrity/models.py | Python | lgpl-3.0 | 6,183 | 0.001132
#!/usr/bin/env python
# Copyright (C) 2017 DearBytes B.V. - All Rights Reserved
import os
from datetime import datetime
from sqlalchemy import Column
from sqlalchemy import DateTime
from sqlalchemy import ForeignKey
from sqlalchemy import Integer
from sqlalchemy import String
from sqlalchemy import create_engine
from sqlalchemy.orm import relationship
from sqlalchemy.orm import sessionmaker
from sqlalchemy.ext.declarative import declarative_base
DATABASE_PATH = os.path.join(os.getcwd(), 'integrity.db')
engine = create_engine('sqlite:///' + DATABASE_PATH)
Base = declarative_base()
Session = sessionmaker(bind=engine)
session = Session()
class Model(object):
def to_anonymous_object(self):
"""
Convert the current model properties to an anonymous object
This is to prevent the data from not being able to be accesses outside of the main thread it was created in
:return: Object containing all keys and values that the current model does
:rtype: object
"""
return type('', (object,), self.to_dict())()
def to_dict(self):
"""
Convert the current model properties to a dict
:return: Dict containing all keys and values that the current model does
:rtype: dict
"""
return dict(((key, getattr(self, key)) for key in self.__mapper__.columns.keys()))
def values(self):
"""
Get all values in the current row as a list
:return: List containing all values that the current model does
:rtype: list
"""
return list(((getattr(self, key)) for key in self.keys()))
@classmethod
def keys(cls):
"""
Get all keys in the current row as a list
:return: List containing all keys that the current model does
:rtype: list
"""
return cls.__mapper__.columns.keys()
def delete(self):
"""
Delete the current row
:return:
"""
session.delete(self)
def __iter__(self):
values = vars(self)
for attr in self.__mapper__.columns.keys():
if attr in values:
yield [attr, values[attr]]
@classmethod
def as_list(cls):
"""
Get all results as a list
:return: List
"""
return list(cls)
@classmethod
def query(cls):
"""
Get a new reference to query
:return:
"""
return session.query(cls)
class Server(Model, Base):
__tablename__ = "servers"
id = Column(Integer, primary_key=True)
name = Column(String, nullable=False, unique=True)
@classmethod
def get(cls, name):
"""
Get the first instance of a server by name
:param name: Name of the server
:type name: str
:return: Server if found, else None
:rtype: models.Server
"""
return session.query(cls).filter(cls.name == name).one_or_none()
@classmethod
def exists(cls, name):
"""
Check if the server exists in the database
:param name: Name of the server
:return: Returns true if the server exists
:rtype: bool
"""
return cls.get(name=name) is not None
@classmethod
def create(cls, name):
"""
Create a new
:param name:
:return: Instance of the server
:rtype: models.Server
"""
server = cls(name=name)
session.add(server)
return server
def get_related_checksum(self, path, checksum):
"""
Get a related checksum by certain criteria
:param checksum: Checksum of the file
:param path: Path to the file
:type checksum: str
:type path: str
:return: Returns a checksum if one is found, otherwise None
"""
for row in self.checksums:
if row.path == path and row.checksum == checksum:
return row
class Checksum(Model, Base):
__tablename__ = "checksums"
id = Column(Integer, primary_key=True)
path = Column(String, nullable=False)
checksum = Column(String(128), nullable=False)
server = relationship(Server, backref="checksums")
server_id = Column(Integer, ForeignKey("servers.id"), index=True, nullable=False)
@classmethod
def create(cls, path, checksum, server):
"""
Create a new record and return it
:param path: Path to the file
:param checksum: File checksum
:param server: Related server ID
:type path: str
:type checksum: str
:type server: models.Server
:return: Returns the record that was just added
"""
record = cls(path=path, checksum=checksum, server=server)
session.add(record)
return record
class Event(Model, Base):
FILE_ADDED = 1
FILE_REMOVED = 2
FILE_MODIFIED = 3
__tablename__ = "events"
id = Column(Integer, primary_key=True)
event = Column(Integer, nullable=False)
description = Column(String, nullable=False)
timestamp = Column(DateTime, nullable=False)
checksum = relationship(Checksum)
checksum_id = Column(Integer, ForeignKey("checksums.id"), index=True, nullable=False)
@classmethod
def create(cls, event, description, checksum):
"""
Create a new event and store it in the database
:param event: What type of event was it (constant)
:param description: Description of the event
:param checksum: What checksum was it related to
:type event: int
:type description: str
:type checksum: models.Checksum
:return: Returns the instance of the event
"""
        record = cls(event=event, description=description, checksum=checksum, timestamp=datetime.now())
session.add(record)
return record
def create_database():
""""
Create a new database or overwrite the existing one
:return: None
"""
Base.metadata.create_all(engine)
def database_exists():
"""
Check if the database exists
:return: True if the database exists
:rtype: bool
"""
return os.path.exists(DATABASE_PATH)
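Illustrative usage of the models above (the server name, file path and checksum value are assumptions; commit handling follows the module-level session):
if not database_exists():
    create_database()

server = Server.get('web-01') or Server.create(name='web-01')
checksum = Checksum.create(path='/etc/passwd', checksum='ab' * 64, server=server)
Event.create(event=Event.FILE_MODIFIED,
             description='Checksum changed for /etc/passwd',
             checksum=checksum)
session.commit()  # persist the rows queued on the shared session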
|
wavefrontHQ/wavefront-collector | wavefront/awsmetrics.py | Python | apache-2.0 | 18,849 | 0.000955
"""
This module calls the AWS ListMetrics() API followed by multiple calls to
GetMetricStatistics() to get metrics from AWS.
A dictionary configured by the 'metrics' key in the configuration file is
used to determine which metrics should lead to a call to GetMetricStatistics().
Each metric value returned from GetMetricStatistics() is sent to the Wavefront
proxy on port 2878 (or other port if configured differently). Point tags
are picked up from the Dimensions. Source is determined by searching
the point tags for a list of "accepted" source locations
(e.g., 'Service', 'LoadBalancerName', etc).
The last run time is stored in a configuration file in
/opt/wavefront/etc/aws-metrics.conf and will be used on the next run to
determine the appropriate start time. If no configuration file is found,
the start time is determined by subtracting the delay_minutes from the
current time.
"""
import ConfigParser
import datetime
import json
import os
import os.path
import re
import logging.config
import dateutil
from wavefront.aws_common import AwsBaseMetricsCommand, AwsBaseMetricsConfiguration
from wavefront import utils
# Configuration for metrics that should be retrieved is contained in this
# configuration in a "metrics" key. This is a dictionary
# where the key is a regular expression and the value is an object with keys:
# * stats
# a list of statistics to pull down with the GetMetricStatistics() call.
# valid values are any of : 'Average', 'Maximum', 'Minimum', "SampleCount', 'Sum'
# * source_names
# an array of :
# - tag names (Dimensions)
# - Dimensions array index (0 based)
# - String literals
# The first match is returned as the source name.
#
# The key to the dictionary is a regular expression that should match a:
# <namespace>.<metric_name> (lower case with /=>.)
#
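# An example of what such a configuration file could contain (illustrative
# only; the metric key, stats and source_names below are assumptions):
#
#   {
#     "metrics": {
#       "aws.ec2.cpuutilization": {
#         "stats": ["Average", "Maximum"],
#         "source_names": ["InstanceId", 0, "aws-ec2"],
#         "priority": 1
#       }
#     }
#   }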
DEFAULT_METRIC_CONFIG_FILE = './aws-metrics.json.conf'
# Mapping for statistic name to its "short" name. The short name is used
# in the metric name sent to Wavefront
STAT_SHORT_NAMES = {
'Average': 'avg',
'Minimum': 'min',
'Maximum': 'max',
'Sum': 'sum',
'SampleCount': 'count'
}
# characters to replace in the operation when creating the metric name
SPECIAL_CHARS_REPLACE_MAP = {
'/': '-',
':': '-'
}
#pylint: disable=too-many-instance-attributes
class AwsCloudwatchConfiguration(object):
"""
Configuration for Cloudwatch
"""
def __init__(self, config, region):
super(AwsCloudwatchConfiguration, self).__init__()
self.config = config
self.section_name = 'cloudwatch_' + region
default_section_name = 'cloudwatch'
self.enabled = self.config.getboolean(
self.section_name, 'enabled', False, default_section_name)
self.workers = int(self.config.get(
self.section_name, 'workers', 1, default_section_name))
self.has_suffix_for_single_stat = self.config.getboolean(
self.section_name, 'single_stat_has_suffix', True,
default_section_name)
self.default_delay_minutes = int(self.config.get(
self.section_name, 'first_run_start_minutes', 5,
default_section_name))
self.namespace = self.config.get(
self.section_name, 'namespace', 'aws', default_section_name)
self.ec2_tag_keys = self.config.getlist(
self.section_name, 'ec2_tag_keys', [], default_section_name)
self.metric_config_path = self.config.get(
self.section_name, 'metric_config_path', DEFAULT_METRIC_CONFIG_FILE,
default_section_name)
self.start_time = self.config.getdate(
self.section_name, 'start_time', None, default_section_name)
self.end_time = self.config.getdate(
self.section_name, 'end_time', None, default_section_name)
self.last_run_time = self.config.getdate(
self.section_name, 'last_run_time', None, default_section_name)
self.update_start_end_times()
self.namespaces = set()
self.metrics_config = None
def update_start_end_times(self):
"""
Updates start/end times after last_run_time set
"""
utcnow = (datetime.datetime.utcnow()
.replace(microsecond=0, tzinfo=dateutil.tz.tzutc()))
delta = datetime.timedelta(minutes=self.default_delay_minutes)
if self.last_run_time:
if not self.start_time or self.last_run_time > self.start_time:
self.start_time = self.last_run_time - delta
self.end_time = utcnow
elif not self.start_time:
self.start_time = utcnow - delta
self.end_time = utcnow
def set_last_run_time(self, run_time):
"""
Sets the last run time to the run_time argument.
Arguments:
run_time - the time when this script last executed successfully (end)
"""
if utils.CANCEL_WORKERS_EVENT.is_set():
return
utcnow = (datetime.datetime.utcnow()
.replace(microsecond=0, tzinfo=dateutil.tz.tzutc()))
if not run_time:
run_time = utcnow
self.config.set(
self.section_name, 'last_run_time', run_time.isoformat())
self.config.save()
self.last_run_time = run_time
def validate(self):
"""
Validates configuration
"""
if not self.metric_config_path:
raise ValueError('options.metric_config_path is required')
if not os.path.exists(self.metric_config_path):
raise ValueError('ERROR: Configuration file (%s) does not exist' %
(self.metric_config_path))
def load_metric_config(self):
"""
Loads the metric configuration from the configuration file.
"""
if self.metrics_config:
return
with open(self.metric_config_path, 'r') as conffd:
config = json.load(conffd)
if 'metrics' not in config:
raise ValueError('ERROR: Configuration file (%s) is not valid' %
(self.metric_config_path))
self.metrics_config = config['metrics']
for _, config in self.metrics_config.iteritems():
if 'namespace' in config and config['namespace']:
self.namespaces.add(config['namespace'])
#pylint: disable=unsupported-membership-test
#pylint: disable=unsubscriptable-object
def get_metric_config(self, namespace, metric_name):
"""
Given a namespace and metric, get the configuration.
Arguments:
namespace - the namespace
metric_name - the metric's name
Returns:
the configuration for this namespace and metric
"""
self.load_metric_config()
current_match = None
metric = namespace.replace('/', '.').lower() + '.' + metric_name.lower()
for name, config in self.metrics_config.iteritems():
if re.match(name, metric, re.IGNORECASE):
if current_match is None or \
('priority' in current_match and \
current_match['priority'] < config['priority']):
current_match = config
return current_match
#pylint: disable=too-many-instance-attributes
class AwsMetricsConfiguration(AwsBaseMetricsConfiguration):
"""
Configuration file for this command
"""
def __init__(self, config_file_path):
super(AwsMetricsConfiguration, self).__init__(
config_file_path=config_file_path)
self.cloudwatch = {}
for region in self.regions:
self.cloudwatch[region] = AwsCloudwatchConfiguration(self, region)
def get_region_config(self, region):
"""
Gets the configuration for cloudwatch for the given region
Arguments:
region - the name of the region
"""
if region in self.cloudwatch:
return self.cloudwatch[region]
else:
return None
def validate(self):
"""
Checks that all required configuration items are set
Throws:
|
kerwinxu/barcodeManager | zxing/cpp/scons/scons-local-2.0.0.final.0/SCons/Tool/SCCS.py | Python | bsd-2-clause | 2,443 | 0.004912
"""SCons.Tool.SCCS.py
Tool-specific initialization for SCCS.
There normally shouldn't be any need to import this module directly.
It will usually be imported through the generic SCons.Tool.Tool()
selection method.
"""
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
__revision__ = "src/engine/SCons/Tool/SCCS.py 5023 2010/06/14 22:05:46 scons"
import SCons.Action
import SCons.Builder
import SCons.Util
def generate(env):
"""Add a Builder factory function and construction variables for
SCCS to an Environment."""
def SCCSFactory(env=env):
""" """
import SCons.Warnings as W
W.warn(W.DeprecatedSourceCodeWarning, """The SCCS() factory is deprecated and there is no replacement.""")
act = SCons.Action.Action('$SCCSCOM', '$SCCSCOMSTR')
return SCons.Builder.Builder(action = act, env = env)
#setattr(env, 'SCCS', SCCSFactory)
env.SCCS = SCCSFactory
env['SCCS'] = 'sccs'
env['SCCSFLAGS'] = SCons.Util.CLVar('')
env['SCCSGETFLAGS'] = SCons.Util.CLVar('')
env['SCCSCOM'] = '$SCCS $SCCSFLAGS get $SCCSGETFLAGS $TARGET'
def exists(env):
return env.Detect('sccs')
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
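For context, a rough sketch of how the deprecated factory above might be wired into an SConstruct (assumed usage; SourceCode itself is likewise deprecated in modern SCons):
# SConstruct (sketch): fetch missing sources from SCCS before building.
env = Environment(tools=['SCCS'])
env.SourceCode('.', env.SCCS())
env.Program('hello', 'hello.c')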
|
cmheisel/agile-analytics | docs/conf.py | Python | mit | 9,780 | 0.000102
# -*- coding: utf-8 -*-
#
# agile-analytics documentation build configuration file, created by
# sphinx-quickstart on Fri Jun 17 13:58:53 2016.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.todo',
'sphinx.ext.coverage',
'sphinx.ext.viewcode',
'sphinx.ext.githubpages',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'agile-analytics'
copyright = u'2016, Chris Heisel'
author = u'Chris Heisel'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = u'0.1'
# The full version, including alpha/beta/rc tags.
release = u'0.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#
# today = ''
#
# Else, today_fmt is used as the format for a strftime call.
#
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
# keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'alabaster'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []
# The name for this set of Sphinx documents.
# "<project> v<release> documentation" by default.
#
# html_title = u'agile-analytics v0.1'
# A shorter title for the navigation bar. Default is the same as html_title.
#
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#
# html_logo = None
# The name of an image file (relative to this directory) to use as a favicon of
# the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#
# html_extra_path = []
# If not None, a 'Last updated on:' timestamp is inserted at every page
# bottom, using the given strftime format.
# The empty string is equivalent to '%b %d, %Y'.
#
# html_last_updated_fmt = None
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#
# html_additional_pages = {}
# If false, no module index is generated.
#
# html_domain_indices = True
# If false, no index is generated.
#
# html_use_index = True
# If true, the index is split into individual pages for each letter.
#
# html_split_index = False
# If true, links to the reST sources are added to the pages.
#
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr', 'zh'
#
# html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# 'ja' uses this config value.
# 'zh' user can custom change `jieba` dictionary path.
#
# html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#
# html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'agile-analyticsdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'agile-analytics.tex', u'agile-analytics Documentation',
u'Chris Heisel', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#
# latex_logo = Non
|
madi/DeadTrees-BDEOSS | clipshape.py | Python | gpl-3.0 | 4,289 | 0.006761
__author__ = "Laura Martinez Sanchez"
__license__ = "GPL"
__version__ = "1.0"
__email__ = "lmartisa@gmail.com"
from osgeo import gdal, gdalnumeric, ogr, osr
import numpy as np
from PIL import Image, ImageDraw
from collections import defaultdict
import pickle
import time
from texture_common import *
#Uses a gdal geomatrix (gdal.GetGeoTransform()) to calculate the pixel location of a geospatial coordinate
def world2Pixel(geoMatrix, x, y):
ulX = geoMatrix[0]
ulY = geoMatrix[3]
xDist = geoMatrix[1]
yDist = geoMatrix[5]
rtnX = geoMatrix[2]
rtnY = geoMatrix[4]
pixel = int((x - ulX) / xDist)
line = int((y - ulY) / yDist)
return (pixel, line)
#Converts a Python Imaging Library array to a gdalnumeric image.
def imageToArray(i):
'''
Converts a Python Imaging Library (PIL) array to a gdalnumeric image.
'''
a = gdalnumeric.fromstring(i.tobytes(), 'b')
a.shape = i.im.size[1], i.im.size[0]
return a
def ReadClipArray(lrY, ulY, lrX, ulX, img):
clip = np.empty((img.RasterCount, lrY - ulY, lrX - ulX))
#Read only the pixels needed for do the clip
for band in range(img.RasterCount):
band += 1
imgaux = img.GetRasterBand(band).ReadAsArray(ulX, ulY, lrX - ulX, lrY - ulY)
clip[band - 1] = imgaux
return clip
#Does the clip of the shape
def ObtainPixelsfromShape(field, rasterPath, shapePath, INX, *args):
# field='zona'
# open dataset, also load as a gdal image to get geotransform
# INX can be false. If True, uses additional layers.
print "Starting clip...."
start = time.time()
if args:
texture_train_Path = args[0]
print texture_train_Path
img, textArrayShp = createTextureArray(texture_train_Path, rasterPath)
else:
#print"Indexes = False"
img = gdal.Open(rasterPath)
geoTrans = img.GetGeoTransform()
geoTransaux = img.GetGeoTransform()
proj = img.GetProjection()
#open shapefile
driver = ogr.GetDriverByName("ESRI Shapefile")
dataSource = driver.Open(shapePath, 0)
layer = dataSource.GetLayer()
clipdic = defaultdict(list)
    count = 0
    # Convert the layer extent to image pixel coordinates; we read only the pixels needed
for feature in layer:
minX, maxX, minY, maxY = feature.GetGeometryRef().GetEnvelope()
geoTrans = img.GetGeoTransform()
ulX, ulY = world2Pixel(geoTrans, minX, maxY)
        lrX, lrY = world2Pixel(geoTrans, maxX, minY)
#print ulX,lrX,ulY,lrY
# Calculate the pixel size of the new image
pxWidth = int(lrX - ulX)
pxHeight = int(lrY - ulY)
clip = ReadClipArray(lrY, ulY, lrX, ulX, img)
#EDIT: create pixel offset to pass to new image Projection info
xoffset = ulX
yoffset = ulY
#print "Xoffset, Yoffset = ( %d, %d )" % ( xoffset, yoffset )
# Create a new geomatrix for the image
geoTrans = list(geoTrans)
geoTrans[0] = minX
geoTrans[3] = maxY
# Map points to pixels for drawing the boundary on a blank 8-bit, black and white, mask image.
points = []
pixels = []
geom = feature.GetGeometryRef()
pts = geom.GetGeometryRef(0)
[points.append((pts.GetX(p), pts.GetY(p))) for p in range(pts.GetPointCount())]
[pixels.append(world2Pixel(geoTrans, p[0], p[1])) for p in points]
rasterPoly = Image.new("L", (pxWidth, pxHeight), 1)
rasterize = ImageDraw.Draw(rasterPoly)
rasterize.polygon(pixels, 0)
mask = imageToArray(rasterPoly)
#SHow the clips of the features
# plt.imshow(mask)
# plt.show()
# Clip the image using the mask into a dict
temp = gdalnumeric.choose(mask, (clip, np.nan))
# #SHow the clips of the image
# plt.imshow(temp[4])
# plt.show()
temp = np.concatenate(temp.T)
temp = temp[~np.isnan(temp[:, 0])] #NaN
#print temp.shape
clipdic[str(feature.GetField(field))].append(temp)
count += temp.shape[0]
end = time.time()
print "Time clipshape:"
print (end - start)
print "count", count
return clipdic, count
##########################################################################
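A quick numeric check of the geotransform arithmetic in world2Pixel above (the 10 m, north-up geotransform and the coordinates are assumed values):
geo = (500000.0, 10.0, 0.0, 4600000.0, 0.0, -10.0)
print(world2Pixel(geo, 500250.0, 4599900.0))
# pixel = (500250 - 500000) / 10 = 25, line = (4599900 - 4600000) / -10 = 10 -> (25, 10)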
|
jagg81/translate-toolkit | translate/lang/fa.py | Python | gpl-2.0 | 2,471 | 0.002034
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2007, 2010 Zuza Software Foundation
#
# This file is part of translate.
#
# translate is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# translate is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with translate; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""T
|
his module represents Persian language.
For more information, see U{http://en.wikipedia.org/wiki/Persian_language}
"""
from translate.lang import common
import re
def guillemets(text):
def convertquotation(match):
prefix = match.group(1)
# Let's see that we didn't perhaps match an XML tag property like
# <a href="something">
if prefix == u"=":
return match.group(0)
return u"%s«%s»" % (prefix, match.group(2))
# Check that there is an even number of double quotes, otherwise it is
# probably not safe to convert them.
if text.count(u'"') % 2 == 0:
text = re.sub('(.|^)"([^"]+)"', convertquotation, text)
singlecount = text.count(u"'")
if singlecount:
if singlecount == text.count(u'`'):
text = re.sub("(.|^)`([^']+)'", convertquotation, text)
elif singlecount % 2 == 0:
text = re.sub("(.|^)'([^']+)'", convertquotation, text)
text = re.sub(u'(.|^)“([^”]+)”', convertquotation, text)
return text
class fa(common.Common):
"""This class represents Persian."""
listseperator = u"، "
puncdict = {
u",": u"،",
u";": u"؛",
u"?": u"؟",
#This causes problems with variables, so commented out for now:
#u"%": u"٪",
}
ignoretests = ["startcaps", "simplecaps"]
#TODO: check persian numerics
#TODO: zwj and zwnj?
def punctranslate(cls, text):
"""Implement "French" quotation marks."""
text = super(cls, cls).punctranslate(text)
return guillemets(text)
punctranslate = classmethod(punctranslate)
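A small illustration of the quotation handling defined above (the input string is an assumption):
print(guillemets(u'He said "salam" and left'))
# -> He said «salam» and left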
|
chadversary/chromiumos.chromite | scripts/cbuildbot.py | Python | bsd-3-clause | 73,676 | 0.007397
#!/usr/bin/python
# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Main builder code for Chromium OS.
Used by Chromium OS buildbot configuration for all Chromium OS builds including
full and pre-flight-queue builds.
"""
import collections
import distutils.version
import glob
import json
import logging
import multiprocessing
import optparse
import os
import pickle
import sys
import tempfile
import traceback
from chromite.cbuildbot import afdo
from chromite.cbuildbot import cbuildbot_config
from chromite.cbuildbot import cbuildbot_run
from chromite.cbuildbot import constants
from chromite.cbuildbot import failures_lib
from chromite.cbuildbot import manifest_version
from chromite.cbuildbot import remote_try
from chromite.cbuildbot import repository
from chromite.cbuildbot import results_lib
from chromite.cbuildbot import tee
from chromite.cbuildbot import trybot_patch_pool
from chromite.cbuildbot.stages import afdo_stages
from chromite.cbuildbot.stages import artifact_stages
from chromite.cbuildbot.stages import branch_stages
from chromite.cbuildbot.stages import build_stages
from chromite.cbuildbot.stages import chrome_stages
from chromite.cbuildbot.stages import completion_stages
from chromite.cbuildbot.stages import generic_stages
from chromite.cbuildbot.stages import release_stages
from chromite.cbuildbot.stages import report_stages
from chromite.cbuildbot.stages import sdk_stages
from chromite.cbuildbot.stages import sync_stages
from chromite.cbuildbot.stages import test_stages
from chromite.lib import cgroups
from chromite.lib import cleanup
from chromite.lib import commandline
from chromite.lib import cros_build_lib
from chromite.lib import gclient
from chromite.lib import gerrit
from chromite.lib import git
from chromite.lib import osutils
from chromite.lib import patch as cros_patch
from chromite.lib import parallel
from chromite.lib import sudo
from chromite.lib import timeout_util
import mock
_DEFAULT_LOG_DIR = 'cbuildbot_logs'
_BUILDBOT_LOG_FILE = 'cbuildbot.log'
_DEFAULT_EXT_BUILDROOT = 'trybot'
_DEFAULT_INT_BUILDROOT = 'trybot-internal'
_BUILDBOT_REQUIRED_BINARIES = ('pbzip2',)
_API_VERSION_ATTR = 'api_version'
def _PrintValidConfigs(display_all=False):
"""Print a list of valid buildbot configs.
Args:
display_all: Print all configs. Otherwise, prints only configs with
trybot_list=True.
"""
def _GetSortKey(config_name):
config_dict = cbuildbot_config.config[config_name]
return (not config_dict['trybot_list'], config_dict['description'],
config_name)
COLUMN_WIDTH = 45
print
print 'config'.ljust(COLUMN_WIDTH), 'description'
print '------'.ljust(COLUMN_WIDTH), '-----------'
config_names = cbuildbot_config.config.keys()
config_names.sort(key=_GetSortKey)
for name in config_names:
if display_all or cbuildbot_config.config[name]['trybot_list']:
desc = cbuildbot_config.config[name].get('description')
desc = desc if desc else ''
print name.ljust(COLUMN_WIDTH), desc
print
def _GetConfig(config_name):
"""Gets the configuration for the build if it exists, None otherwise."""
if cbuildbot_config.config.has_key(config_name):
return cbuildbot_config.config[config_name]
def AcquirePoolFromOptions(options):
"""Generate patch objects from passed in options.
Args:
options: The options object generated by optparse.
Returns:
trybot_patch_pool.TrybotPatchPool object.
Raises:
gerrit.GerritException, cros_patch.PatchException
"""
gerrit_patches = []
local_patches = []
remote_patches = []
if options.gerrit_patches:
gerrit_patches = gerrit.GetGerritPatchInfo(
options.gerrit_patches)
for patch in gerrit_patches:
if patch.IsAlreadyMerged():
cros_build_lib.Warning('Patch %s has already been merged.' % str(patch))
if options.local_patches:
manifest = git.ManifestCheckout.Cached(options.sourceroot)
local_patches = cros_patch.PrepareLocalPatches(manifest,
options.local_patches)
if options.remote_patches:
remote_patches = cros_patch.PrepareRemotePatches(
options.remote_patches)
return trybot_patch_pool.TrybotPatchPool(gerrit_patches, local_patches,
remote_patches)
class Builder(object):
"""Parent class for all builder types.
This class functions as an abstract parent class for various build types.
Its intended use is builder_instance.Run().
Attributes:
_run: The BuilderRun object for this run.
archive_stages: Dict of BuildConfig keys to ArchiveStage values.
patch_pool: TrybotPatchPool.
"""
def __init__(self, builder_run):
"""Initializes instance variables. Must be called by all subclasses."""
self._run = builder_run
if self._run.config.chromeos_official:
os.environ['CHROMEOS_OFFICIAL'] = '1'
self.archive_stages = {}
self.patch_pool = trybot_patch_pool.TrybotPatchPool()
self._build_image_lock = multiprocessing.Lock()
def Initialize(self):
"""Runs through the initialization steps of an actual build."""
if self._run.options.resume:
results_lib.LoadCheckpoint(self._run.buildroot)
self._RunStage(build_stages.CleanUpStage)
def _GetStageInstance(self, stage, *args, **kwargs):
"""Helper function to get a stage instance given the args.
Useful as almost all stages just take in builder_run.
"""
# Normally the default BuilderRun (self._run) is used, but it can
# be overridden with "builder_run" kwargs (e.g. for child configs).
builder_run = kwargs.pop('builder_run', self._run)
return stage(builder_run, *args, **kwargs)
def _SetReleaseTag(self):
"""Sets run.attrs.release_tag from the manifest manager used in sync.
Must be run after sync stage as syncing enables us to have a release tag,
and must be run before any usage of attrs.release_tag.
TODO(mtennant): Find a bottleneck place in syncing that can set this
directly. Be careful, as there are several kinds of syncing stages, and
sync stages have been known to abort with sys.exit calls.
"""
manifest_manager = getattr(self._run.attrs, 'manifest_manager', None)
if manifest_manager:
self._run.attrs.release_tag = manifest_manager.current_version
else:
self._run.attrs.release_tag = None
cros_build_lib.Debug('Saved release_tag value for run: %r',
self._run.attrs.release_tag)
def _RunStage(self, stage, *args, **kwargs):
"""Wrapper to run a stage.
Args:
stage: A BuilderStage class.
args: args to pass to stage constructor.
kwargs: kwargs to pass to stage constructor.
Returns:
Whatever the stage's Run method returns.
"""
stage_instance = self._GetStageInstance(stage, *args, **kwargs)
return stage_instance.Run()
@staticmethod
def _RunParallelStages(stage_objs):
"""Run the specified stages in parallel.
Args:
stage_objs: BuilderStage objects.
"""
steps = [stage.Run for stage in stage_objs]
try:
parallel.RunParallelSteps(steps)
except BaseException as ex:
# If a stage threw an exception, it might not have correctly reported
# results (e.g. because it was killed before it could report the
# results.) In this case, attribute the exception to any stages that
# didn't report back correctly (if any).
for stage in stage_objs:
for name in stage.GetStageNames():
if not results_lib.Results.StageHasResults(name):
results_lib.Results.Record(name, ex, str(ex))
raise
def _RunSyncStage(self, sync_instance):
"""Run given |sync_instance| stage and be sure attrs.release_tag set."""
try:
sync_instance.Run()
finally:
self._SetReleaseTag()
def GetSyncInstance(self):
"""Returns an instance of a SyncStage that should be run.
Subclasses must override this method.
"""
raise NotImplementedError()
def GetCompletionInstance(se
|
lucdom/xCrawler | xcrawler/compatibility/string_converter/string_converter_python3.py | Python | gpl-2.0 | 509 | 0.003929
from xcrawler.compatibility.string_converter.compatible_string_converter import CompatibleStringConverter
class StringConverterPython3(CompatibleStringConverter):
"""A Python 3 compatible class for converting a string to a specified type.
"""
    def convert_to_string(self, string):
string = self.try_convert_to_unicode_string(string)
return string
    def list_convert_to_string(self, list_strings):
return [self.try_convert_to_unicode_string(s) for s in list_strings]
|
AustinHartman/randomPrograms | primFinder.py | Python | gpl-3.0 | 209 | 0.004785
upperLimit = 1000
oddCounter = 3
oddList = []
n = 0
# Collect every odd number below the limit as a prime candidate.
while upperLimit >= oddCounter:
    oddList.append(oddCounter)
    oddCounter += 2
# Drop candidates that have a smaller odd divisor, leaving the odd primes.
while n < len(oddList):
    if any(oddList[n] % d == 0 for d in range(3, oddList[n], 2)):
        oddList.pop(n)
    else:
        n += 1
print(oddList)
|
openqt/algorithms | leetcode/python/lc295-find-median-from-data-stream.py | Python | gpl-3.0 | 1,385 | 0.00722
# coding=utf-8
import unittest
"""295. Find Median from Data Stream
https://leetcode.com/problems/find-median-from-data-stream/description/
Median is the middle value in an ordered integer list. If the size of the list
is even, there is no middle value. So the median is the mean of the two middle
value.
For example,
`[2,3,4]`, the median is `3`
`[2,3]`, the median is `(2 + 3) / 2 = 2.5`
Design a data structure that supports the following two operations:
* void addNum(int num) - Add a integer number from the data stream to the data structure.
* double findMedian() - Return the median of all elements so far.
**Example:**
addNum(1)
addNum(2)
findMedian() -> 1.5
addNum(3)
findMedian() -> 2
Similar Questions:
Sliding Window Median (sliding-window-median)
"""
class MedianFinder(object):
def __init__(self):
"""
        initialize your data structure here.
"""
def addNum(self, num):
"""
:type num: int
:rtype: void
"""
def findMedian(self):
"""
:rtype: float
"""
# Your MedianFinder object will be instantiated and called as such:
# obj = MedianFinder()
# obj.addNum(num)
# param_2 = obj.findMedian()
def test(self):
pass
if __name__ == "__main__":
unittest.main()
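One common way to complete the stub above is the two-heap approach; the sketch below is illustrative and not taken from the repository:
import heapq


class TwoHeapMedianFinder(object):
    def __init__(self):
        self.lo = []  # max-heap of the lower half (values stored negated)
        self.hi = []  # min-heap of the upper half

    def addNum(self, num):
        heapq.heappush(self.lo, -num)
        heapq.heappush(self.hi, -heapq.heappop(self.lo))
        if len(self.hi) > len(self.lo):
            heapq.heappush(self.lo, -heapq.heappop(self.hi))

    def findMedian(self):
        if len(self.lo) > len(self.hi):
            return float(-self.lo[0])
        return (-self.lo[0] + self.hi[0]) / 2.0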
|
caglar10ur/func | func/minion/modules/nm/logger.py | Python | gpl-2.0 | 3,426 | 0.017805
"""A very simple logger that tries to be concurrency-safe."""
import os, sys
import time
import traceback
import subprocess
import select
LOG_FILE = '/var/log/nodemanager.func'
# basically define 3 levels
LOG_NONE=0
LOG_NODE=1
LOG_VERBOSE=2
# default is to log a reasonable amount of stuff for when running on operational nodes
LOG_LEVEL=1
def set_level(level):
global LOG_LEVEL
assert level in [LOG_NONE,LOG_NODE,LOG_VERBOSE]
LOG_LEVEL=level
def verbose(msg):
log('(v) '+msg,LOG_VERBOSE)
def log(msg,level=LOG_NODE):
"""Write <msg> to the log file if level >= current log level (default LOG_NODE)."""
if (level > LOG_LEVEL):
return
try:
fd = os.open(LOG_FILE, os.O_WRONLY | os.O_CREAT | os.O_APPEND, 0600)
if not msg.endswith('\n'): msg += '\n'
os.write(fd, '%s: %s' % (time.asctime(time.gmtime()), msg))
os.close(fd)
except OSError:
sys.stderr.write(msg)
sys.stderr.flush()
def log_exc(msg="",name=None):
"""Log the traceback resulting from an exception."""
if name:
log("%s: EXCEPTION caught <%s> \n %s" %(name, msg, traceback.format_exc()))
else:
log("EXCEPTION caught <%s> \n %s" %(msg, traceback.format_exc()))
#################### child processes
# avoid waiting until the process returns;
# that makes debugging of hanging children hard
class Buffer:
def __init__ (self,message='log_call: '):
self.buffer=''
self.message=message
def add (self,c):
self.buffer += c
if c=='\n': self.flush()
def flush (self):
if self.buffer:
log (self.message + self.buffer)
self.buffer=''
# time out in seconds - avoid hanging subprocesses - default is 5 minutes
default_timeout_minutes=5
# returns a bool that is True when everything goes fine and the retcod is 0
def log_call(command,timeout=default_timeout_minutes*60,poll=1):
message=" ".join(command)
log("log_call: running command %s" % message)
verbose("log_call: timeout=%r s" % timeout)
verbose("log_call: poll=%r s" % poll)
    trigger=time.time()+timeout
result = False
try:
child = subprocess.Popen(command, bufsize=1,
stdout=subprocess.PIPE, stderr=subprocess.STDOUT, close_fds=True)
buffer = Buffer()
while True:
# see if anything can be read within the poll interval
(r,w,x)=select.select([child.stdout],[],[],poll)
if r: buffer.add(child.stdout.read(1))
# is process over ?
returncode=child.poll()
# yes
if returncode != None:
buffer.flush()
# child is done and return 0
if returncode == 0:
log("log_call:end command (%s) completed" % message)
result=True
break
# child has failed
else:
log("log_call:end command (%s) returned with code %d" %(message,returncode))
break
# no : still within timeout ?
if time.time() >= trigger:
buffer.flush()
child.terminate()
log("log_call:end terminating command (%s) - exceeded timeout %d s"%(message,timeout))
break
except: log_exc("failed to run command %s" % message)
return result
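Illustrative use of the module above (the command and log level are assumptions):
set_level(LOG_VERBOSE)
ok = log_call(['/bin/sleep', '1'], timeout=10, poll=0.5)
log('sleep finished ok' if ok else 'sleep failed or timed out')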
|
PaulBrownMagic/LED_Arcade | displays/writer.py | Python | gpl-3.0 | 1,026 | 0.000975
import numpy as np
from displays.letters import ALPHABET
class Writer:
"""Produce scrolling text for the LED display, frame by frame"""
def __init__(self):
self.font = ALPHABET
self.spacer = np.zeros([8, 1], dtype=int)
self.phrase = None
def make_phrase(self, phrase):
        """Convert a string into a long numpy array with spacing"""
# phrase.lower() called because ALPHABET currently doesn't have capitals
converted = [np.hstack([self.font[letter], self.spacer])
for letter in phrase.lower()]
self.phrase = np.hstack(converted)
def generate_frames(self):
"""Produce single 8*8 frames scrolling across phrase"""
height, width = np.shape(self.phrase)
for frame in range(width - 8):
yield self.phrase[:, frame:frame + 8]
def write(self, phrase):
"""Easily get frames for a phrase"""
self.make_phrase(phrase)
for frame in self.generate_frames():
yield frame
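Illustrative driver loop for the Writer above (the display call is an assumption; a real board would blit each frame to the LED matrix):
import time

w = Writer()
for frame in w.write('hello'):
    print(frame)        # frame is an 8x8 numpy array slice of the phrase
    time.sleep(0.05)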
|
jalanb/jab | src/python/site/οs.py | Python | mit | 291 | 0
"""Handle nice names"""
import base64
from pysyte.oss import platforms
def nice(data):
    return base64.b64encode(bytes(data, 'utf-8'))
def name(data):
return base64.b64decode(data).decode('utf-8')
def chmod(data, *_):
platforms.put_clipboard_data(name(data))
return ''
|
hfp/tensorflow-xsmm | tensorflow/contrib/seq2seq/python/kernel_tests/beam_search_ops_test.py | Python | apache-2.0 | 5,574 | 0.005203
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for contrib.seq2seq.python.seq2seq.beam_search_ops."""
# pylint: disable=unused-import,g-bad-import-order
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# pylint: enable=unused-import
import itertools
import numpy as np
from tensorflow.contrib.seq2seq.python.ops import beam_search_ops
from tensorflow.python.framework import ops
from tensorflow.python.platform import test
def _transpose_batch_time(x):
return np.transpose(x, [1, 0, 2]).astype(np.int32)
class GatherTreeTest(test.TestCase):
def testGatherTreeOne(self):
# (max_time = 4, batch_size = 1, beams = 3)
end_token = 10
step_ids = _transpose_batch_time(
[[[1, 2, 3], [4, 5, 6], [7, 8, 9], [-1, -1, -1]]])
parent_ids = _transpose_batch_time(
[[[0, 0, 0], [0, 1, 1], [2, 1, 2], [-1, -1, -1]]])
max_sequence_lengths = [3]
expected_result = _transpose_batch_time([[[2, 2, 2], [6, 5, 6], [7, 8, 9],
[10, 10, 10]]])
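    # Why this is the expected output: gather_tree backtracks from the last
    # valid step (t = max_sequence_lengths[0] - 1 = 2).  For beam 0 it takes id
    # 7 at t=2, follows parent 2 to id 6 at t=1, then parent 1 to id 2 at t=0,
    # giving [2, 6, 7]; steps beyond the sequence length become end_token 10.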
beams = beam_search_ops.gather_tree(
step_ids=step_ids,
parent_ids=parent_ids,
max_sequence_lengths=max_sequence_lengths,
end_token=end_token)
with self.session(use_gpu=True):
self.assertAllEqual(expected_result, beams.eval())
def testBadParentValuesOnCPU(self):
# (batch_size = 1, max_time = 4, beams = 3)
# bad parent in beam 1 time 1
end_token = 10
step_ids = _transpose_batch_time(
[[[1, 2, 3], [4, 5, 6], [7, 8, 9], [-1, -1, -1]]])
parent_ids = _transpose_batch_time(
[[[0, 0, 0], [0, -1, 1], [2, 1, 2], [-1, -1, -1]]])
max_sequence_lengths = [3]
with ops.device("/cpu:0"):
beams = beam_search_ops.gather_tree(
step_ids=step_ids,
parent_ids=parent_ids,
max_sequence_lengths=max_sequence_lengths,
end_token=end_token)
with self.cached_session():
with self.assertRaisesOpError(
r"parent id -1 at \(batch, time, beam\) == \(0, 0, 1\)"):
_ = beams.eval()
def testBadParentValuesOnGPU(self):
# Only want to run this test on CUDA devices, as gather_tree is not
# registered for SYCL devices.
if not test.is_gpu_available(cuda_only=True):
return
# (max_time = 4, batch_size = 1, beams = 3)
# bad parent in beam 1 time 1; appears as a negative index at time 0
end_token = 10
step_ids = _transpose_batch_time(
[[[1, 2, 3], [4, 5, 6], [7, 8, 9], [-1, -1, -1]]])
parent_ids = _transpose_batch_time(
[[[0, 0, 0], [0, -1, 1], [2, 1, 2], [-1, -1, -1]]])
max_sequence_lengths = [3]
expected_result = _transpose_batch_time([[[2, -1, 2], [6, 5, 6], [7, 8, 9],
[10, 10, 10]]])
with ops.device("/device:GPU:0"):
beams = beam_search_ops.gather_tree(
step_ids=step_ids,
parent_ids=parent_ids,
max_sequence_lengths=max_sequence_lengths,
end_token=end_token)
with self.session(use_gpu=True):
self.assertAllEqual(expected_result, beams.eval())
def testGatherTreeBatch(self):
batch_size = 10
beam_width = 15
max_time = 8
max_sequence_lengths = [0, 1, 2, 4, 7, 8, 9, 10, 11, 0]
end_token = 5
with self.session(use_gpu=True):
step_ids = np.random.randint(
0, high=end_token + 1, size=(max_time, batch_size, beam_width))
parent_ids = np.random.randint(
0, high=beam_width - 1, size=(max_time, batch_size, beam_width))
beams = beam_search_ops.gather_tree(
step_ids=step_ids.astype(np.in
|
t32),
parent_ids=parent_ids.astype(np.int32),
max_sequence_lengths=max_sequence_lengths,
end_token=end_token)
self.
|
assertEqual((max_time, batch_size, beam_width), beams.shape)
beams_value = beams.eval()
for b in range(batch_size):
# Past max_sequence_lengths[b], we emit all end tokens.
b_value = beams_value[max_sequence_lengths[b]:, b, :]
self.assertAllClose(b_value, end_token * np.ones_like(b_value))
for batch, beam in itertools.product(
range(batch_size), range(beam_width)):
v = np.squeeze(beams_value[:, batch, beam])
if end_token in v:
found_bad = np.where(v == -1)[0]
self.assertEqual(0, len(found_bad))
found = np.where(v == end_token)[0]
found = found[0] # First occurrence of end_token.
# If an end_token is found, everything before it should be a
# valid id and everything after it should be -1.
if found > 0:
self.assertAllEqual(
v[:found - 1] >= 0, np.ones_like(v[:found - 1], dtype=bool))
self.assertAllClose(v[found + 1:],
end_token * np.ones_like(v[found + 1:]))
if __name__ == "__main__":
test.main()
|
LeGoldFish/DTR2Sync
|
setup.py
|
Python
|
mit
| 79
| 0
|
from distutil
|
s.core import setup
import py2exe
setup(console=['DTR2Sync.py
|
'])
|
Mester/demo-day-vikings
|
tests/test_post.py
|
Python
|
unlicense
| 671
| 0.004471
|
import unittest
from music_app.post import Post
class TestPost(unittest.TestCase):
"""Class to test the Post Class"""
def test_object_creation(self):
p = Post('Promises', 'Dreamers', 'rock', '2014', 8, 'http://example.com', 146666666.66, 'https://www.youtube.com')
self.assertEqual(p.title, 'Promises')
self.assertEqual(p.artist, 'Dreamers')
self.assertEqual(p.genre, 'rock')
self.as
|
sertEqual(p.year, '2014')
self.assertEqual(p.score, 8)
self.asser
|
tEqual(p.thumbnail, 'http://example.com')
self.assertEqual(p.timestamp, 146666666.66)
self.assertEqual(p.url, 'https://www.youtube.com')
|
spookylukey/django-paypal
|
paypal/standard/pdt/tests/test_urls.py
|
Python
|
mit
| 755
| 0.001325
|
from __future__ import unicode_literals
try:
from django.urls import re_path
except ImportError:
from django.conf.urls import url as re_path
from django.shortcuts import render
from django.views.decorators.http import require_GET
from paypal.standard.pdt.vi
|
ews import process_pdt
@require_GET
def pdt(request, template="pdt/pdt.html", context=None):
"
|
""Standard implementation of a view that processes PDT and then renders a template
For more advanced uses, create your own view and call process_pdt.
"""
pdt_obj, failed = process_pdt(request)
context = context or {}
context.update({"failed": failed, "pdt_obj": pdt_obj})
return render(request, template, context)
urlpatterns = [
re_path(r'^pdt/$', pdt),
]
|
listyque/TACTIC-Handler
|
thlib/side/python_minifier/rename/renamer.py
|
Python
|
epl-1.0
| 6,672
| 0.000899
|
import ast
from python_minifier.rename.binding import NameBinding
from python_minifier.rename.name_generator import name_filter
from python_minifier.rename.util import is_namespace
def all_bindings(node):
"""
All bindings in a module
:param node: The module to get bindings in
:type node: :class:`ast.AST`
:rtype: Iterable[ast.AST, Binding]
"""
if is_namespace(node):
for binding in node.bindings:
yield node, binding
for child in ast.iter_child_nodes(node):
for namespace, binding in all_bindings(child):
yield namespace, binding
def sorted_bindings(module):
"""
All bindings in a modules sorted by descending number of references
:param module: The module
|
to get bindings in
:type module: :class:`ast.AST`
:rtype: Iterable[ast.AST, Binding]
"""
def comp(tup):
namespace, binding = tup
return len(binding.references)
return sorted(all_bindings(module), key=comp, reverse=True)
def reservation_scope(namespace, binding):
"""
Get the namespaces that are in the bindings reservation scope
Returns the namespace nodes the binding name must be resolv
|
able in
:param namespace: The local namespace of a binding
:type namespace: :class:`ast.AST`
:param binding: The binding to get the reservation scope for
:type binding: Binding
:rtype: set[ast.AST]
"""
namespaces = set([namespace])
for node in binding.references:
while node is not namespace:
namespaces.add(node.namespace)
node = node.namespace
return namespaces
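# Example: a binding defined in a function but referenced from a nested function
# has both the defining namespace and every namespace between the reference and
# that definition in its reservation scope, so a new name must be unused in all
# of them.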
def add_assigned(node):
"""
Add the assigned_names attribute to namespace nodes in a module
:param node: The module to add the assigned_names attribute to
:type node: :class:`ast.Module`
"""
if is_namespace(node):
node.assigned_names = set()
for child in ast.iter_child_nodes(node):
add_assigned(child)
def reserve_name(name, reservation_scope):
"""
Reserve a name in a reservation scope
:param str name: The name to reserve
:param reservation_scope:
:type reservation_scope: Iterable[:class:`ast.AST`]
"""
for namespace in reservation_scope:
namespace.assigned_names.add(name)
class UniqueNameAssigner(object):
"""
Assign new names to renamed bindings
Assigns a unique name to every binding
"""
def __init__(self):
self.name_generator = name_filter()
self.names = []
def available_name(self):
return next(self.name_generator)
def __call__(self, module):
assert isinstance(module, ast.Module)
for namespace, binding in sorted_bindings(module):
if binding.allow_rename:
binding.new_name = self.available_name()
return module
class NameAssigner(object):
"""
Assign new names to renamed bindings
This assigner creates a name 'reservation scope' containing each namespace a binding is referenced in, including
transitive namespaces. Bindings are then assigned the first available name that has no references in their
reservation scope. This means names will be reused in sibling namespaces, and shadowed where possible in child
namespaces.
Bindings are assigned names in order of most references, with names assigned shortest first.
"""
def __init__(self, name_generator=None):
self.name_generator = name_generator if name_generator is not None else name_filter()
self.names = []
def iter_names(self):
for name in self.names:
yield name
while True:
name = next(self.name_generator)
self.names.append(name)
yield name
def available_name(self, reservation_scope, prefix=''):
"""
Search for the first name that is not in reservation scope
"""
for name in self.iter_names():
if self.is_available(prefix + name, reservation_scope):
return prefix + name
def is_available(self, name, reservation_scope):
"""
Is a name unreserved in a reservation scope
:param str name: the name to check availability of
:param reservation_scope: The scope to check
:type reservation_scope: Iterable[:class:`ast.AST`]
:rtype: bool
"""
for namespace in reservation_scope:
if name in namespace.assigned_names:
return False
return True
def __call__(self, module, prefix_globals, reserved_globals=None):
assert isinstance(module, ast.Module)
add_assigned(module)
for namespace, binding in all_bindings(module):
if binding.reserved is not None:
scope = reservation_scope(namespace, binding)
reserve_name(binding.reserved, scope)
if reserved_globals is not None:
for name in reserved_globals:
module.assigned_names.add(name)
for namespace, binding in sorted_bindings(module):
scope = reservation_scope(namespace, binding)
if binding.allow_rename:
if isinstance(namespace, ast.Module) and prefix_globals:
name = self.available_name(scope, prefix='_')
else:
name = self.available_name(scope)
def should_rename():
if binding.should_rename(name):
return True
# It's no longer efficient to do this rename
if isinstance(binding, NameBinding):
# Check that the original name is still available
if binding.reserved == binding.name:
# We already reserved it (this is probably an arg)
return False
if not self.is_available(binding.name, scope):
# The original name has already been assigned to another binding,
# so we need to rename this anyway.
return True
return False
if should_rename():
binding.rename(name)
else:
# Any existing name will become reserved
binding.disallow_rename()
if binding.name is not None:
reserve_name(binding.name, scope)
return module
def rename(module, prefix_globals=False, preserved_globals=None):
NameAssigner()(module, prefix_globals, preserved_globals)
|
jmschrei/scikit-learn
|
sklearn/neural_network/multilayer_perceptron.py
|
Python
|
bsd-3-clause
| 49,926
| 0.00002
|
"""Multi-layer Perceptron
"""
# Authors: Issam H. Laradji <issam.laradji@gmail.com>
# Andreas Mueller
# Jiyuan Qian
# Licence: BSD 3 clause
import numpy as np
from abc import ABCMeta, abstractmethod
from scipy.optimize import fmin_l_bfgs_b
import warnings
from ..base import BaseEstimator, ClassifierMixin, RegressorMixin
from ._base import logistic, softmax
from ._base import ACTIVATIONS, DERIVATIVES, LOSS_FUNCTIONS
from ._stochastic_optimizers import SGDOptimizer, AdamOptimizer
from ..cross_validation import train_test_split
from ..externals import six
from ..preprocessing import LabelBinarizer
from ..utils import gen_batches, check_random_state
from ..utils import shuffle
from ..utils import check_array, check_X_y, column_or_1d
from ..exceptions import ConvergenceWarning
from ..utils.extmath import safe_sparse_dot
from ..utils.validation import check_is_fitted
from ..utils.multiclass import _check_partial_fit_first_call
_STOCHASTIC_ALGOS = ['sgd', 'adam']
def _pack(coefs_, intercepts_):
"""Pack the parameters into a single vector."""
return np.hstack([l.ravel() for l in coefs_ + intercepts_])
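# The packed layout is [coef_0.ravel(), ..., coef_{n-1}.ravel(),
# intercept_0, ..., intercept_{n-1}]; BaseMultilayerPerceptron._unpack reverses
# it using the stored _coef_indptr/_intercept_indptr index pointers.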
class BaseMultilayerPerceptron(six.with_metaclass(ABCMeta, BaseEstimator)):
"""Base class for MLP classification and regression.
Warning: This class should not be used directly.
Use derived classes instead.
"""
@abstractmethod
def __init__(self, hidden_layer_sizes, activation, algorithm,
alpha, batch_size, learning_rate, learning_rate_init, power_t,
max_iter, loss, shuffle, random_state, tol, verbose,
warm_start, momentum, nesterovs_momentum, early_stopping,
validation_fraction, beta_1, beta_2, epsilon):
self.activation = activation
self.algorithm = algorithm
self.alpha = alpha
self.batch_size = batch_size
self.learning_rate = learning_rate
self.learning_rate_init = learning_rate_init
self.power_t = power_t
self.max_iter = max_iter
self.loss = loss
self.hidden_layer_sizes = hidden_layer_sizes
self.shuffle = shuffle
self.random_state = random_state
self.tol = tol
self.verbose = verbose
self.warm_start = warm_start
self.momentum = momentum
self.nesterovs_momentum = nesterovs_momentum
self.early_stopping = early_stopping
self.validation_fraction = validation_fraction
self.beta_1 = beta_1
self.beta_2 = beta_2
self.epsilon = epsilon
def _unpack(self, packed_parameters):
"""Extract the coefficients and intercepts from packed_parameters."""
for i in range(self.n_layers_ - 1):
start, end, shape = self._coef_indptr[i]
self.coefs_[i] = np.reshape(packed_parameters[start:end], shape)
start, end = self._intercept_indptr[i]
self.intercepts_[i] = packed_parameters[start:end]
def _forward_pass(self, activations, with_output_activation=True):
"""Perform a forward pass on the network by computing the values
of the neurons in the hidden layers and the output layer.
Parameters
----------
activations: list, length = n_layers - 1
The ith element of the list holds the values of the ith layer.
with_output_activation : bool, default True
If True, the output passes through the
|
output activation
function, which is either the softmax function or the
logistic function
"""
hidden_activation = ACTIVATIONS[self.activation]
# Iterate over the hidden layers
for i in range(self.n_layers_ - 1):
activations[i + 1] = safe_sparse_dot(activations[i],
|
self.coefs_[i])
activations[i + 1] += self.intercepts_[i]
# For the hidden layers
if (i + 1) != (self.n_layers_ - 1):
activations[i + 1] = hidden_activation(activations[i + 1])
# For the last layer
if with_output_activation:
output_activation = ACTIVATIONS[self.out_activation_]
activations[i + 1] = output_activation(activations[i + 1])
return activations
def _compute_loss_grad(self, layer, n_samples, activations, deltas,
coef_grads, intercept_grads):
"""Compute the gradient of loss with respect to coefs and intercept for
specified layer.
This function does backpropagation for the specified one layer.
"""
coef_grads[layer] = safe_sparse_dot(activations[layer].T,
deltas[layer])
coef_grads[layer] += (self.alpha * self.coefs_[layer])
coef_grads[layer] /= n_samples
intercept_grads[layer] = np.mean(deltas[layer], 0)
return coef_grads, intercept_grads
def _loss_grad_lbfgs(self, packed_coef_inter, X, y, activations, deltas,
coef_grads, intercept_grads):
"""Compute the MLP loss function and its corresponding derivatives
with respect to the different parameters given in the initialization.
Returned gradients are packed in a single vector so it can be used
in l-bfgs
Parameters
----------
packed_parameters : array-like
A vector comprising the flattened coefficients and intercepts.
X : {array-like, sparse matrix}, shape (n_samples, n_features)
The input data.
y : array-like, shape (n_samples,)
The target values.
activations: list, length = n_layers - 1
The ith element of the list holds the values of the ith layer.
deltas : list, length = n_layers - 1
The ith element of the list holds the difference between the
activations of the i + 1 layer and the backpropagated error.
More specifically, deltas are gradients of loss with respect to z
in each layer, where z = wx + b is the value of a particular layer
before passing through the activation function
coef_grad : list, length = n_layers - 1
The ith element contains the amount of change used to update the
coefficient parameters of the ith layer in an iteration.
intercept_grads : list, length = n_layers - 1
The ith element contains the amount of change used to update the
intercept parameters of the ith layer in an iteration.
Returns
-------
loss : float
grad : array-like, shape (number of nodes of all layers,)
"""
self._unpack(packed_coef_inter)
loss, coef_grads, intercept_grads = self._backprop(
X, y, activations, deltas, coef_grads, intercept_grads)
self.n_iter_ += 1
grad = _pack(coef_grads, intercept_grads)
return loss, grad
def _backprop(self, X, y, activations, deltas, coef_grads,
intercept_grads):
"""Compute the MLP loss function and its corresponding derivatives
with respect to each parameter: weights and bias vectors.
Parameters
----------
X : {array-like, sparse matrix}, shape (n_samples, n_features)
The input data.
y : array-like, shape (n_samples,)
The target values.
activations: list, length = n_layers - 1
The ith element of the list holds the values of the ith layer.
deltas : list, length = n_layers - 1
The ith element of the list holds the difference between the
activations of the i + 1 layer and the backpropagated error.
More specifically, deltas are gradients of loss with respect to z
in each layer, where z = wx + b is the value of a particular layer
before passing through the activation function
coef_grad : list, length = n_layers - 1
The ith element contains the amount of change used to update the
coefficient parameters of the ith layer in an iteration.
interc
|
stormi/tsunami
|
src/primaires/pnj/editeurs/pedit/edt_stats.py
|
Python
|
bsd-3-clause
| 3,848
| 0.002865
|
# -*-coding:Utf-8 -*
# Copyright (c) 2010 LE GOFF Vincent
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF TH
|
E USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Fichier contenant le contexte éditeur
|
EdtStats"""
from primaires.interpreteur.editeur import Editeur
from primaires.format.fonctions import contient
class EdtStats(Editeur):
"""Classe définissant le contexte éditeur 'stats'.
Ce contexte permet d'éditer les stats d'une race.
"""
def __init__(self, pere, objet=None, attribut=None):
"""Constructeur de l'éditeur"""
Editeur.__init__(self, pere, objet, attribut)
def accueil(self):
"""Message d'accueil"""
msg = \
"Entrez le |ent|nom|ff| de la stat, un signe |ent|/|ff| " \
"et la valeur pour modifier une stat.\nExemple : |cmd|force / " \
"45|ff|\n\nEntrez |ent|/|ff| pour revenir à la fenêtre parente\n\n"
stats = self.objet
msg += "+-" + "-" * 20 + "-+-" + "-" * 6 + "-+\n"
msg += "| " + "Nom".ljust(20) + " | " + "Valeur".ljust(6) + " |\n"
msg += "| " + " ".ljust(20) + " | " + " ".ljust(6) + " |"
for stat in stats:
if not stat.max:
msg += "\n| |ent|" + stat.nom.ljust(20) + "|ff| | "
msg += str(stat.defaut).rjust(6) + " |"
return msg
def interpreter(self, msg):
"""Interprétation du message"""
try:
nom_stat, valeur = msg.split(" / ")
except ValueError:
self.pere << "|err|Syntaxe invalide.|ff|"
else:
# On cherche la stat
stat = None
for t_stat in self.objet:
if not t_stat.max and contient(t_stat.nom, nom_stat):
stat = t_stat
break
if not stat:
self.pere << "|err|Cette stat est introuvable.|ff|"
else:
                # Conversion
try:
valeur = int(valeur)
assert valeur > 0
assert valeur >= stat.marge_min
assert valeur <= stat.marge_max
except (ValueError, AssertionError):
self.pere << "|err|Valeur invalide.|ff|"
else:
stat.defaut = valeur
stat.courante = valeur
self.actualiser()
|
rachel3834/mulens-tom
|
scripts/log_utilities.py
|
Python
|
gpl-3.0
| 2,938
| 0.017699
|
# -*- coding: utf-8 -*-
"""
Created on Wed Feb 17 00:00:05 2016
@author: rstreet
"""
import logging
from os import path, remove
from sys import exit
from astropy.time import Time
|
from datetime import datetime
import glob
def start_day_log( config, log_name, version=None ):
"""Function to initialize a new log file. The naming convention for the
file is [log_name]_[UTC_date].log. A new file is automatically created
if none for the current UTC day already exist, otherwise output is appended
to an existing file.
This function also configures the log file to
|
provide timestamps for
all entries.
Parameters:
config dictionary Script configuration including parameters
log_directory Directory path
log_root_name Name of the log file
log_name string Name applied to the logger Object
(not to be confused with the log_root_name)
console Boolean Switch to capture logging data from the
stdout. Normally set to False.
Returns:
log open logger object
"""
log_file = get_log_path( config, config['log_root_name'] )
# To capture the logging stream from the whole script, create
# a log instance together with a console handler.
# Set formatting as appropriate.
log = logging.getLogger( log_name )
if len(log.handlers) == 0:
log.setLevel( logging.INFO )
file_handler = logging.FileHandler( log_file )
file_handler.setLevel( logging.INFO )
formatter = logging.Formatter( fmt='%(asctime)s %(message)s', \
datefmt='%Y-%m-%dT%H:%M:%S' )
file_handler.setFormatter( formatter )
log.addHandler( file_handler )
log.info( '\n------------------------------------------------------\n')
if version != None:
log.info('Software version: '+version+'\n')
return log
def get_log_path( config, log_root_name ):
"""Function to determine the path and name of the log file, giving it
a date-stamp in UTC.
Parameters:
config dictionary Script configuration including parameters
log_directory Directory path
log_root_name Name of the log file
Returns:
log_file string Path/log_name string
"""
ts = Time.now()
ts = ts.iso.split()[0]
log_file = path.join( config['log_dir'], \
log_root_name + '_' + ts + '.log' )
return log_file
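# For example, with config['log_dir'] == 'logs' and log_root_name == 'survey'
# (hypothetical values), this returns a path like 'logs/survey_2016-02-17.log',
# date-stamped with the current UTC day.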
def end_day_log( log ):
"""Function to cleanly shutdown logging functions with last timestamped
entry.
Parameters:
log logger Object
Returns:
None
"""
log.info( 'Processing complete\n' )
logging.shutdown()
|
WPTechInnovation/worldpay-within-sdk
|
wrappers/python_2-7/EventServer.py
|
Python
|
mit
| 3,460
| 0.011561
|
from thrift import Thrift
from thrift.transport import TSocket
from thrift.transport import TTransport
from thrift.protocol import TBinaryProtocol
from thrift.server import TServer
import threading
from wpwithin.WPWithinCallback import Client
from wpwithin.WPWithinCallback import Processor
class CallbackHandler:
def __init__(self):
self.log = {}
def beginServiceDelivery(self, serviceId, serviceDeliveryToken, unitsToSupply):
try:
print "event from core - onBeginServiceDelivery()"
print "ServiceID: {0}\n".format(serviceId)
print "Un
|
itsToSupply: {0}\n".format(unitsToSupply)
print "SDT.Key: {0}\n".format(serviceDeliveryToken.key)
print "SDT.Expiry: {0}\n".format(serviceDeliveryToken.expiry)
print "SDT.Issued: {0}\n".format(serviceDeliveryToken.issued)
print "SDT.Signature: {0}\n".format(serviceDeliveryToken.signature)
print "SDT.RefundOnExpiry: {0}\n".format(serviceDeliveryToken.refundOnExpi
|
ry)
except Exception as e:
print "doBeginServiceDelivery failed: " + str(e)
def endServiceDelivery(self, serviceId, serviceDeliveryToken, unitsReceived):
try:
print "event from core - onEndServiceDelivery()"
print "ServiceID: {0}\n".format(serviceId)
print "UnitsReceived: {0}\n".format(unitsReceived)
print "SDT.Key: {0}\n".format(serviceDeliveryToken.key)
print "SDT.Expiry: {0}\n".format(serviceDeliveryToken.expiry)
print "SDT.Issued: {0}\n".format(serviceDeliveryToken.issued)
print "SDT.Signature: {0}\n".format(serviceDeliveryToken.signature)
print "SDT.RefundOnExpiry: {0}\n".format(serviceDeliveryToken.refundOnExpiry)
except Exception as e:
print "doEndServiceDelivery failed: " + str(e)
class EventServer:
server = None
def startServer(self, server):
print "##### STARTING WRAPPER SERVER to receive callbacks #####"
print "##### SERVER: " + str(server)
server.serve()
    def stop(self):
        if self.server is not None:
            self.server.setShouldStop(True)
def __init__(self, listenerHandler, hostname, port):
try:
if(listenerHandler == None):
print "Using build-in handler"
theListenerToUse = CallbackHandler()
else:
print "Using custom handler"
theListenerToUse = listenerHandler
processor = Processor(theListenerToUse)
transport = TSocket.TServerSocket(host=hostname, port=port)
tfactory = TTransport.TBufferedTransportFactory()
pfactory = TBinaryProtocol.TBinaryProtocolFactory()
#self.server = TServer.TThreadedServer(processor, transport, tfactory, pfactory)
self.server = TServer.TSimpleServer(processor, transport, tfactory, pfactory)
print "Serving the Wrapper listener, port: " + str(port)
thread = threading.Thread(target=self.startServer, args=([self.server]))
thread.daemon = True # Daemonize thread
thread.start() # Start the execution
print "##### SERVER: " + str(self.server)
print "##### SERVER: SHOULD HAVE STARTED"
print "Should have started Wrapper listener"
except Exception as e:
print "Event server setup failed: " + str(e)
|
mabuchilab/Instrumental
|
instrumental/__about__.py
|
Python
|
gpl-3.0
| 374
| 0.002674
|
# -*- coding: utf-8 -*
|
-
# Copyright 2016-2017 Nate Bogdanowicz
import datetime
__distname__ = "Instrumental-lib"
__version__ = "0.6"
__author__ = "Nate Bogdanowicz"
__email__ = "natezb@gmail.com"
__url__ = 'https://github.c
|
om/mabuchilab/Instrumental'
__license__ = "GPLv3"
__copyright__ = "Copyright 2013-{}, {}".format(datetime.date.today().year, __author__)
|
beeftornado/sentry
|
src/sentry_plugins/jira_ac/utils.py
|
Python
|
bsd-3-clause
| 2,784
| 0.000359
|
from __future__ import absolute_import
import hashlib
import jwt
from six.moves.urllib.parse import quote
from sentry.shared_integrations.exceptions import ApiError
def percent_encode(val):
# see https://en.wikipedia.org/wiki/Percent-encoding
return quote(val.encode("utf8", errors="replace")).replace("%7E", "~").replace("/", "%2F")
def get_query_hash(uri, method, query_params=None):
# see
# https://developer.atlassian.com/static/connect/docs/latest/concepts/understanding-jwt.html#qsh
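    # The qsh is SHA-256 over "METHOD&canonical_uri&sorted_query": keys/values
    # are percent-encoded, sorted by key, and the "jwt" parameter itself is
    # excluded from the canonical query string.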
uri = uri.rstrip("/")
method = method.upper()
if query_params is None:
query_params = {}
sorted_query = []
for k, v in sorted(query_params.items()):
# don't include jwt query param
if
|
k != "jwt":
|
if isinstance(v, list):
                param_val = ",".join(percent_encode(val) for val in v)
else:
param_val = percent_encode(v)
sorted_query.append("%s=%s" % (percent_encode(k), param_val))
query_string = "%s&%s&%s" % (method, uri, "&".join(sorted_query))
return hashlib.sha256(query_string.encode("utf8")).hexdigest()
def get_jira_auth_from_request(request):
# https://developer.atlassian.com/static/connect/docs/latest/concepts/authentication.html
# Extract the JWT token from the request's jwt query
# parameter or the authorization header.
token = request.GET.get("jwt")
if token is None:
raise ApiError("No token parameter")
# Decode the JWT token, without verification. This gives
# you a header JSON object, a claims JSON object, and a signature.
decoded = jwt.decode(token, verify=False)
# Extract the issuer ('iss') claim from the decoded, unverified
# claims object. This is the clientKey for the tenant - an identifier
# for the Atlassian application making the call
issuer = decoded["iss"]
# Look up the sharedSecret for the clientKey, as stored
# by the add-on during the installation handshake
from sentry_plugins.jira_ac.models import JiraTenant
jira_auth = JiraTenant.objects.get(client_key=issuer)
# Verify the signature with the sharedSecret and
# the algorithm specified in the header's alg field.
decoded_verified = jwt.decode(token, jira_auth.secret)
# Verify the query has not been tampered by Creating a Query Hash
# and comparing it against the qsh claim on the verified token.
# TODO: probably shouldn't need to hardcode get... for post maybe
# the secret should just be a hidden field in the form ?
qsh = get_query_hash(request.path, "GET", request.GET)
# qsh = get_query_hash(request.path, request.method, request.GET)
if qsh != decoded_verified["qsh"]:
raise ApiError("Query hash mismatch")
return jira_auth
|
CarterBain/Medici
|
ib/ext/EReader.py
|
Python
|
bsd-3-clause
| 38,033
| 0.001052
|
#!/usr/bin/env python
""" generated source for module EReader """
#
# Original file copyright original author(s).
# This file copyright Troy Melhase, troy@gci.net.
#
# WARNING: all changes to this file will be lost.
from ib.lib import Boolean, Double, DataInputStream, Integer, Long, StringBuffer, Thread
from ib.lib.overloading import overloaded
from ib.ext.Contract import Contract
from ib.ext.ContractDetails import ContractDetails
from ib.ext.ComboLeg import ComboLeg
from ib.ext.CommissionReport import CommissionReport
from ib.ext.EClientErrors import EClientErrors
from ib.ext.Execution import Execution
from ib.ext.Order import Order
from ib.ext.OrderComboLeg import OrderComboLeg
from ib.ext.OrderState import OrderState
from ib.ext.TagValue import TagValue
from ib.ext.TickType import TickType
from ib.ext.UnderComp import UnderComp
from ib.ext.Util import Util
#
# * EReader.java
# *
#
# package: com.ib.client
class EReader(Thread):
""" generated source for class EReader """
# incoming msg id's
TICK_PRICE = 1
TICK_SIZE = 2
ORDER_STATUS = 3
ERR_MSG = 4
OPEN_ORDER = 5
ACCT_VALUE = 6
PORTFOLIO_VALUE = 7
ACCT_UPDATE_TIME = 8
NEXT_VALID_ID = 9
CONTRACT_DATA = 10
EXECUTION_DATA = 11
MARKET_DEPTH = 12
MARKET_DEPTH_L2 = 13
NEWS_BULLETINS = 14
MANAGED_ACCTS = 15
RECEIVE_FA = 16
HISTORICAL_DATA = 17
BOND_CONTRACT_DATA = 18
SCANNER_PARAMETERS = 19
SCANNER_DA
|
TA = 20
TICK_OPTION_COMPUTATION = 21
TICK_GENERIC = 45
TICK_STRING = 46
TICK_EFP = 47
CURRENT_T
|
IME = 49
REAL_TIME_BARS = 50
FUNDAMENTAL_DATA = 51
CONTRACT_DATA_END = 52
OPEN_ORDER_END = 53
ACCT_DOWNLOAD_END = 54
EXECUTION_DATA_END = 55
DELTA_NEUTRAL_VALIDATION = 56
TICK_SNAPSHOT_END = 57
MARKET_DATA_TYPE = 58
COMMISSION_REPORT = 59
m_parent = None
m_dis = None
def parent(self):
""" generated source for method parent """
return self.m_parent
def eWrapper(self):
""" generated source for method eWrapper """
return self.parent().wrapper()
@overloaded
def __init__(self, parent, dis):
""" generated source for method __init__ """
self.__init__("EReader", parent, dis)
@__init__.register(object, str, object, DataInputStream)
def __init___0(self, name, parent, dis):
""" generated source for method __init___0 """
Thread.__init__(self, name, parent, dis)
self.setName(name)
self.m_parent = parent
self.m_dis = dis
def run(self):
""" generated source for method run """
try:
# loop until thread is terminated
while not self.isInterrupted() and self.processMsg(self.readInt()):
pass
except Exception as ex:
if self.parent().isConnected():
self.eWrapper().error(ex)
if self.parent().isConnected():
self.m_parent.close()
# Overridden in subclass.
def processMsg(self, msgId):
""" generated source for method processMsg """
if msgId == -1:
return False
if msgId == self.TICK_PRICE:
version = self.readInt()
tickerId = self.readInt()
tickType = self.readInt()
price = self.readDouble()
size = 0
if version >= 2:
size = self.readInt()
canAutoExecute = 0
if version >= 3:
canAutoExecute = self.readInt()
self.eWrapper().tickPrice(tickerId, tickType, price, canAutoExecute)
if version >= 2:
# not a tick
sizeTickType = -1
if tickType == 1:
# BID
sizeTickType = 0
# BID_SIZE
elif tickType == 2:
# ASK
sizeTickType = 3
# ASK_SIZE
elif tickType == 4:
# LAST
sizeTickType = 5
# LAST_SIZE
if sizeTickType != -1:
self.eWrapper().tickSize(tickerId, sizeTickType, size)
elif msgId == self.TICK_SIZE:
version = self.readInt()
tickerId = self.readInt()
tickType = self.readInt()
size = self.readInt()
self.eWrapper().tickSize(tickerId, tickType, size)
elif msgId == self.TICK_OPTION_COMPUTATION:
version = self.readInt()
tickerId = self.readInt()
tickType = self.readInt()
impliedVol = self.readDouble()
if impliedVol < 0: # -1 is the "not yet computed" indicator
impliedVol = Double.MAX_VALUE
delta = self.readDouble()
if abs(delta) > 1: # -2 is the "not yet computed" indicator
delta = Double.MAX_VALUE
optPrice = Double.MAX_VALUE
pvDividend = Double.MAX_VALUE
gamma = Double.MAX_VALUE
vega = Double.MAX_VALUE
theta = Double.MAX_VALUE
undPrice = Double.MAX_VALUE
if version >= 6 or (tickType == TickType.MODEL_OPTION):
# introduced in version == 5
optPrice = self.readDouble()
if optPrice < 0: # -1 is the "not yet computed" indicator
optPrice = Double.MAX_VALUE
pvDividend = self.readDouble()
if pvDividend < 0: # -1 is the "not yet computed" indicator
pvDividend = Double.MAX_VALUE
if version >= 6:
gamma = self.readDouble()
if abs(gamma) > 1: # -2 is the "not yet computed" indicator
gamma = Double.MAX_VALUE
vega = self.readDouble()
if abs(vega) > 1: # -2 is the "not yet computed" indicator
vega = Double.MAX_VALUE
theta = self.readDouble()
if abs(theta) > 1: # -2 is the "not yet computed" indicator
theta = Double.MAX_VALUE
undPrice = self.readDouble()
if undPrice < 0: # -1 is the "not yet computed" indicator
undPrice = Double.MAX_VALUE
self.eWrapper().tickOptionComputation(tickerId, tickType, impliedVol, delta, optPrice, pvDividend, gamma, vega, theta, undPrice)
elif msgId == self.TICK_GENERIC:
version = self.readInt()
tickerId = self.readInt()
tickType = self.readInt()
value = self.readDouble()
self.eWrapper().tickGeneric(tickerId, tickType, value)
elif msgId == self.TICK_STRING:
version = self.readInt()
tickerId = self.readInt()
tickType = self.readInt()
value = self.readStr()
self.eWrapper().tickString(tickerId, tickType, value)
elif msgId == self.TICK_EFP:
version = self.readInt()
tickerId = self.readInt()
tickType = self.readInt()
basisPoints = self.readDouble()
formattedBasisPoints = self.readStr()
impliedFuturesPrice = self.readDouble()
holdDays = self.readInt()
futureExpiry = self.readStr()
dividendImpact = self.readDouble()
dividendsToExpiry = self.readDouble()
self.eWrapper().tickEFP(tickerId, tickType, basisPoints, formattedBasisPoints, impliedFuturesPrice, holdDays, futureExpiry, dividendImpact, dividendsToExpiry)
elif msgId == self.ORDER_STATUS:
version = self.readInt()
id = self.readInt()
status = self.readStr()
filled = self.readInt()
remaining = self.readInt()
avgFillPrice = self.readDouble()
permId = 0
if version >= 2:
permId = self.readInt()
parentId = 0
if version >= 3:
parentId = self.readInt()
lastFillPrice = 0
if version >= 4:
lastFillPrice
|
charanpald/features
|
setup.py
|
Python
|
gpl-3.0
| 474
| 0.004219
|
#!/usr/bin/env python
from setuptools import setup
se
|
tup(name='features',
version='0.1',
description='A collection of feature extraction/selection algorithms',
author='Charanpal Dhanjal',
author_email='charanpal@gmail.com',
url='https://github.com/charanpald/features',
install_requires=['numpy>=1.5.0', 'scipy>=0.7.1'],
platforms=["OS Independent"],
packages=['features', 'kernel', "features.test", "kernel.test"],
|
)
|
samyoyo/3vilTwinAttacker
|
Modules/ModuleProbeRequest.py
|
Python
|
mit
| 4,383
| 0.013461
|
#The MIT License (MIT)
#Copyright (c) 2015-2016 mh4x0f P0cL4bs Team
#Permission is hereby granted, free of charge, to any person obtaining a copy of
#this software and associated documentation files (the "Software"), to deal in
#the Software without restriction, including without limitation the rights to
#use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
#the Software, and to permit persons to whom the Software is furnished to do so,
#subject to the following conditions:
#The above copyright notice and this permission notice shall be included in all
#copies or substantial portions of the Software.
#THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
#IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
#FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
#COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
#IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
#CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
from PyQt4.QtGui import *
from re import search
from os import system,geteuid,getuid,popen
from Core.Settings import frm_Settings
from Modules.utils import Refactor,set_monitor_mode
from subprocess import Popen,PIPE
from scapy.all import *
class frm_Probe(QMainWindow):
def __init__(self, parent=None):
super(frm_Probe, self).__init__(parent)
self.form_widget = frm_PMonitor(self)
self.setCentralWidget(self.form_widget)
self.setWindowIcon(QIcon('rsc/icon.ico'))
c
|
lass frm_PMonitor(QWidget):
def __init__(self, parent=None):
super(frm_PMonitor, self).__init__(parent)
self.Main = QVBoxLayout()
self.setWindowTitle("Probe Request wifi Monitor")
self.setWindowIcon(QIcon('rsc/icon.ico'))
self.confi
|
g = frm_Settings()
self.interface = str(self.config.xmlSettings("interface", "monitor_mode", None, False))
self.loadtheme(self.config.XmlThemeSelected())
self.setupGUI()
def loadtheme(self,theme):
sshFile=("Core/%s.qss"%(theme))
with open(sshFile,"r") as fh:
self.setStyleSheet(fh.read())
def setupGUI(self):
self.form0 = QFormLayout()
self.list_probe = QListWidget()
self.list_probe.setFixedHeight(300)
self.btn_scan = QPushButton("Scan")
self.btn_scan.clicked.connect(self.Pro_request)
self.btn_scan.setIcon(QIcon("rsc/network.png"))
self.get_placa = QComboBox(self)
n = Refactor.get_interfaces()['all']
for i,j in enumerate(n):
if search("wlan", j):
self.get_placa.addItem(n[i])
self.time_scan = QComboBox(self)
self.time_scan.addItems(["10s","20s","30s"])
self.form0.addRow("Network Adapter: ", self.get_placa)
self.form0.addRow(self.list_probe)
self.form0.addRow("Time Scan: ", self.time_scan)
self.form1 = QFormLayout()
self.form1.addRow(self.btn_scan)
self.Main.addLayout(self.form0)
self.Main.addLayout(self.form1)
self.setLayout(self.Main)
def Pro_request(self):
self.time_control = None
if self.time_scan.currentText() == "10s":self.time_control = 300
elif self.time_scan.currentText() == "20s":self.time_control = 400
elif self.time_scan.currentText() == "30s":self.time_control = 600
if self.get_placa.currentText() == "":
QMessageBox.information(self, "Network Adapter", 'Network Adapter Not found try again.')
return
out = popen('iwconfig').readlines()
for i in out:
if search('Mode:Monitor', i):
self.interface = i.split()[0]
sniff(iface=self.interface,prn=self.sniff_probe, count=self.time_control)
return
set_monitor_mode(self.get_placa.currentText()).setEnable()
sniff(iface=self.interface,prn=self.sniff_probe, count=self.time_control)
def sniff_probe(self,p):
if (p.haslayer(Dot11ProbeReq)):
mac_address=(p.addr2)
ssid=p[Dot11Elt].info
ssid=ssid.decode('utf-8','ignore')
if ssid == "":ssid="null"
self.list_probe.addItem("[:] Probe Request from %s for SSID '%s'" %(mac_address,ssid))
|
hoelsner/product-database
|
app/productdb/models.py
|
Python
|
mit
| 40,316
| 0.002828
|
import hashlib
import re
from collections import Counter
from datetime import timedelta
from django.contrib.auth.models import User
from django.conf import settings
from django.core.exceptions import ValidationError
from django.core.validators import MinValueValidator, MaxValueValidator
from django.core.cache import cache
from django.core.cache.utils import make_template_fragment_key
from django.db import models
from django.db.models import Q
from django.db.models.signals import pre_delete, post_save, pre_save, post_delete
from django.dispatch import receiver
from django.utils.timezone import datetime, now
from app.config.settings import AppSettings
from app.productdb.validators import validate_product_list_string
from app.productdb import utils
CURRENCY_CHOICES = (
('EUR', 'Euro'),
('USD', 'US-Dollar'),
)
class JobFile(models.Model):
"""Uploaded files for tasks"""
file = models.FileField(upload_to="data")
@receiver(pre_delete, sender=JobFile)
def delete_job_file(sender, instance, **kwargs):
"""remove the file from the disk if the Job File object is deleted"""
instance.file.delete(False)
class Vendor(models.Model):
"""
Vendor
"""
name = models.CharField(
max_length=128,
unique=True
)
def __str__(self):
return self.name
def delete(self, using=None, **kwargs):
# prevent the deletion of the "unassigned" value from model
if self.id == 0:
raise Exception("Operation not allowed")
super().delete(using)
def save(self, **kwargs):
# clean the object before save
self.full_clean()
super(Vendor, self).save(**kwargs)
class Meta:
verbose_name = "vendor"
verbose_name_plural = "vendors"
ordering = ('name',)
class ProductGroup(models.Model):
"""
Product Group
"""
name = models.CharField(
max_length=512,
help_text="Name of the Product Group"
)
vendor = models.ForeignKey(
Vendor,
blank=False,
null=False,
default=0,
verbose_name="Vendor",
on_delete=models.SET_DEFAULT
)
def get_all_products(self):
"""returns a query set that contains all Products"""
result = None
if self.pk:
result = Product.objects.filter(product_group_id=self.pk)
if result.count() == 0:
result = None
return result
def save(self, *args, **kwargs):
# clean the object before save
self.full_clean()
super(ProductGroup, self).save(*args, **kwargs)
def clean(self):
# check that the Product Group Name already exist in the database for the given vendor
if ProductGroup.objects.filter(vendor=self.vendor, name=self.name).exists():
raise ValidationError({
"name": ValidationError("group name already defined for this vendor")
})
# verify that all associated Products have the same Vendor as the product list
associated_products = self.get_all_products()
import logging
logging.debug("Associated Products to %s: %s - %s" %(
self.name,
len(associated_products) if associated_products is not None else "0 (None)",
associated_products.values_list("product_id", flat=True) if associated_products is not None else "[]"
))
# if no products are associated to the group, no check is required
if associated_products:
products_with_different_vendor = [False for product in self.get_all_products()
if product.vendor != self.ve
|
ndor]
if len(products_with_different_vendor) != 0:
raise ValidationError({
"vendor": ValidationError("cannot set new vendor as long as there are products associated to it")
})
def __str__(self):
return self.name
class Meta:
verbose_name = "Product Group"
verbose_name_plural = "Product Groups"
unique_together = ("name", "vendor")
class Product(models.Model):
END_OF_SUPPORT_STR = "End o
|
f Support"
END_OF_SALE_STR = "End of Sale"
END_OF_NEW_SERVICE_ATTACHMENT_STR = "End of New Service Attachment Date"
END_OF_SW_MAINTENANCE_RELEASES_STR = "End of SW Maintenance Releases Date"
END_OF_ROUTINE_FAILURE_ANALYSIS_STR = "End of Routine Failure Analysis Date"
END_OF_SERVICE_CONTRACT_RENEWAL_STR = "End of Service Contract Renewal Date"
END_OF_VUL_SUPPORT_STR = "End of Vulnerability/Security Support date"
EOS_ANNOUNCED_STR = "EoS announced"
NO_EOL_ANNOUNCEMENT_STR = "No EoL announcement"
# preference greater than the following constant is considered preferred
LESS_PREFERRED_PREFERENCE_VALUE = 25
product_id = models.CharField(
unique=False,
max_length=512,
help_text="Product ID/Number"
)
description = models.TextField(
default="",
blank=True,
null=True,
help_text="description of the product"
)
list_price = models.FloatField(
null=True,
blank=True,
verbose_name="list price",
help_text="list price of the element",
validators=[MinValueValidator(0)]
)
currency = models.CharField(
max_length=16,
choices=CURRENCY_CHOICES,
default="USD",
verbose_name="currency",
help_text="currency of the list price"
)
tags = models.TextField(
default="",
blank=True,
null=True,
verbose_name="Tags",
help_text="unstructured tag field"
)
vendor = models.ForeignKey(
Vendor,
blank=False,
null=False,
default=0,
verbose_name="Vendor",
on_delete=models.SET_DEFAULT
)
eox_update_time_stamp = models.DateField(
null=True,
blank=True,
verbose_name="EoX lifecycle data timestamp",
help_text="Indicates that the product has lifecycle data and when they were updated. If no "
"EoL announcement date is set but an update timestamp, the product is considered as not EoL/EoS."
)
eol_ext_announcement_date = models.DateField(
null=True,
blank=True,
verbose_name="End-of-Life Announcement Date"
)
end_of_sale_date = models.DateField(
null=True,
blank=True,
verbose_name="End-of-Sale Date"
)
end_of_new_service_attachment_date = models.DateField(
null=True,
blank=True,
verbose_name="End of New Service Attachment Date"
)
end_of_sw_maintenance_date = models.DateField(
null=True,
blank=True,
verbose_name="End of SW Maintenance Releases Date"
)
end_of_routine_failure_analysis = models.DateField(
null=True,
blank=True,
verbose_name="End of Routine Failure Analysis Date"
)
end_of_service_contract_renewal = models.DateField(
null=True,
blank=True,
verbose_name="End of Service Contract Renewal Date"
)
end_of_support_date = models.DateField(
null=True,
blank=True,
verbose_name="Last Date of Support",
)
end_of_sec_vuln_supp_date = models.DateField(
null=True,
blank=True,
verbose_name="End of Vulnerability/Security Support date"
)
eol_reference_number = models.CharField(
max_length=2048,
null=True,
blank=True,
verbose_name="EoL reference number",
help_text="Product bulletin number or vendor specific reference for EoL"
)
eol_reference_url = models.URLField(
null=True,
blank=True,
max_length=1024,
verbose_name="EoL reference URL",
help_text="URL to the Product bulletin or EoL reference"
)
product_group = models.ForeignKey(
ProductGroup,
null=True,
blank=True,
verbose_name="Product Group",
on_delete=models.SET_NULL
)
lc_state_sync = models.BooleanField(
def
|
qiime2/q2-types
|
q2_types/sample_data/_type.py
|
Python
|
bsd-3-clause
| 832
| 0
|
# -----------------------------------------------------------
|
-----------------
# Copyright (c) 2016-2021, QIIME 2 development team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file LICENSE, distributed with this software.
|
# ----------------------------------------------------------------------------
from qiime2.plugin import SemanticType
from ..plugin_setup import plugin
from . import AlphaDiversityDirectoryFormat
SampleData = SemanticType('SampleData', field_names='type')
AlphaDiversity = SemanticType('AlphaDiversity',
variant_of=SampleData.field['type'])
plugin.register_semantic_types(SampleData, AlphaDiversity)
plugin.register_semantic_type_to_format(
SampleData[AlphaDiversity],
artifact_format=AlphaDiversityDirectoryFormat
)
|
DigitalCampus/django-oppia
|
tests/test_course_upload.py
|
Python
|
gpl-3.0
| 14,057
| 0.001636
|
import pytest
from django.urls import reverse
from gamification.models import CourseGamificationEvent, \
MediaGamificationEvent, \
ActivityGamificationEvent
from oppia.test import OppiaTestCase
from oppia.models import Course, CoursePublishingLog, Quiz, Activity, Question
from zipfile import BadZipfile
from quiz.models import QuizProps, QuestionProps
class CourseUploadTest(OppiaTestCase):
fixtures = ['tests/test_user.json',
'tests/test_oppia.json',
'tests/test_quiz.json',
'tests/test_permissions.json',
'tests/test_course_permissions.json']
file_root = './oppia/fixtures/reference_files/'
course_file_path = file_root + 'ncd1_test_course.zip'
media_file_path = file_root + 'sample_video.m4v'
empty_section_course = file_root + 'test_course_empty_section.zip'
no_module_xml = file_root + 'test_course_no_module_xml.zip'
corrupt_course_zip = file_root + 'corrupt_course.zip'
course_no_sub_dir = file_root + 'test_course_no_sub_dir.zip'
course_old_version = file_root + 'ncd1_old_course.zip'
course_no_activities = file_root + 'test_course_no_activities.zip'
course_with_custom_points = file_root + 'ref-1.zip'
course_with_copied_activities = file_root + 'ref-1-copy.zip'
course_with_custom_points_updated = file_root + 'ref-1-updated.zip'
course_with_quizprops = file_root + 'quizprops_course.zip'
course_with_updated_quizprops = file_root + 'quizprops_course_updated.zip'
@pytest.mark.xfail(reason="works on local but not on github workflows")
def test_upload_template(self):
with open(self.course_file_path, 'rb') as course_file:
self.client.force_login(self.admin_user)
response = self.client.post(reverse('oppia:upload'),
{'course_file': course_file})
# should be redirected to the update step 2 form
self.assertRedirects(response,
reverse('oppia:upload_step2', args=[2]),
302,
200)
@pytest.mark.xfail(reason="works on local but not on github workflows")
def test_upload_with_empty_sections(self):
with open(self.empty_section_course, 'rb') as course_file:
self.client.force_login(self.admin_user)
response = self.client.post(reverse('oppia:upload'),
{'course_file': course_file})
course = Course.objects.latest('created_date')
# should be redirected to the update step 2 form
self.assertRedirects(response,
reverse('oppia:upload_step2',
args=[course.id]),
302,
200)
@pytest.mark.xfail(reason="works on local but not on github workflows")
def test_upload_no_module_xml(self):
with open(self.no_module_xml, 'rb') as course_file:
self.client.force_login(self.admin_user)
response = self.client.post(reverse('oppia:upload'),
{'course_file': course_file})
self.assertEqual(200, response.status_code)
course_log = CoursePublishingLog.objects.latest('log_date')
self.assertEqual("no_module_xml", course_log.action)
@pytest.mark.xfail(reason="works on local but not on github workflows")
def test_corrupt_course(self):
with open(self.corrupt_course_zip, 'rb') as course_file:
self.client.force_login(self.admin_user)
response = self.client.post(reverse('oppia:upload'),
{'course_file': course_file})
self.assertEqual(200, response.status_code)
self.assertRaises(BadZipfile)
course_log = CoursePublishingLog.objects.latest('log_date')
self.assertEqual("invalid_zip", course_log.action)
@pytest.mark.xfail(reason="works on local but not on github workflows")
def test_no_sub_dir(self):
with open(self.course_no_sub_dir, 'rb') as course_file:
self.client.force_login(self.admin_user)
response = self.client.post(reverse('oppia:upload'),
{'course_file': course_file})
self.assertEqual(200, response.status_code)
course_log = CoursePublishingLog.objects.latest('log_date')
self.assertEqual("invalid_zip", course_log.action)
@pytest.mark.xfail(reason="works on local but not on github workflows")
def test_newer_version_exists(self):
with open(self.course_old_version, 'rb') as course_file:
self.client.force_login(self.admin_user)
response = self.client.post(reverse('oppia:upload'),
{'course_file': course_file})
self.assertEqual(200, response.status_code)
course_log = CoursePublishingLog.objects.latest('log_date')
self.assertEqual("newer_version_exists", course_log.action)
@pytest.mark.xfail(reason="works on local but not on github workflows")
def test_course_no_activities(self):
with open(self.course_no_activities, 'rb') as course_file:
self.client.force_login(self.admin_user)
response = self.client.post(reverse('oppia:upload'),
{'course_file': course_file})
self.assertEqual(200, response.status_code)
course_log = CoursePublishingLog.objects.latest('log_date')
self.assertEqual("no_activities", course_log.action)
@pytest.
|
mark.xfail(reason="works on local but not on github workflows")
def test_course_with_custom_points(self):
course_game_events_start = CourseGamificationEvent. \
objects.all().count()
media_game_events_start = MediaGamificationEvent. \
objects.all().
|
count()
activity_game_events_start = ActivityGamificationEvent. \
objects.all().count()
with open(self.course_with_custom_points, 'rb') as course_file:
self.client.force_login(self.admin_user)
response = self.client.post(reverse('oppia:upload'),
{'course_file': course_file})
course = Course.objects.latest('created_date')
self.assertRedirects(response,
reverse('oppia:upload_step2',
args=[course.id]),
302,
200)
course_game_events_end = CourseGamificationEvent.objects.all().count()
self.assertEqual(course_game_events_start+10, course_game_events_end)
media_game_events_end = MediaGamificationEvent.objects.all().count()
self.assertEqual(media_game_events_start+4, media_game_events_end)
activity_game_events_end = ActivityGamificationEvent. \
objects.all().count()
self.assertEqual(activity_game_events_start+1,
activity_game_events_end)
@pytest.mark.xfail(reason="works on local but not on github workflows")
def test_course_with_custom_points_updated(self):
with open(self.course_with_custom_points, 'rb') as course_file:
self.client.force_login(self.admin_user)
response = self.client.post(reverse('oppia:upload'),
{'course_file': course_file})
course = Course.objects.latest('created_date')
self.assertRedirects(response,
reverse('oppia:upload_step2',
args=[course.id]),
302,
200)
course_game_events_start = CourseGamificationEvent. \
objects.all().count()
media_game_events_start = MediaGamificationEvent. \
objects.all().count()
activity_g
|
e0ne/cinder
|
cinder/tests/test_smbfs.py
|
Python
|
apache-2.0
| 22,226
| 0
|
# Copyright 2014 Cloudbase Solutions Srl
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import contextlib
import copy
import os
import mock
from cinder import exception
from cinder.image import image_utils
from cinder import test
from cinder.volume.drivers import smbfs
class SmbFsTestCase(test.TestCase):
_FAKE_SHARE = '//1.2.3.4/share1'
_FAKE_MNT_BASE = '/mnt'
_FAKE_VOLUME_NAME = 'volume-4f711859-4928-4cb7-801a-a50c37ceaccc'
_FAKE_TOTAL_SIZE = '2048'
_FAKE_TOT
|
AL_AVAILABLE = '1024'
_FAKE_TOTAL_ALLOCATED = 1024
_F
|
AKE_VOLUME = {'id': '4f711859-4928-4cb7-801a-a50c37ceaccc',
'size': 1,
'provider_location': _FAKE_SHARE,
'name': _FAKE_VOLUME_NAME,
'status': 'available'}
_FAKE_MNT_POINT = os.path.join(_FAKE_MNT_BASE, 'fake_hash')
_FAKE_VOLUME_PATH = os.path.join(_FAKE_MNT_POINT, _FAKE_VOLUME_NAME)
_FAKE_SNAPSHOT_ID = '5g811859-4928-4cb7-801a-a50c37ceacba'
_FAKE_SNAPSHOT = {'id': _FAKE_SNAPSHOT_ID,
'volume': _FAKE_VOLUME,
'status': 'available',
'volume_size': 1}
_FAKE_SNAPSHOT_PATH = (
_FAKE_VOLUME_PATH + '-snapshot' + _FAKE_SNAPSHOT_ID)
_FAKE_SHARE_OPTS = '-o username=Administrator,password=12345'
_FAKE_OPTIONS_DICT = {'username': 'Administrator',
'password': '12345'}
_FAKE_LISTDIR = [_FAKE_VOLUME_NAME, _FAKE_VOLUME_NAME + '.vhd',
_FAKE_VOLUME_NAME + '.vhdx', 'fake_folder']
_FAKE_SMBFS_CONFIG = mock.MagicMock()
_FAKE_SMBFS_CONFIG.smbfs_oversub_ratio = 2
_FAKE_SMBFS_CONFIG.smbfs_used_ratio = 0.5
_FAKE_SMBFS_CONFIG.smbfs_shares_config = '/fake/config/path'
_FAKE_SMBFS_CONFIG.smbfs_default_volume_format = 'raw'
_FAKE_SMBFS_CONFIG.smbfs_sparsed_volumes = False
def setUp(self):
super(SmbFsTestCase, self).setUp()
smbfs.SmbfsDriver.__init__ = lambda x: None
self._smbfs_driver = smbfs.SmbfsDriver()
self._smbfs_driver._remotefsclient = mock.Mock()
self._smbfs_driver._local_volume_dir = mock.Mock(
return_value=self._FAKE_MNT_POINT)
self._smbfs_driver._execute = mock.Mock()
self._smbfs_driver.base = self._FAKE_MNT_BASE
def test_delete_volume(self):
drv = self._smbfs_driver
fake_vol_info = self._FAKE_VOLUME_PATH + '.info'
drv._ensure_share_mounted = mock.MagicMock()
fake_ensure_mounted = drv._ensure_share_mounted
drv._local_volume_dir = mock.Mock(
return_value=self._FAKE_MNT_POINT)
drv.get_active_image_from_info = mock.Mock(
return_value=self._FAKE_VOLUME_NAME)
drv._delete = mock.Mock()
drv._local_path_volume_info = mock.Mock(
return_value=fake_vol_info)
with mock.patch('os.path.exists', lambda x: True):
drv.delete_volume(self._FAKE_VOLUME)
fake_ensure_mounted.assert_called_once_with(self._FAKE_SHARE)
drv._delete.assert_any_call(
self._FAKE_VOLUME_PATH)
drv._delete.assert_any_call(fake_vol_info)
def _test_setup(self, config, share_config_exists=True):
fake_exists = mock.Mock(return_value=share_config_exists)
fake_ensure_mounted = mock.MagicMock()
self._smbfs_driver._ensure_shares_mounted = fake_ensure_mounted
self._smbfs_driver.configuration = config
with mock.patch('os.path.exists', fake_exists):
if not (config.smbfs_shares_config and share_config_exists and
config.smbfs_oversub_ratio > 0 and
0 <= config.smbfs_used_ratio <= 1):
self.assertRaises(exception.SmbfsException,
self._smbfs_driver.do_setup,
None)
else:
self._smbfs_driver.do_setup(None)
self.assertEqual(self._smbfs_driver.shares, {})
fake_ensure_mounted.assert_called_once()
def test_setup_missing_shares_config_option(self):
fake_config = copy.copy(self._FAKE_SMBFS_CONFIG)
fake_config.smbfs_shares_config = None
self._test_setup(fake_config, None)
def test_setup_missing_shares_config_file(self):
self._test_setup(self._FAKE_SMBFS_CONFIG, False)
    def test_setup_invalid_oversub_ratio(self):
fake_config = copy.copy(self._FAKE_SMBFS_CONFIG)
fake_config.smbfs_oversub_ratio = -1
self._test_setup(fake_config)
def test_setup_invalid_used_ratio(self):
fake_config = copy.copy(self._FAKE_SMBFS_CONFIG)
fake_config.smbfs_used_ratio = -1
self._test_setup(fake_config)
def _test_create_volume(self, volume_exists=False, volume_format=None):
fake_method = mock.MagicMock()
self._smbfs_driver.configuration = copy.copy(self._FAKE_SMBFS_CONFIG)
self._smbfs_driver._set_rw_permissions_for_all = mock.MagicMock()
fake_set_permissions = self._smbfs_driver._set_rw_permissions_for_all
self._smbfs_driver.get_volume_format = mock.MagicMock()
windows_image_format = False
fake_vol_path = self._FAKE_VOLUME_PATH
self._smbfs_driver.get_volume_format.return_value = volume_format
if volume_format:
if volume_format in ('vhd', 'vhdx'):
windows_image_format = volume_format
if volume_format == 'vhd':
windows_image_format = 'vpc'
method = '_create_windows_image'
fake_vol_path += '.' + volume_format
else:
method = '_create_%s_file' % volume_format
if volume_format == 'sparsed':
self._smbfs_driver.configuration.smbfs_sparsed_volumes = (
True)
else:
method = '_create_regular_file'
setattr(self._smbfs_driver, method, fake_method)
with mock.patch('os.path.exists', new=lambda x: volume_exists):
if volume_exists:
self.assertRaises(exception.InvalidVolume,
self._smbfs_driver._do_create_volume,
self._FAKE_VOLUME)
return
self._smbfs_driver._do_create_volume(self._FAKE_VOLUME)
if windows_image_format:
fake_method.assert_called_once_with(
fake_vol_path,
self._FAKE_VOLUME['size'],
windows_image_format)
else:
fake_method.assert_called_once_with(
fake_vol_path, self._FAKE_VOLUME['size'])
fake_set_permissions.assert_called_once_with(fake_vol_path)
def test_create_existing_volume(self):
self._test_create_volume(volume_exists=True)
def test_create_vhdx(self):
self._test_create_volume(volume_format='vhdx')
def test_create_qcow2(self):
self._test_create_volume(volume_format='qcow2')
def test_create_sparsed(self):
self._test_create_volume(volume_format='sparsed')
def test_create_regular(self):
self._test_create_volume()
def _test_find_share(self, existing_mounted_shares=True,
eligible_shares=True):
if existing_mounted_shares:
mounted_shares = ('fake_share1', 'fake_share2', 'fake_share3')
else:
mounted_shares = None
self._smbfs_driver._mounted_shares = mounted_shares
self._smbfs_driver._is_share_eligible = mock.Mock(
return_value=eligible_sha
|
regardscitoyens/sunshine-data
|
scripts/format_pharmaciens.py
|
Python
|
agpl-3.0
| 1,072
| 0.001866
|
# -*- coding: utf-8 -*-
import pandas as pd
import sys
from builtins import str as text
from utils import find_zipcode, str2date
header_mapping = {
'origin': 'ORIGIN',
'company_name': 'LABO',
'lastname_firstname': 'BENEF_PS_QUALITE_NOM_PRENOM',
'address': 'BENEF_PS_ADR',
'job': 'BENEF_PS_QUALIFICATION',
    'rpps': 'BENEF_PS_RPPS',
'value': 'DECL_AVANT_MONTANT',
'date': 'DECL_AVANT_DATE',
'kind': 'DECL_AVANT_NATURE',
'BENEF_PS_CODEPOSTAL': 'BENEF_PS_CODEPOSTAL'
}
input_filename = sys.argv[1]
output_filename = sys.argv[2]
df = pd.read_csv(input_filename, encoding='utf-8')
df['lastname_firstname'] = df['name'] + ' ' + df['firstname']
df['origin'] = 'Pharmacien'
df['date'] = df['date'].apply(str2date)
df['BENEF_PS_CODEPOSTAL'] = df['address'].apply(find_zipcode)
for origin, target in header_mapping.items():
df[target] = df[origin]
df[target] = df[target].apply(text).apply(lambda s: s.replace(',', '- ').replace('"', ''))
df[list(header_mapping.values())].to_csv(output_filename, index=False, encoding='utf-8')
|
giuserpe/leeno
|
src/Ultimus.oxt/python/pythonpath/LeenoPdf.py
|
Python
|
lgpl-2.1
| 8,880
| 0.003154
|
import os
import LeenoUtils
import DocUtils
import SheetUtils
import Dialogs
import LeenoSettings
import LeenoConfig
_EXPORTSETTINGSITEMS = (
'npElencoPrezzi',
'npComputoMetrico',
'npCostiManodopera',
'npQuadroEconomico',
'cbElencoPrezzi',
'cbComputoMetrico',
'cbCostiManodopera',
'cbQuadroEconomico',
)
def loadExportSettings(oDoc):
cfg = LeenoConfig.Config()
data = DocUtils.loadDataBlock(oDoc, 'ImpostazioniExport')
if data is None or len(data) == 0:
data = cfg.readBlock('ImpostazioniExport', True)
return data
def storeExportSettings(oDoc, es):
cfg = LeenoConfig.Config()
DocUtils.storeDataBlock(oDoc, 'ImpostazioniExport', es)
cfg.writeBlock('ImpostazioniExport', es, True)
def prepareCover(oDoc, nDoc, docSubst):
'''
prepare cover page, if there's one
    copy it to the nDoc document and fill in its data
return true if we've got a cover, false otherwise
docSubst is a dictionary with additional variable replacements
mostly used for [PAGINE], [OGGETTO] and [NUMERO_DOCUMENTO]
which are document dependent data
'''
# load print settings and look for cover
data, covers = LeenoSettings.loadPrintSettings(oDoc)
fileCopertine = data.get('fileCopertine')
copertina = data.get('copertina')
if fileCopertine is None or copertina is None:
return False
if fileCopertine == '' or copertina == '':
return False
cDoc = DocUtils.loadDocument(fileCopertine)
if cDoc is None:
return False
if not copertina in cDoc.Sheets:
cDoc.close(False)
del cDoc
return False
# we need to copy page style too
sheet = cDoc.Sheets[copertina]
pageStyle = sheet.PageStyle
if pageStyle is not None and pageStyle != '':
print("PAGE HAS STYLE")
pageStyles = cDoc.StyleFamilies.getByName('PageStyles')
style = pageStyles.getByName(pageStyle)
SheetUtils.copyPageStyle(nDoc, style)
# cover is OK, copy to new document
pos = nDoc.Sheets.Count
nDoc.Sheets.importSheet(cDoc, copertina, pos)
# if page has a print area, copy it too...
nDoc.Sheets[pos].PageStyle = sheet.PageStyle
    if len(sheet.PrintAreas) > 0:
        print("PAGE HAS PRINT AREA")
        nDoc.Sheets[pos].PrintAreas = sheet.PrintAreas
# replaces all placeholders with settings ones
settings = LeenoSettings.loadPageReplacements(oDoc)
for key, val in docSubst.items():
settings[key] = val
SheetUtils.replaceText(nDoc.Sheets[pos], settings)
# close cover document and return
cDoc.close(False)
del cDoc
return True
def prepareHeaderFooter(oDoc, docSubst):
res = {}
# load print settings, we need header and footer data
printSettings, dummy = LeenoSettings.loadPrintSettings(oDoc)
# load replacement templates
replDict = LeenoSettings.loadPageReplacements(oDoc)
for key, val in docSubst.items():
replDict[key] = val
# replace placeholders
for psKey in ('intSx', 'intCenter', 'intDx', 'ppSx', 'ppCenter', 'ppDx'):
if psKey in printSettings:
psVal = printSettings[psKey]
for replKey, replVal in replDict.items():
# pagination needs some extra steps
if replKey in ('[PAGINA]', '[PAGINE]'):
continue
while replKey in psVal:
psVal = psVal.replace(replKey, replVal)
res[psKey] = psVal
return res
def PdfDialog():
    # vertical size of the checkboxes == size of the buttons
#dummy, hItems = Dialogs.getButtonSize('', Icon="Icons-24x24/settings.png")
nWidth, hItems = Dialogs.getEditBox('aa')
    # size of the PDF icon
imgW = Dialogs.getBigIconSize()[0] * 2
return Dialogs.Dialog(Title='Esportazione documenti PDF', Horz=False, CanClose=True, Items=[
Dialogs.HSizer(Items=[
Dialogs.VSizer(Items=[
Dialogs.Spacer(),
Dialogs.ImageControl(Image='Icons-Big/pdf.png', MinWidth=imgW),
Dialogs.Spacer(),
]),
Dialogs.VSizer(Items=[
Dialogs.FixedText(Text='Tavola'),
Dialogs.Spacer(),
Dialogs.Edit(Id='npElencoPrezzi', Align=1, FixedHeight=hItems, FixedWidth=nWidth),
Dialogs.Spacer(),
Dialogs.Edit(Id='npComputoMetrico', Align=1, FixedHeight=hItems, FixedWidth=nWidth),
Dialogs.Spacer(),
Dialogs.Edit(Id='npCostiManodopera', Align=1, FixedHeight=hItems, FixedWidth=nWidth),
Dialogs.Spacer(),
Dialogs.Edit(Id='npQuadroEconomico', Align=1, FixedHeight=hItems, FixedWidth=nWidth),
]),
Dialogs.Spacer(),
Dialogs.VSizer(Items=[
Dialogs.FixedText(Text='Oggetto'),
Dialogs.Spacer(),
Dialogs.CheckBox(Id="cbElencoPrezzi", Label="Elenco prezzi", FixedHeight=hItems),
Dialogs.Spacer(),
Dialogs.CheckBox(Id="cbComputoMetrico", Label="Computo metrico", FixedHeight=hItems),
Dialogs.Spacer(),
Dialogs.CheckBox(Id="cbCostiManodopera", Label="Costi manodopera", FixedHeight=hItems),
Dialogs.Spacer(),
Dialogs.CheckBox(Id="cbQuadroEconomico", Label="Quadro economico", FixedHeight=hItems),
]),
Dialogs.Spacer(),
]),
Dialogs.Spacer(),
Dialogs.Spacer(),
Dialogs.FixedText(Text='Cartella di destinazione:'),
Dialogs.Spacer(),
Dialogs.PathControl(Id="pathEdit"),
Dialogs.Spacer(),
Dialogs.HSizer(Items=[
Dialogs.Spacer(),
Dialogs.Button(Label='Ok', MinWidth=Dialogs.MINBTNWIDTH, Icon='Icons-24x24/ok.png', RetVal=1),
Dialogs.Spacer(),
Dialogs.Button(Label='Annulla', MinWidth=Dialogs.MINBTNWIDTH, Icon='Icons-24x24/cancel.png', RetVal=-1),
Dialogs.Spacer()
])
])
def PdfElencoPrezzi(destFolder, nTavola):
oDoc = LeenoUtils.getDocument()
ep = oDoc.Sheets.getByName('Elenco Prezzi')
    # launch the export
nDoc = str(nTavola)
baseName = ''
if nDoc != '' and nDoc is not None:
baseName = nDoc + '-'
destPath = os.path.join(destFolder, baseName + 'ElencoPrezzi.pdf')
print(f"Export to '{destPath}' file")
selection = [ep, ]
docSubst = {
'[OGGETTO]':'Elenco Prezzi',
'[NUMERO_DOCUMENTO]': str(nTavola),
}
headerFooter = prepareHeaderFooter(oDoc, docSubst)
SheetUtils.pdfExport(oDoc, selection, destPath, headerFooter, lambda oDoc, nDoc: prepareCover(oDoc, nDoc, docSubst))
    # unfortunately there is no way to determine the number of pages in advance, so
    # we have to create the PDF, use a function to count the pages, and then create it again
    # the wonders of LibreOffice...
nPages = LeenoUtils.countPdfPages(destPath)
docSubst['[PAGINE]'] = nPages
SheetUtils.pdfExport(oDoc, selection, destPath, headerFooter, lambda oDoc, nDoc: prepareCover(oDoc, nDoc, docSubst))
def PdfComputoMetrico(destFolder, nTavola):
oDoc = LeenoUtils.getDocument()
ep = oDoc.Sheets.getByName('COMPUTO')
    # launch the export
nDoc = str(nTavola)
baseName = ''
if nDoc != '' and nDoc is not None:
baseName = nDoc + '-'
destPath = os.path.join(destFolder, baseName + 'ComputoMetrico.pdf')
print(f"Export to '{destPath}' file")
selection = [ep, ]
docSubst = {
'[OGGETTO]':'Computo Metrico',
'[NUMERO_DOCUMENTO]': str(nTavola),
}
headerFooter = prepareHeaderFooter(oDoc, docSubst)
SheetUtils.pdfExport(oDoc, selection, destPath, headerFooter, lambda oDoc, nDoc: prepareCover(oDoc, nDoc, docSubst))
    # unfortunately there is no way to determine the number of pages in advance, so
    # we have to create the PDF, use a function to count the pages, and then create it again
    # the wonders of LibreOffice...
nPages = LeenoUtils.countPdfPages(destPath)
docSubst['[PAGINE]'] = nPages
SheetUtils.pdfExport(oDoc, selection, destPath, headerFooter, lambda oD
|
vup1120/oq-hazardlib
|
openquake/hazardlib/gsim/zhao_2006_swiss.py
|
Python
|
agpl-3.0
| 6,612
| 0.003479
|
# -*- coding: utf-8 -*-
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright (C) 2014-2016 GEM Foundation
#
# OpenQuake is free software: you can redistribute it and/or modify it
# under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# OpenQuake is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with OpenQuake. If not, see <http://www.gnu.org/licenses/>.
"""
Module exports
:class:`ZhaoEtAl2006AscSWISS05`,
:class:`ZhaoEtAl2006AscSWISS03`,
:class:`ZhaoEtAl2006AscSWISS08`.
"""
from __future__ import division
import numpy as np
from openquake.hazardlib.gsim.base import CoeffsTable
from openquake.hazardlib import const
from openquake.hazardlib.imt import PGA, SA
from openquake.hazardlib.gsim.zhao_2006 import ZhaoEtAl2006Asc
from openquake.hazardlib.gsim.zhao_2006_swiss_coeffs import (
    COEFFS_FS_ROCK_SWISS05,
COEFFS_FS_ROCK_SWISS03,
COEFFS_FS_ROCK_SWISS08
)
from openquake.hazardlib.gsim.utils_swiss_gmpe import _apply_adjustments
class ZhaoEtAl2006AscSWISS05(ZhaoEtAl2006Asc):
"""
This class extends :class:ZhaoEtAl2006Asc,
adjusted to be used for the Swiss Hazard Model [2014].
    This GMPE is valid for a fixed value of vs30=700m/s
#. kappa value
K-adjustments corresponding to model 01 - as prepared by Ben Edwards
    K-values for PGA were not provided but were inferred from SA[0.01s]
the model applies to a fixed value of vs30=700m/s to match the
reference vs30=1100m/s
#. small-magnitude correction
#. single station sigma - inter-event magnitude/distance adjustment
Disclaimer: these equations are modified to be used for the
Swiss Seismic Hazard Model [2014].
The hazard modeller is solely responsible for the use of this GMPE
in a different tectonic context.
Model implemented by laurentiu.danciu@gmail.com
"""
# Supported standard deviation type is only total, but reported as a
# combination of mean and magnitude/distance single station sigma
DEFINED_FOR_STANDARD_DEVIATION_TYPES = set([const.StdDev.TOTAL])
DEFINED_FOR_INTENSITY_MEASURE_TYPES = set([
PGA,
SA
])
def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types):
"""
See :meth:`superclass method
<.base.GroundShakingIntensityModel.get_mean_and_stddevs>`
for spec of input and result values.
"""
sites.vs30 = 700 * np.ones(len(sites.vs30))
mean, stddevs = super(ZhaoEtAl2006AscSWISS05, self).\
get_mean_and_stddevs(sites, rup, dists, imt, stddev_types)
tau_ss = 'tauC'
log_phi_ss = 1.00
C = ZhaoEtAl2006AscSWISS05.COEFFS_ASC
mean, stddevs = _apply_adjustments(
C, self.COEFFS_FS_ROCK[imt], tau_ss,
mean, stddevs, sites, rup, dists.rrup, imt, stddev_types,
log_phi_ss)
return mean, stddevs
COEFFS_FS_ROCK = COEFFS_FS_ROCK_SWISS05
#: Original Coefficient table
COEFFS_ASC = CoeffsTable(sa_damping=5, table="""\
IMT a b c d e FR CH C1 C2 C3 C4 sigma QC WC tauC
pga 1.101 -0.00564 0.0055 1.080 0.01412 0.251 0.293 1.111 1.344 1.355 1.420 0.604 0.0 0.0 0.303
0.05 1.076 -0.00671 0.0075 1.060 0.01463 0.251 0.939 1.684 1.793 1.747 1.814 0.640 0.0 0.0 0.326
0.10 1.118 -0.00787 0.0090 1.083 0.01423 0.240 1.499 2.061 2.135 2.031 2.082 0.694 0.0 0.0 0.342
0.15 1.134 -0.00722 0.0100 1.053 0.01509 0.251 1.462 1.916 2.168 2.052 2.113 0.702 0.0 0.0 0.331
0.20 1.147 -0.00659 0.0120 1.014 0.01462 0.260 1.280 1.669 2.085 2.001 2.030 0.692 0.0 0.0 0.312
0.25 1.149 -0.00590 0.0140 0.966 0.01459 0.269 1.121 1.468 1.942 1.941 1.937 0.682 0.0 0.0 0.298
0.30 1.163 -0.00520 0.0150 0.934 0.01458 0.259 0.852 1.172 1.683 1.808 1.770 0.670 0.0 0.0 0.300
0.40 1.200 -0.00422 0.0100 0.959 0.01257 0.248 0.365 0.655 1.127 1.482 1.397 0.659 0.0 0.0 0.346
0.50 1.250 -0.00338 0.0060 1.008 0.01114 0.247 -0.207 0.071 0.515 0.934 0.955 0.653 -0.0126 0.0116 0.338
0.60 1.293 -0.00282 0.0030 1.088 0.01019 0.233 -0.705 -0.429 -0.003 0.394 0.559 0.653 -0.0329 0.0202 0.349
0.70 1.336 -0.00258 0.0025 1.084 0.00979 0.220 -1.144 -0.866 -0.449 -0.111 0.188 0.652 -0.0501 0.0274 0.351
0.80 1.386 -0.00242 0.0022 1.088 0.00944 0.232 -1.609 -1.325 -0.928 -0.620 -0.246 0.647 -0.0650 0.0336 0.356
0.90 1.433 -0.00232 0.0020 1.109 0.00972 0.220 -2.023 -1.732 -1.349 -1.066 -0.643 0.653 -0.0781 0.0391 0.348
1.00 1.479 -0.00220 0.0020 1.115 0.01005 0.211 -2.451 -2.152 -1.776 -1.523 -1.084 0.657 -0.0899 0.0440 0.338
1.25 1.551 -0.00207 0.0020 1.083 0.01003 0.251 -3.243 -2.923 -2.542 -2.327 -1.936 0.660 -0.1148 0.0545 0.313
1.50 1.621 -0.00224 0.0020 1.091 0.00928 0.248 -3.888 -3.548 -3.169 -2.979 -2.661 0.664 -0.1351 0.0630 0.306
2.00 1.694 -0.00201 0.0025 1.055 0.00833 0.263 -4.783 -4.410 -4.039 -3.871 -3.640 0.669 -0.1672 0.0764 0.283
2.50 1.748 -0.00187 0.0028 1.052 0.00776 0.262 -5.444 -5.049 -4.698 -4.496 -4.341 0.671 -0.1921 0.0869 0.287
3.00 1.759 -0.00147 0.0032 1.025 0.00644 0.307 -5.839 -5.431 -5.089 -4.893 -4.758 0.667 -0.2124 0.0954 0.278
4.00 1.826 -0.00195 0.0040 1.044 0.00590 0.353 -6.598 -6.181 -5.882 -5.698 -5.588 0.647 -0.2445 0.1088 0.273
5.00 1.825 -0.00237 0.0050 1.065 0.00510 0.248 -6.752 -6.347 -6.051 -5.873 -5.798 0.643 -0.2694 0.1193 0.275
""")
class ZhaoEtAl2006AscSWISS03(ZhaoEtAl2006AscSWISS05):
"""
    This class extends :class:ZhaoEtAl2006Asc, following the same strategy
as for :class:ZhaoEtAl2006AscSWISS05
"""
COEFFS_FS_ROCK = COEFFS_FS_ROCK_SWISS03
class ZhaoEtAl2006AscSWISS08(ZhaoEtAl2006AscSWISS05):
"""
    This class extends :class:ZhaoEtAl2006Asc, following the same strategy
as for :class:ZhaoEtAl2006AscSWISS05 to be used for the
Swiss Hazard Model [2014].
"""
COEFFS_FS_ROCK = COEFFS_FS_ROCK_SWISS08
|
aspose-words/Aspose.Words-for-Java
|
Plugins/Aspose_Words_Java_for_Jython/asposewords/quickstart/UpdateFields.py
|
Python
|
mit
| 2,621
| 0.007631
|
from asposewords import Settings
from com.aspose.words import Document
from com.aspose.words import BreakType
from com.aspose.words import DocumentBuilder
from com.aspose.words import StyleIdentifier
class UpdateFields:
def __init__(self):
dataDir = Settings.dataDir + 'quickstart/'
# Demonstrates how to insert fields and update them using Aspose.Words.
# First create a blank document.
doc = Document()
# Use the document builder to insert some content and fields.
builder = DocumentBuilder(doc)
# Insert a table of contents at the beginning of the document.
builder.insertTableOfContents("\\o \"1-3\" \\h \\z \\u")
builder.writeln()
# Insert some other fields.
builder.write("Page: ")
builder.insertField("PAGE")
builder.write(" of ")
builder.insertField("NUMPAGES")
builder.writeln()
builder.write("Date: "
|
)
builder.insertField("DATE")
# Start the actual document content on the second page.
builder.insertBreak(BreakType.SECTION_BREAK_NEW_PAGE)
# Build a document with complex structure by applying different heading styles thus creating TOC entries.
builder.getParagraphFormat().setStyleIdentifier(StyleIdentifier.HEADING_1)
builder.writeln("Heading 1")
        builder.getParagraphFormat().setStyleIdentifier(StyleIdentifier.HEADING_2)
builder.writeln("Heading 1.1")
builder.writeln("Heading 1.2")
builder.getParagraphFormat().setStyleIdentifier(StyleIdentifier.HEADING_1)
builder.writeln("Heading 2")
builder.writeln("Heading 3")
# Move to the next page.
builder.insertBreak(BreakType.PAGE_BREAK)
builder.getParagraphFormat().setStyleIdentifier(StyleIdentifier.HEADING_2)
builder.writeln("Heading 3.1")
builder.getParagraphFormat().setStyleIdentifier(StyleIdentifier.HEADING_3)
builder.writeln("Heading 3.1.1")
builder.writeln("Heading 3.1.2")
builder.writeln("Heading 3.1.3")
builder.getParagraphFormat().setStyleIdentifier(StyleIdentifier.HEADING_2)
builder.writeln("Heading 3.2")
builder.writeln("Heading 3.3")
print "Updating all fields in the document."
# Call the method below to update the TOC.
doc.updateFields()
doc.save(dataDir + "Document Field Update Out.docx")
print "Fields updated in the document successfully."
if __name__ == '__main__':
UpdateFields()
|
kcsaff/getkey
|
tests/unit/test_getkey.py
|
Python
|
mit
| 1,320
| 0
|
# -*- coding: utf-8 -*-
import unittest
from getkey.platforms import PlatformTest
def readchar_fn_factory(stream):
v = [x for x in stream]
def inner(blocking=False):
return v.pop(0)
return inner
class TestGetkey(unittest.TestCase):
def test_basic_character(self):
getkey = PlatformTest('a').getkey
result = getkey()
self.assertEqual('a', result)
def test_string_instead_of_char(self):
char = 'a'
getkey = PlatformTest(char + 'bcde').getkey
result = getkey()
self.assertEqual(char, result)
    def test_special_combo_character(self):
char = '\x1b\x01'
getkey = PlatformTest(char + 'foo').getkey
result = getkey()
self.assertEqual(char, result)
def test_special_key(self):
char = '\x1b\x5b\x41'
getkey = PlatformTest(char + 'foo').getkey
        result = getkey()
self.assertEqual(char, result)
def test_special_key_combo(self):
char = '\x1b\x5b\x33\x5e'
getkey = PlatformTest(char + 'foo').getkey
result = getkey()
self.assertEqual(char, result)
def test_unicode_character(self):
text = u'Ángel'
getkey = PlatformTest(text).getkey
result = getkey()
self.assertEqual(u'Á', result)
|
JesGor/test_rest
|
apprest/models.py
|
Python
|
gpl-2.0
| 432
| 0.032407
|
from django.db import models
class Empresa(models.Model):
nombre = models.CharField(max_length=100)
ciudad = models.CharField(max_length=50)
sector = models.CharField(max_length=200)
def __str__(self):
        return self.nombre
class Calificacion(models.Model):
alumno = models.CharField(max_length=100)
calificacion = models.IntegerField(default=0)
empresa = models.ForeignKey(Empresa)
def __str__(self):
|
return self.alumno
|
espenak/django_seleniumhelpers
|
seleniumhelpers/__init__.py
|
Python
|
bsd-3-clause
| 150
| 0
|
from seleniumhelpers import SeleniumTestCase
from seleniumhelpers import get_default_timeout
from seleniumhelpers import get_setting_with_envfallback
|
Conchylicultor/MusicGenerator
|
deepmusic/modules/decoder.py
|
Python
|
apache-2.0
| 7,461
| 0.002949
|
# Copyright 2016 Conchylicultor. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""
"""
import tensorflow as tf
import deepmusic.tfutils as tfutils
import deepmusic.songstruct as music
# TODO: Some class from the encoder and decoder are really similar. Could they be merged ?
class DecoderNetwork:
""" Predict a keyboard configuration at step t
This is just an abstract class
Warning: To encapsulate the weights in the right tf scope, they should be defined
within the build function
"""
def __init__(self, args):
"""
Args:
args: parameters of the model
"""
self.args = args
def build(self):
""" Initialize the weights of the model
"""
pass
def init_state(self):
""" Return the initial cell state
"""
return None
def get_cell(self, prev_keyboard, prev_state_enco):
""" Predict the next keyboard state
Args:
prev_keyboard (?): the previous keyboard configuration
prev_state_enco (?): the encoder output state
Return:
Tuple: A tuple containing the predicted keyboard configuration and last decoder state
"""
raise NotImplementedError('Abstract class')
class Rnn(DecoderNetwork):
""" Predict a keyboard configuration at step t
Use a RNN to predict the next configuration
"""
@staticmethod
def get_module_id():
return 'rnn'
def __init__(self, args):
"""
Args:
args: parameters of the model
"""
super().__init__(args)
self.rnn_cell = None
self.project_key = None # Fct which project the decoder output into a single key space
def build(self):
""" Initialize the weights of the model
"""
self.rnn_cell = tfutils.get_rnn_cell(self.args, "deco_cell")
self.project_key = tfutils.single_layer_perceptron([self.args.hidden_size, 1],
'project_key')
def init_state(self):
""" Return the initial cell state
"""
return self.rnn_cell.zero_state(batch_size=self.args.batch_size, dtype=tf.float32)
def get_cell(self, prev_keyboard, prev_state_enco):
""" a RNN decoder
See parent class for arguments details
"""
axis = 1 # The first dimension is the batch, we split the keys
assert prev_keyboard.get_shape()[axis].value == music.NB_NOTES
inputs = tf.split(axis, music.NB_NOTES, prev_keyboard)
outputs, final_state = tf.nn.seq2seq.rnn_decoder(
decoder_inputs=inputs,
initial_state=prev_state_enco,
cell=self.rnn_cell
# TODO: Which loop function (should use prediction) ? : Should take the previous generated input/ground truth (as the global model loop_fct). Need to add a new bool placeholder
)
# Is it better to do the projection before or after the packing ?
next_keys = []
for output in outputs:
next_keys.append(self.project_key(output))
next_keyboard = tf.concat(axis, next_keys)
return next_keyboard, final_state
class Perceptron(DecoderNetwork):
""" Single layer perceptron. Just a proof of concept for the architecture
"""
@staticmethod
def get_module_id():
return 'perceptron'
def __init__(self, args):
"""
Args:
args: parameters of the model
"""
super().__init__(args)
self.project_hidden = None # Fct which decode the previous state
self.project_keyboard = None # Fct which project the decoder output into the keyboard space
def build(self):
""" Initialize the weights of the model
"""
# For projecting on the keyboard space
self.project_hidden = tfutils.single_layer_perceptron([music.NB_NOTES, self.args.hidden_size],
'project_hidden')
# For projecting on the keyboard space
self.project_keyboard = tfutils.single_layer_perceptron([self.args.hidden_size, music.NB_NOTES],
'project_keyboard') # Should we do the activation sigmoid here ?
def get_cell(self, prev_keyboard, prev_state_enco):
""" Simple 1 hidden layer perceptron
See parent class for arguments details
"""
# Don't change the state
next_state_deco = prev_state_enco # Return the last state (Useful ?)
# Compute the next output
hidden_state = self.project_hidden(prev_keyboard)
        next_keyboard = self.project_keyboard(hidden_state)  # Should we do the activation sigmoid here ? Maybe not because the loss function does it
return next_keyboard, next_state_deco
class Lstm(DecoderNetwork):
""" Multi-layer Lstm. Just a wrapper around the official tf
"""
@staticmethod
def get_module_id():
return 'lstm'
def __init__(self, args, *module_args):
"""
Args:
args: parameters of the model
"""
        super().__init__(args)
self.args = args
self.rnn_cell = None
        self.project_keyboard = None  # Fct which projects the decoder output into the output space
def build(self):
""" Initialize the weights of the model
"""
# TODO: Control over the the Cell using module arguments instead of global arguments (hidden_size and num_layer) !!
# RNN network
rnn_cell = tf.nn.rnn_cell.BasicLSTMCell(self.args.hidden_size, state_is_tuple=True) # Or GRUCell, LSTMCell(args.hidden_size)
if not self.args.test: # TODO: Should use a placeholder instead
rnn_cell = tf.nn.rnn_cell.DropoutWrapper(rnn_cell, input_keep_prob=1.0, output_keep_prob=0.9) # TODO: Custom values
rnn_cell = tf.nn.rnn_cell.MultiRNNCell([rnn_cell] * self.args.num_layers, state_is_tuple=True)
self.rnn_cell = rnn_cell
# For projecting on the keyboard space
self.project_output = tfutils.single_layer_perceptron([self.args.hidden_size, 12 + 1], # TODO: HACK: Input/output space hardcoded !!!
'project_output') # Should we do the activation sigmoid here ?
def init_state(self):
""" Return the initial cell state
"""
return self.rnn_cell.zero_state(batch_size=self.args.batch_size, dtype=tf.float32)
def get_cell(self, prev_input, prev_states):
"""
"""
next_output, next_state = self.rnn_cell(prev_input, prev_states[1])
next_output = self.project_output(next_output)
# No activation function here: SoftMax is computed by the loss function
return next_output, next_state
|
ionomy/ion
|
test/functional/p2p-fingerprint.py
|
Python
|
mit
| 5,852
| 0.001025
|
#!/usr/bin/env python3
# Copyright (c) 2017 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test various fingerprinting protections.
If a stale block more than a month old or its header is requested by a peer,
the node should pretend that it does not have it to avoid fingerprinting.
"""
import threading
import time
from test_framework.blocktools import (create_block, create_coinbase)
from test_framework.mininode import (
CInv,
NodeConnCB,
msg_headers,
msg_block,
msg_getdata,
msg_getheaders,
network_thread_start,
wait_until,
)
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
p2p_port)
class P2PFingerprintTest(BitcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 1
# Build a chain of blocks on top of given one
def build_chain(self, nblocks, prev_hash, prev_height, prev_median_time):
blocks = []
for _ in range(nblocks):
coinbase = create_coinbase(prev_height + 1)
block_time = prev_median_time + 1
block = create_block(int(prev_hash, 16), coinbase, block_time)
block.solve()
blocks.append(block)
prev_hash = block.hash
prev_height += 1
prev_median_time = block_time
return blocks
# Send a getdata request for a given block hash
def send_block_request(self, block_hash, node):
msg = msg_getdata()
msg.inv.append(CInv(2, block_hash)) # 2 == "Block"
node.send_message(msg)
# Send a getheaders request for a given single block hash
def send_header_request(self, block_hash, node):
msg = msg_getheaders()
msg.hashstop = block_hash
node.send_message(msg)
# Check whether last block received from node has a given hash
def last_block_equals(self, expected_hash, node):
block_msg = node.last_message.get("block")
return block_msg and block_msg.block.rehash() == expected_hash
# Check whether last block header received from node has a given hash
def last_header_equals(self, expected_hash, node):
headers_msg = node.last_message.get("headers")
return (headers_msg and
headers_msg.headers and
headers_msg.headers[0].rehash() == expected_hash)
# Checks that stale blocks timestamped more than a month ago are not served
# by the node while recent stale blocks and old active chain blocks are.
# This does not currently test that stale blocks timestamped within the
# last month but that have over a month's worth of work are also withheld.
def run_test(self):
node0 = self.nodes[0].add_p2p_connection(NodeConnCB())
network_thread_start()
node0.wait_for_verack()
# Set node time to 60 days ago
self.nodes[0].setmocktime(int(time.time()) - 60 * 24 * 60 * 60)
# Generating a chain of 10 blocks
block_hashes = self.nodes[0].generate(nblocks=10)
# Create longer chain starting 2 blocks before current tip
height = len(block_hashes) - 2
block_hash = block_hashes[height - 1]
block_time = self.nodes[0].getblockheader(block_hash)["mediantime"] + 1
new_blocks = self.build_chain(5, block_hash, height, block_time)
# Force reorg to a longer chain
node0.send_message(msg_headers(new_blocks))
node0.wait_for_getdata()
for block in new_blocks:
node0.send_and_ping(msg_block(block))
# Check that reorg succeeded
assert_equal(self.nodes[0].getblockcount(), 13)
stale_hash = int(block_hashes[-1], 16)
# Check that getdata request for stale block succeeds
self.send_block_request(stale_hash, node0)
test_function = lambda: self.last_block_equals(stale_hash, node0)
wait_until(test_function, timeout=3)
# Check that getheader request for stale block header succeeds
self.send_header_request(stale_hash, node0)
|
test_function = lambda: self.last_header_equals(stale_hash, node0)
wait_until(test_function, timeout=3)
# Longest chain is extended so stale is much older than chain tip
self.nodes[0].setmocktime(0)
tip = self.nodes[0].generate(nblocks=1)[0]
assert_equal(self.nodes[0].getblockcount(), 14)
# Send getdata & getheaders to refresh last received getheader message
block_hash = int(tip, 16)
self.send_block_request(block_hash, node0)
        self.send_header_request(block_hash, node0)
node0.sync_with_ping()
# Request for very old stale block should now fail
self.send_block_request(stale_hash, node0)
time.sleep(3)
assert not self.last_block_equals(stale_hash, node0)
# Request for very old stale block header should now fail
self.send_header_request(stale_hash, node0)
time.sleep(3)
assert not self.last_header_equals(stale_hash, node0)
# Verify we can fetch very old blocks and headers on the active chain
block_hash = int(block_hashes[2], 16)
self.send_block_request(block_hash, node0)
self.send_header_request(block_hash, node0)
node0.sync_with_ping()
self.send_block_request(block_hash, node0)
test_function = lambda: self.last_block_equals(block_hash, node0)
wait_until(test_function, timeout=3)
self.send_header_request(block_hash, node0)
test_function = lambda: self.last_header_equals(block_hash, node0)
wait_until(test_function, timeout=3)
if __name__ == '__main__':
P2PFingerprintTest().main()
|
jluscher/SCANIT
|
scanit_v033.py
|
Python
|
cc0-1.0
| 86,949
| 0.018045
|
#! /usr/bin/env python3
#
# SCANIT - Control A spectrometer and collect data
#
# LICENSE:
# This work is licensed under the Creative Commons Zero License
# Creative Commons CC0.
#    To view a copy of this license, visit
# http://directory.fsf.org/wiki/License:CC0
# or send a letter to:
# Creative Commons, PO Box 1866, Mountain View, CA 94042, USA.
#
# Author: James Luscher, jluscher@gmail.com
#
import sys, string, time
import serial
#
from pathlib import Path
#
from tkinter import *
from tkinter import font
from tkinter import filedialog
from tkinter.ttk import Progressbar
# from tkinter import ttk
# from tkinter.scrolledtext import *
import tkinter.messagebox as mBox
# import tkinter.simpledialog as simpledialog
import matplotlib
from matplotlib.widgets import Cursor
matplotlib.use('TkAgg')
from matplotlib.backends.backend_tkagg import FigureCanvasTkAgg
from matplotlib.figure import Figure
import numpy
from numpy import searchsorted
siTitle = 'SCANIT for RetroSPEX [v033]' # Program name and version
TANBG = '#F8E2CD' # Background color
WARNC = '#FFBBFF' # Warning color (pinkish)
ACTIVB = '#F07748' # activebackground color for buttons
#
jjltest = True # print messages, testing
comtest = False # print communication diagnostic details (much!)
## code based on example from: http://robotic-controls.com/learn/python-guis/tkinter-serial
# modified for Python3
#
# Serial() argument added: rtscts=1
#
## NOTE: PATCH @ http://sourceforge.net/p/pyserial/patches/37/
# /usr/local/lib/python3.4/dist-packages/serial/serialposix.py
# at (about) line # 480:
# except select.error as e:
# # ignore EAGAIN errors. all other errors are shown
# * # see also http://www.python.org/dev/peps/pep-3151/#select
# * # patch: James Luscher (re:
# * # http://sourceforge.net/p/pyserial/patches/37/ )
# * #if e[0] != errno.EAGAIN:
# * if e.errno != errno.EAGAIN:
# raise SerialException('read failed: %s' % (e,))
#
# communication commands
COMmands = '''
Command => Response Command sent => Response received
----------------------- LOW LEVEL -- FPGA --------------
? => <Help text> Help (display SPEX commands)
c => <header> Clear Screen
i => i Warm Initialize
f => f Reset FPGA
r AA => r AA DD Read DD from address AA (hex)
w AA DD => w AA DD Write data DD to address AA (or V/V??)
s => s AA DD Show AdrREG and DataReg (AA DD ??)
p => p FF Report PMT Control Register setting
b => B n Report Button State, 0/1 (Off/On)
v n => v n Verbose n=0/1 (Off/On)
----------------------- HIGH LEVEL -- SPECTROMETER -----
L n => L n Set LED n=0/1 (Off/On)
D n FFFF => D n FFFF Load DAC #n with FFFF (hex)
A n => A n FFFF Report High Voltage on #n
E n => E n Enable PMT counter #n (0~7), Clears count
T 7FFFFFFF => T 7FFFFFFF Set Integration time, milliseconds
> => Wait For Bang Start Measurement ('!' signals Done)
P n => P n FFFEFFFEFFFF Dump PMT counter #n (0~2)
X s7FFFFFFF => X s7FFFFFFF Move eXcitation, s=+/- (direction), 7FFFFFFF (steps)
M s7FFFFFFF => M s7FFFFFFF Move eMission, s=+/- (direction), 7FFFFFFF (steps)
----------------------- CONTROLLER INITIATED ALERTS ----
=> ! FF Limit reached [bits?] Motion (done?) time (done?)
=> # n Button activity (reports state? 0/1 (Off/On))
'''
COMchr0 = list('?cifrwspbvLDAET>PXM')
RSPalert = ['!','#']
RSPnorm = ['?','r','s','p','B','A','P']
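#
# Added illustrative sketch (not part of the original program): one way a single
# command/response exchange from the table above could be wrapped with pyserial.
# The function name, the newline terminator and the use of readline() are
# assumptions here; the real program buffers serial traffic itself through the
# serInBuffer / serOutBuffer variables defined below.
def exampleSendCommand(port, command):
    """Send one SPEX command (e.g. 'T 000003E8' = 1000 ms integration time)
    and return the echoed reply line (e.g. 'T 000003E8')."""
    port.write((command + '\n').encode())    # commands are short ASCII lines
    return port.readline().decode().strip()  # controller echoes the command back
#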
#make our own buffers
serialPort = None # we always start before any port is found
portName = 'OFFLINE' # ... and any connection established
serOutReady = False # RetroSPEX has been Initialized
#
serInBuffer = "" # 'string' type (character input storage)
serOutBuffer = "".encode() # 'byte' type
serInLines = [] # list of complete input lines
#=====================================================================
## SCANIT Window (GUI window for Spectrometer Control & Data Capture)
#
siWinW = 1260 # width
siWinH = 760 # height
#
siWin = Tk()
siWin.title(siTitle)
siWin['bg'] = TANBG # background color
if jjltest:
siWin.geometry('+670+50') # shift to right for testing
transGeom = '+780+250' # ... for 'transient' screens
else:
siWin.geometry('{}x{}+0+0'.format(siWinW,siWinH))
transGeom = '+110+200' # ... for 'transient' screens
#siWin.geometry('{}x{}+80+50'.format(siWinW,siWinH))
#siWin.geometry('+50+50') # window in upper-left of screen
#
monoFont10 = font.Font(family='Ubuntu Mono', size=10)
monoFont12 = font.Font(family='Ubuntu Mono', size=12)
monoFont14 = font.Font(family='Ubuntu Mono', size=14)
monoFont16 = font.Font(family='Ubuntu Mono', size=16)
monoFont24 = font.Font(family='Ubuntu Mono', size=24)
#=====================================================================
## Global variables (for Spectrometer Control & Data Capture)
#
#==============
# settings: configuration data (from 'settings.txt')
#
# User Default Settings to be used for Measurement
# (settable and saved/restored)
varEXinc = StringVar() # Setting EX Inc Wavelength (nm)
varEMinc = StringVar() # Setting EM Inc Wavelength (nm)
varTMinc = StringVar() # Setting TM Inc time (s)
varEXslit = StringVar() # Slit size EX (nm)
varEMslit = StringVar() # Slit size EM (nm)
varEMhv = StringVar() # EM PMT high voltage (v)
varREFhv = StringVar() # REF PMT high voltage (v)
varREFdiodeG = StringVar() # REF DIODE Gain setting [0,1,2,3]
#
#==============
# Live Data (acquired)
#
varLiveEMhv = StringVar() # Live EM PMT high voltage (v)
varLiveREFhv = StringVar() # Live REF PMT high voltage (v)
varLiveEXpos = StringVar() # Live Excitation position nm
varLiveEMpos = StringVar() # Live Excitation position nm
varLiveSignal = StringVar() # Live Signal (PMT) reading (counts)
varLiveReference = StringVar() # Live Reference (diode/PMT) reading (counts)
#
# Transient states
offLine = True # No Spectrometer connection made (serial - USB)
#
#==============
# scan data acquired
#
varScanDataFileName = StringVar() # File name (path) where Scan Data was saved
varScanDataFileName.set('') # none initially
#
scanDataX = [] # X value sample was taken at (wavelength / time)
scanDataY = [] # Y value of sample - PMT counts
#
ax = None # forward reference for Plot Object (setPlotTitle())
#
#==============
# background: input data from previous scan (for reference)
#
varRefFileName = StringVar() # File name (path) for Reference Data in Plot
varRefFileName.set('') # none initially
#
inputFileHdr = [] # Header section from fileLoad
inputFileData = [] # Data section from fileload
#
backgroundDataX = [] # X value sample was taken at (wavelength / time)
backgroundDataY = [] # Y value of sample - PMT counts
#
#==============
# dayfile: data about the experiments being done today
#
dayFileData = [] # Data section from fileload / or for writing
#
varDayDate = StringVar() # Date this data was entered
varDayMeaning1 = StringVar() # Meaning of Experiment
varDayMeaning2 = StringVar() # Meaning of Experiment
varDayMeaning3 = StringVar() # Meaning of Experiment
varDayEXslit = StringVar() # Excitation slit wavelength nm
varDayEMslit = StringVar() # Emission slit Wavelength nm
varDayBulb = StringVar() # Bulb Intensity
varDayNotebook = StringVar() # Notebook Page
varDayOther1 = StringVar() # Other comments
varDayOther2 = StringVar() # Other comments
varDayOther3 = StringVar() # Other comments
#
#==============
# type of scan
EXscan = 0
EMscan = 1
TMscan = 2
scanName = [ 'EX', 'EM', 'TM' ]
varScanMode = IntVar() # Determines type of scan taken
#
# settings used for scanned data waveforms
#
varEXwaveStart = Strin
|
leandroreox/gnocchi
|
gnocchi/incoming/file.py
|
Python
|
apache-2.0
| 7,168
| 0
|
# -*- encoding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import contextlib
import datetime
import errno
import json
import os
import shutil
import tempfile
import uuid
import numpy
import six
from gnocchi import incoming
from gnocchi import utils
class FileStorage(incoming.IncomingDriver):
def __init__(self, conf):
super(FileStorage, self).__init__(conf)
self.basepath = conf.file_basepath
self.basepath_tmp = os.path.join(self.basepath, 'tmp')
def __str__(self):
return "%s: %s" % (self.__class__.__name__, str(self.basepath))
def upgrade(self, num_sacks):
super(FileStorage, self).upgrade(num_sacks)
utils.ensure_paths([self.basepath_tmp])
def get_storage_sacks(self):
try:
with open(os.path.join(self.basepath_tmp, self.CFG_PREFIX),
'r') as f:
return json.load(f)[self.CFG_SACKS]
except IOError as e:
if e.errno == errno.ENOENT:
return
raise
def set_storage_settings(self, num_sacks):
data = {self.CFG_SACKS: num_sacks}
with open(os.path.join(self.basepath_tmp, self.CFG_PREFIX), 'w') as f:
json.dump(data, f)
utils.ensure_paths([self._sack_path(i)
for i in six.moves.range(self.NUM_SACKS)])
def remove_sack_group(self, num_sacks):
prefix = self.get_sack_prefix(num_sacks)
for i in six.moves.xrange(num_sacks):
shutil.rmtree(os.path.join(self.basepath, prefix % i))
def _sack_path(self, sack):
return os.path.join(self.basepath, self.get_sack_name(sack))
def _measure_path(self, sack, metric_id):
return os.path.join(self._sack_path(sack), six.text_type(metric_id))
def _build_measure_path(self, metric_id, random_id=None):
sack = self.sack_for_metric(metric_id)
path = self._measure_path(sack, metric_id)
if random_id:
if random_id is True:
now = datetime.datetime.utcnow().strftime("_%Y%m%d_%H:%M:%S")
random_id = six.text_type(uuid.uuid4()) + now
return os.path.join(path, random_id)
return path
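    # Added illustrative note (editor's comment, not in the original source):
    # new measures for a metric therefore end up under
    #   <file_basepath>/<sack directory>/<metric uuid>/<random uuid>_YYYYMMDD_HH:MM:SS
    # where the sack directory name comes from get_sack_name() in the base driver.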
def _store_new_measures(self, metric, data):
tmpfile = tempfile.NamedTemporaryFile(
prefix='gnocchi', dir=self.basepath_tmp,
delete=False)
tmpfile.write(data)
tmpfile.close()
path = self._build_measure_path(metric.id, True)
while True:
try:
                os.rename(tmpfile.name, path)
break
except OSError as e:
if e.errno != errno.ENOENT:
raise
try:
os.mkdir(self._build_measure_path(metric.id))
except OSError as e:
# NOTE(jd) It's possible that another process created the
# path just before us! In this case, good for us, let's do
# nothing then! (see bug #1475684)
if e.errno != errno.EEXIST:
raise
def _build_report(self, details):
report_vars = {'metrics': 0, 'measures': 0, 'metric_details': {}}
if details:
def build_metric_report(metric, sack):
report_vars['metric_details'][metric] = len(
self._list_measures_container_for_metric_id_str(sack,
metric))
else:
def build_metric_report(metric, sack):
report_vars['metrics'] += 1
report_vars['measures'] += len(
self._list_measures_container_for_metric_id_str(sack,
metric))
for i in six.moves.range(self.NUM_SACKS):
for metric in self.list_metric_with_measures_to_process(i):
build_metric_report(metric, i)
return (report_vars['metrics'] or
len(report_vars['metric_details'].keys()),
report_vars['measures'] or
sum(report_vars['metric_details'].values()),
report_vars['metric_details'] if details else None)
def list_metric_with_measures_to_process(self, sack):
return set(self._list_target(self._sack_path(sack)))
def _list_measures_container_for_metric_id_str(self, sack, metric_id):
return self._list_target(self._measure_path(sack, metric_id))
def _list_measures_container_for_metric_id(self, metric_id):
return self._list_target(self._build_measure_path(metric_id))
@staticmethod
def _list_target(target):
try:
return os.listdir(target)
except OSError as e:
# Some other process treated this one, then do nothing
if e.errno == errno.ENOENT:
return []
raise
def _delete_measures_files_for_metric_id(self, metric_id, files):
for f in files:
try:
os.unlink(self._build_measure_path(metric_id, f))
except OSError as e:
# Another process deleted it in the meantime, no prob'
if e.errno != errno.ENOENT:
raise
try:
os.rmdir(self._build_measure_path(metric_id))
except OSError as e:
# ENOENT: ok, it has been removed at almost the same time
# by another process
# ENOTEMPTY: ok, someone pushed measure in the meantime,
# we'll delete the measures and directory later
# EEXIST: some systems use this instead of ENOTEMPTY
if e.errno not in (errno.ENOENT, errno.ENOTEMPTY, errno.EEXIST):
raise
def delete_unprocessed_measures_for_metric_id(self, metric_id):
files = self._list_measures_container_for_metric_id(metric_id)
self._delete_measures_files_for_metric_id(metric_id, files)
def has_unprocessed(self, metric):
return os.path.isdir(self._build_measure_path(metric.id))
@contextlib.contextmanager
def process_measure_for_metric(self, metric):
files = self._list_measures_container_for_metric_id(metric.id)
measures = self._make_measures_array()
for f in files:
abspath = self._build_measure_path(metric.id, f)
with open(abspath, "rb") as e:
measures = numpy.append(
measures, self._unserialize_measures(f, e.read()))
yield measures
self._delete_measures_files_for_metric_id(metric.id, files)
|
HopeFOAM/HopeFOAM
|
ThirdParty-0.1/ParaView-5.0.1/Examples/Catalyst/PythonDolfinExample/simulation-catalyst-step1.py
|
Python
|
gpl-3.0
| 4,910
| 0.003259
|
"""This demo program solves the incompressible Navier-Stokes equations
on an L-shaped domain using Chorin's splitting method."""
# Copyright (C) 2010-2011 Anders Logg
#
# This file is part of DOLFIN.
#
# DOLFIN is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# DOLFIN is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with DOLFIN. If not, see <http://www.gnu.org/licenses/>.
#
# Modified by Mikael Mortensen 2011
#
# First added: 2010-08-30
# Last changed: 2011-06-30
#
# SC14 Paraview's Catalyst tutorial
#
# Step 1 : initialization
#
# [SC14-Catalyst] we need a python environment that enables import of both Dolfin and ParaView
execfile("simulation-env.py")
# [SC14-Catalyst] import paraview, vtk and paraview's simple API
import sys
import paraview
import paraview.vtk as vtk
import paraview.simple as pvsimple
# [SC14-Catalyst] check for command line arguments
if len(sys.argv) != 3:
print "command is 'python",sys.argv[0],"<script name> <number of time steps>'"
sys.exit(1)
# [SC14-Catalyst] initialize and read input parameters
paraview.options.batch = True
paraview.options.symmetric = True
# [SC14-Catalyst] import user co-processing script
import vtkPVCatalystPython
import os
scriptpath, scriptname = os.path.split(sys.argv[1])
sys.path.append(scriptpath)
if scriptname.endswith(".py"):
print 'script name is ', scriptname
scriptname = scriptname[0:len(scriptname)-3]
try:
cpscript = __import__(scriptname)
except:
print sys.exc_info()
print 'Cannot find ', scriptname, ' -- no coprocessing will be performed.'
sys.exit(1)
# Begin demo
from dolfin import *
# Print log messages only from the root process in parallel
parameters["std_out_all_processes"] = False;
# Load mesh from file
mesh = Mesh(DOLFIN_EXAMPLE_DATA_DIR+"/lshape.xml.gz")
# Define function spaces (P2-P1)
V = VectorFunctionSpace(mesh, "Lagrange", 2)
Q = FunctionSpace(mesh, "Lagrange", 1)
# Define trial and test functions
u = TrialFunction(V)
p = TrialFunction(Q)
v = TestFunction(V)
q = TestFunction(Q)
# Set parameter values
dt = 0.01
T = 3
nu = 0.01
# Define time-dependent pressure boundary condition
p_in = Expression("sin(3.0*t)", t=0.0)
# Define boundary conditions
noslip = DirichletBC(V, (0, 0),
"on_boundary && \
(x[0] < DOLFIN_EPS | x[1] < DOLFIN_EPS | \
(x[0] > 0.5 - DOLFIN_EPS && x[1] > 0.5 - DOLFIN_EPS))")
inflow = DirichletBC(Q, p_in, "x[1] > 1.0 - DOLFIN_EPS")
outflow = DirichletBC(Q, 0, "x[0] > 1.0 - DOLFIN_EPS")
bcu = [noslip]
bcp = [inflow, outflow]
# Create functions
u0 = Function(V)
u1 = Function(V)
p1 = Function(Q)
# Define coefficients
k = Constant(dt)
f = Constant((0, 0))
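# Added note (editor's summary, not in the original demo): the three variational
# problems assembled below implement Chorin's splitting, advancing u^n to u^{n+1} via
#   1) tentative velocity:   (u* - u^n)/dt + (u^n . grad)u^n - nu*laplacian(u*) = f
#   2) pressure Poisson:     laplacian(p^{n+1}) = (1/dt) div(u*)
#   3) velocity correction:  u^{n+1} = u* - dt*grad(p^{n+1})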
# Tentative velocity step
F1 = (1/k)*inner(u - u0, v)*dx + inner(grad(u0)*u0, v)*dx + \
nu*inner(grad(u), grad(v))*dx - inner(f, v)*dx
a1 = lhs(F1)
L1 = rhs(F1)
# Pressure update
a2 = inner(grad(p), grad(q))*dx
L2 = -(1/k)*div(u1)*q*dx
# Velocity update
a3 = inner(u, v)*dx
L3 = inner(u1, v)*dx - k*inner(grad(p1), v)*dx
# Assemble matrices
A1 = assemble(a1)
A2 = assemble(a2)
A3 = assemble(a3)
# Use amg preconditioner if available
prec = "amg" if has_krylov_solver_preconditioner("amg") else "default"
# Create files for storing solution
ufile = File("results/velocity.pvd")
pfile = File("results/pressure.pvd")
# Time-stepping
maxtimestep = int(sys.argv[2])
tstep = 0
t = dt
while tstep < maxtimestep:
# Update pressure boundary condition
p_in.t = t
# Compute tentative velocity step
begin("Computing tentative velocity")
b1 = assemble(L1)
[bc.apply(A1, b1) for bc in bcu]
solve(A1, u1.vector(), b1, "gmres", "default")
end()
# Pressure correction
begin("Computing pressure correction")
b2 = assemble(L2)
[bc.apply(A2, b2) for bc in bcp]
solve(A2, p1.vector(), b2, "gmres", prec)
end()
# Velocity correction
begin("Computing velocity correction")
b3 = assemble(L3)
[bc.apply(A3, b3) for bc in bcu]
solve(A3, u1.vector(), b3, "gmres", "default")
end()
# Plot solution [SC14-Catalyst] Not anymore
# plot(p1, title="Pressure", rescale=True)
# plot(u1, title="Velocity", rescale=True)
# Save to file [SC14-Catalyst] Not anymore
# ufile << u1
# pfile << p1
# Move to next time step
u0.assign(u1)
t += dt
tstep += 1
print "t =", t, "step =",tstep
# Hold plot [SC14-Catalyst] Not anymore
# interactive()
|
almet/whiskerboard
|
settings/live.py
|
Python
|
mit
| 161
| 0.006211
|
from __future__ import absolute_import
from .base import *
from .local import *
CACHE_BACKEND = 'redis_cache.cache://127.0.0.1:6379/?timeout=15'
DEBUG = False
|
llv22/python3_learning
|
chapter4/sample.py
|
Python
|
apache-2.0
| 1,306
| 0.006126
|
"""
Learning python3
"""
def document_it(func):
'''
    decorator for func: logs the function name, arguments and result.
'''
def new_function(*args, **kwargs):
'''
        internal function wrapping func; prints the function parameters and result.
'''
print('Running functions:', func.__name__)
print('Positional arguments:', args)
        print('Keyword arguments:', kwargs)
result = func(*args, **kwargs)
print('Result:', result)
return result
    return new_function
@document_it
def add_ints0(add_a, add_b):
'''
    add with decorator @document_it.
'''
return add_a + add_b
def square_it(func):
'''
    decorator for func: returns the square of func's returned value.
'''
def new_function(*args, **kwargs):
'''
        internal function wrapping func; returns the square of func's result.
'''
result = func(*args, **kwargs)
return result * result
return new_function
@document_it
@square_it
def add_ints1(add_a, add_b):
'''
    add with decorators @square_it and @document_it (applied in that order, innermost first).
'''
return add_a + add_b
@square_it
@document_it
def add_ints2(add_a, add_b):
'''
    add with decorators @document_it and @square_it (applied in that order, innermost first).
'''
return add_a + add_b
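# Added usage sketch (not in the original module): decorator order matters.
# add_ints1 = document_it(square_it(add_ints1)), so the logged result is 64;
# add_ints2 = square_it(document_it(add_ints2)), so the logged result is 8;
# both calls return 64 for the arguments (3, 5).
if __name__ == '__main__':
    add_ints0(3, 5)         # logs the call details and 'Result: 8', returns 8
    print(add_ints1(3, 5))  # logs 'Result: 64', prints 64
    print(add_ints2(3, 5))  # logs 'Result: 8', prints 64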
|
Jokeren/neon
|
tests/test_gru.py
|
Python
|
apache-2.0
| 16,734
| 0.000418
|
# ----------------------------------------------------------------------------
# Copyright 2015-2016 Nervana Systems Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ----------------------------------------------------------------------------
"""
This test compares the NEON GRU layer against a numpy reference GRU
implementation and compares the NEON GRU bprop deltas to the gradients
estimated by finite differences.
The numpy reference GRU contains static methods for forward pass
and backward pass.
It runs a SINGLE layer of GRU and compares numerical values
The following are made sure to be the same in both GRUs
- initial h values (all zeros)
- initial W, b (ones or random values)
- input data (random data matrix)
- input error (random data matrix)
- the data shape inside GRU_ref is seq_len, 1, input_size.
Need transpose
- the data shape inside GRU (neon) is batch_size, seq_len * batch_size
"""
import itertools as itt
import numpy as np
from neon import NervanaObject, logger as neon_logger
from neon.initializers.initializer import Constant, Gaussian
from neon.layers import GRU
from neon.transforms import Logistic, Tanh
from neon.layers.container import DeltasTree
from gru_ref import GRU as RefGRU
from utils import allclose_with_out
def pytest_generate_tests(metafunc):
bsz_rng = [1]
if 'refgruargs' in metafunc.fixturenames:
fargs = []
if metafunc.config.option.all:
seq_rng = [2, 3, 4]
inp_rng = [3, 5, 10]
out_rng = [3, 5, 10]
else:
seq_rng = [3]
inp_rng = [5]
out_rng = [10]
fargs = itt.product(seq_rng, inp_rng, out_rng, bsz_rng)
metafunc.parametrize('refgruargs', fargs)
if 'gradgruargs' in metafunc.fixturenames:
fargs = []
if metafunc.config.option.all:
seq_rng = [2, 3]
inp_rng = [5, 10]
out_rng = [3, 5, 10]
else:
seq_rng = [3]
inp_rng = [5]
out_rng = [10]
fargs = itt.product(seq_rng, inp_rng, out_rng, bsz_rng)
metafunc.parametrize('gradgruargs', fargs)
def test_ref_compare_ones(backend_default, refgruargs):
# run comparison with reference code
# for all ones init
seq_len, input_size, hidden_size, batch_size = refgruargs
NervanaObject.be.bsz = NervanaObject.be.batch_size = batch_size
check_gru(seq_len, input_size, hidden_size,
batch_size, Constant(val=1.0), [1.0, 0.0])
def test_ref_compare_rand(backend_default, refgruargs):
# run comparison with reference code
# for all ones init
seq_len, input_size, hidden_size, batch_size = refgruargs
NervanaObject.be.bsz = NervanaObject.be.batch_size = batch_size
check_gru(seq_len, input_size, hidden_size, batch_size,
Gaussian())
def test_ref_compare_rand_init_state(backend_default, refgruargs):
seq_len, input_size, hidden_size, batch_size = refgruargs
NervanaObject.be.bsz = NervanaObject.be.batch_size = batch_size
check_gru(seq_len, input_size, hidden_size, batch_size,
Gaussian(), add_init_state=True)
# compare neon GRU to reference GRU implementation
def check_gru(seq_len, input_size, hidden_size,
batch_size, init_func, inp_moms=[0.0, 1.0], add_init_state=False):
# init_func is the initializer for the model params
# inp_moms is the [ mean, std dev] of the random input
input_shape = (input_size, seq_len * batch_size)
output_shape = (hidden_size, seq_len * batch_size)
slice_shape = (hidden_size, batch_size)
NervanaObject.be.bsz = NervanaObject.be.batch_size = batch_size
# neon GRU
gru = GRU(hidden_size,
init_func,
activation=Tanh(),
gate_activation=Logistic())
# generate random input tensor
inp = np.random.rand(*input_shape) * inp_moms[1] + inp_moms[0]
inp_dev = gru.be.array(inp)
# generate random deltas tensor
deltas = np.random.randn(*output_shape)
# run neon fprop
gru.configure((input_size, seq_len))
gru.prev_layer = True
gru.allocate()
test_buffer = DeltasTree()
gru.allocate_deltas(test_buffer)
test_buffer.allocate_buffers()
gru.set_deltas(test_buffer)
if add_init_state:
init_state = np.random.rand(*slice_shape)*inp_moms[1] + inp_moms[0]
init_state_dev = gru.be.array(init_state)
gru.fprop(inp_dev, init_state=init_state_dev)
else:
gru.fprop(inp_dev)
# reference numpy GRU
gru_ref = RefGRU(input_size, hidden_size)
WGRU = gru_ref.weights
# make ref weights and biases the same with neon model
r_range = list(range(hidden_size))
z_range = list(range(hidden_size, hidden_size * 2))
c_range = list(range(hidden_size * 2, hidden_size * 3))
WGRU[gru_ref.weights_ind_br][:] = gru.b.get()[r_range]
WGRU[gru_ref.weights_ind_bz][:] = gru.b.get()[z_range]
WGRU[gru_ref.weights_ind_bc][:] = gru.b.get()[c_range]
WGRU[gru_ref.weights_ind_Wxr][:] = gru.W_input.get()[r_range]
WGRU[gru_ref.weights_ind_Wxz][:] = gru.W_input.get()[z_range]
WGRU[gru_ref.weights_ind_Wxc][:] = gru.W_input.get()[c_range]
WGRU[gru_ref.weights_ind_Rhr][:] = gru.W_recur.get()[r_range]
WGRU[gru_ref.weights_ind_Rhz][:] = gru.W_recur.get()[z_range]
WGRU[gru_ref.weights_ind_Rhc][:] = gru.W_recur.get()[c_range]
# transpose input X and do fprop
# the reference code expects these shapes:
# input_shape: (seq_len, input_size, batch_size)
# output_shape: (seq_len, hidden_size, batch_size)
inp_ref = inp.copy().T.reshape(
seq_len, batch_size, input_size).swapaxes(1, 2)
deltas_ref = deltas.copy().T.reshape(
seq_len, batch_size, hidden_size).swapaxes(1, 2)
if add_init_state:
init_state_ref = init_state.copy()
(dWGRU_ref, h_ref_list, dh_ref_list,
dr_ref_list, dz_ref_list, dc_ref_list) = gru_ref.lossFun(inp_ref,
deltas_ref,
init_state_ref)
else:
(dWGRU_ref, h_ref_list, dh_ref_list,
dr_ref_list, dz_ref_list, dc_ref_list) = gru_ref.lossFun(inp_ref,
deltas_ref)
neon_logger.display('====Verifying hidden states====')
assert allclose_with_out(gru.outputs.get(),
h_ref_list,
rtol=0.0,
atol=1.0e-5)
neon_logger.display('fprop is verified')
# now test the bprop
neon_logger.display('Making sure neon GRU matches numpy GRU in bprop')
gru.bprop(gru.be.array(deltas))
# grab the delta W from gradient buffer
dWinput_neon = gru.dW_input.get()
dWrecur_neon = gru.dW_recur.get()
db_neon = gru.db.get()
dWxr_neon = dWinput_neon[r_range]
dWxz_neon = dWinput_neon[z_range]
dWxc_neon = dWinput_neon[c_range]
dWrr_neon = dWrecur_neon[r_range]
dWrz_neon = dWrecur_neon[z_range]
dWrc_neon = dWrecur_neon[c_range]
dbr_neon = db_neon[r_range]
dbz_neon = db_neon[z_range]
dbc_neon = db_neon[c_range]
drzc_neon = gru.rzhcan_delta_buffer.get()
dr_neon = drzc_neon[r_range]
dz_neon = drzc_neon[z_range]
dc_neon = drzc_neon[c_range]
dWxr_ref = dWGRU_ref[gru_ref.dW_ind_Wxr]
dWxz_ref = dWGRU_ref[gru_ref.dW_ind_Wxz]
dWxc_ref = dWGRU_ref[gru_ref.dW_ind_Wxc]
dWrr_ref = dWGRU_ref[gru_ref.dW_ind_Rhr]
dWrz_ref = dWGRU_ref[gru_ref.dW_ind_Rhz]
    dWrc_ref = dWGRU_ref[gru_ref.dW_ind_Rhc]
|
kerneltask/micropython
|
tests/unicode/unicode_subscr.py
|
Python
|
mit
| 336
| 0
|
a = "¢пр"
print(a[0], a[0:1])
print(a[1], a[1:2])
print(a[2], a[2:3])
try:
print(a[3])
except IndexError:
print("IndexError")
print(a[3:4])
print(a[-1])
print(a[-2], a[-2:-1])
print(a[-3], a[-3:-2])
try:
print(a[-4])
except IndexError:
print("IndexError")
print(a[-4:-3])
|
print(a[0:2])
print(a[1:3])
print(a[2:4])
|
chrisjaquet/FreeCAD
|
src/Mod/Arch/ArchSite.py
|
Python
|
lgpl-2.1
| 5,788
| 0.017623
|
# -*- coding: utf8 -*-
#***************************************************************************
#* *
#* Copyright (c) 2011 *
#* Yorik van Havre <yorik@uncreated.net> *
#* *
#* This program is free software; you can redistribute it and/or modify *
#* it under the terms of the GNU Lesser General Public License (LGPL) *
#* as published by the Free Software Foundation; either version 2 of *
#* the License, or (at your option) any later version. *
#* for detail see the LICENCE text file. *
#* *
#* This program is distributed in the hope that it will be useful, *
#* but WITHOUT ANY WARRANTY; without even the implied warranty of *
#* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
#* GNU Library General Public License for more details. *
#* *
#* You should have received a copy of the GNU Library General Public *
#* License along with this program; if not, write to the Free Software *
#* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 *
#* USA *
#* *
#***************************************************************************
import FreeCAD,Draft,ArchCommands,ArchFloor
if FreeCAD.GuiUp:
import FreeCADGui
from PySide import QtCore, QtGui
from DraftTools import translate
else:
    def translate(ctxt,txt):
return txt
__title__="FreeCAD Site"
__author__ = "Yorik van Havre"
__url__ = "http://www.freecadweb.org"
def makeSite(objectslist=None,baseobj=None,name="Site"):
    '''makeSite(objectslist): creates a site including the
objects from the given list.'''
obj = FreeCAD.ActiveDocument.addObject("App::DocumentObjectGroupPython",name)
obj.Label = translate("Arch",name)
_Site(obj)
if FreeCAD.GuiUp:
_ViewProviderSite(obj.ViewObject)
if objectslist:
obj.Group = objectslist
if baseobj:
obj.Terrain = baseobj
return obj
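# Illustrative usage sketch (assumes this runs inside FreeCAD with an open
# document and an existing Building object to include; not part of the
# original module):
#
#     import Arch
#     site = Arch.makeSite([FreeCAD.ActiveDocument.Building], name="Site")
#     FreeCAD.ActiveDocument.recompute()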
class _CommandSite:
"the Arch Site command definition"
def GetResources(self):
return {'Pixmap' : 'Arch_Site',
'MenuText': QtCore.QT_TRANSLATE_NOOP("Arch_Site","Site"),
'Accel': "S, I",
'ToolTip': QtCore.QT_TRANSLATE_NOOP("Arch_Site","Creates a site object including selected objects.")}
def IsActive(self):
        return FreeCAD.ActiveDocument is not None
def Activated(self):
sel = FreeCADGui.Selection.getSelection()
p = FreeCAD.ParamGet("User parameter:BaseApp/Preferences/Mod/Arch")
link = p.GetBool("FreeLinking",False)
siteobj = []
warning = False
for obj in sel :
if Draft.getType(obj) == "Building":
siteobj.append(obj)
else :
if link == True :
siteobj.append(obj)
else:
warning = True
if warning :
message = translate( "Arch" , "Please select only Building objects or nothing!\n\
Sites are not allowed to accept objects other than Buildings.\n\
Other objects will be removed from the selection.\n\
You can change that in the preferences." )
ArchCommands.printMessage( message )
if sel and len(siteobj) == 0:
message = translate( "Arch" , "There is no valid object in the selection.\n\
Site creation aborted." )
ArchCommands.printMessage( message )
else :
ss = "[ "
for o in siteobj:
ss += "FreeCAD.ActiveDocument." + o.Name + ", "
ss += "]"
FreeCAD.ActiveDocument.openTransaction(translate("Arch","Create Site"))
FreeCADGui.addModule("Arch")
FreeCADGui.doCommand("Arch.makeSite("+ss+")")
FreeCAD.ActiveDocument.commitTransaction()
FreeCAD.ActiveDocument.recompute()
class _Site(ArchFloor._Floor):
"The Site object"
def __init__(self,obj):
ArchFloor._Floor.__init__(self,obj)
obj.addProperty("App::PropertyLink","Terrain","Arch","The terrain of this site")
obj.addProperty("App::PropertyString","Address","Arch","The street and housenumber of this site")
obj.addProperty("App::PropertyString","PostalCode","Arch","The postal or zip code of this site")
obj.addProperty("App::PropertyString","City","Arch","The city of this site")
obj.addProperty("App::PropertyString","Country","Arch","The country of this site")
obj.addProperty("App::PropertyFloat","Latitude","Arch","The latitude of this site")
obj.addProperty("App::PropertyFloat","Longitude","Arch","The latitude of this site")
obj.addProperty("App::PropertyString","Url","Arch","An url that shows this site in a mapping website")
self.Type = "Site"
obj.setEditorMode('Height',2)
class _ViewProviderSite(ArchFloor._ViewProviderFloor):
"A View Provider for the Site object"
def __init__(self,vobj):
ArchFloor._ViewProviderFloor.__init__(self,vobj)
def getIcon(self):
import Arch_rc
return ":/icons/Arch_Site_Tree.svg"
def claimChildren(self):
return self.Object.Group+[self.Object.Terrain]
if FreeCAD.GuiUp:
FreeCADGui.addCommand('Arch_Site',_CommandSite())
|
sjirjies/pyJacqQ
|
tests/generate_null_dataset.py
|
Python
|
gpl-3.0
| 4,011
| 0.00374
|
# This file is part of jacqq.py
# Copyright (C) 2015 Saman Jirjies - sjirjies(at)asu(dot)edu.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
import csv
import argparse
# This script generates a null data set where all outputs are 0 when passed through Jacquez's Q.
if __name__ == '__main__':
parser = argparse.ArgumentParser(description="Generate a lattice of pentagon case-control points",
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('x_size', type=int, help="Number of clusters to form in the x direction.")
parser.add_argument('y_size', type=int, help="Number of clusters to form in the y direction.")
parser.add_argument('histories_data', help="Location to write individuals' residential history.")
parser.add_argument('details_data', help="Location to write individuals' status data set.")
parser.add_argument('focus_data', help="Location to write focus data set")
args = parser.parse_args()
lattice_size_y = args.x_size
lattice_size_x = args.y_size
case_locations = []
for xi in range(0, lattice_size_x):
for yi in range(0, lattice_size_y):
case_locations.append((2+(10*xi), 2+(10*yi)))
focus_locations = []
for xi in range(0, lattice_size_x - 1):
for yi in range(0, lattice_size_y - 1):
focus_locations.append((7+(10*xi), 7+(10*yi)))
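    # Geometry note (illustrative): cases sit on a 10-unit grid starting at
    # (2, 2), the five controls written below surround each case in a small
    # pentagon of offsets, and every focus point lands at (7, 7) within a cell,
    # centred between four neighbouring cases, which is what keeps the data
    # set null under Jacquez's Q as described in the comment above.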
# Generate details data
csv_file = open(args.details_data, 'w')
try:
writer = csv.writer(csv_file)
writer.writerow(('ID', 'is_case'))
for case_index, case_point in enumerate(case_locations):
writer.writerow(('case_'+str(case_index+1), 1))
for control_name in ('A', 'B', 'C', 'D', 'E'):
                writer.writerow(('control_'+str(case_index+1)+control_name, 0))
finally:
csv_file.close()
# Generate time series data
csv_file = open(args.histories_data, 'w')
try:
writer = csv.writer(csv_file)
        writer.writerow(('ID', 'start_date', 'end_date', 'x', 'y'))
start_date = '20150101'
end_date = '20150102'
for id_index, case_point in enumerate(case_locations):
writer.writerow(('case_'+str(id_index+1), start_date, end_date, case_point[0], case_point[1]))
writer.writerow(('control_'+str(id_index+1)+'A', start_date, end_date, case_point[0], case_point[1]-2))
writer.writerow(('control_'+str(id_index+1)+'B', start_date, end_date, case_point[0]+2, case_point[1]))
writer.writerow(('control_'+str(id_index+1)+'C', start_date, end_date, case_point[0]+1, case_point[1]+1))
writer.writerow(('control_'+str(id_index+1)+'D', start_date, end_date, case_point[0]-1, case_point[1]+1))
writer.writerow(('control_'+str(id_index+1)+'E', start_date, end_date, case_point[0]-2, case_point[1]))
finally:
csv_file.close()
print("Finished generating null dataset")
# Generate focus data
csv_file = open(args.focus_data, 'w')
try:
writer = csv.writer(csv_file)
writer.writerow(('ID', 'start_date', 'end_date', 'x', 'y'))
start_date = '20150101'
end_date = '20150102'
for index, location in enumerate(focus_locations):
writer.writerow(('focus_' + str(index+1), start_date, end_date, location[0], location[1]))
finally:
csv_file.close()
|
astropy/photutils
|
photutils/centroids/tests/test_gaussian.py
|
Python
|
bsd-3-clause
| 4,408
| 0
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Tests for the gaussian centroid module.
"""
import itertools
from contextlib import nullcontext
from astropy.modeling.models import Gaussian1D, Gaussian2D
from astropy.utils.exceptions import AstropyUserWarning
import numpy as np
from numpy.testing import assert_allclose
import pytest
from ..gaussian import centroid_1dg, centroid_2dg, _gaussian1d_moments
from ...utils._optional_deps import HAS_SCIPY # noqa
XCEN = 25.7
YCEN = 26.2
XSTDS = [3.2, 4.0]
YSTDS = [5.7, 4.1]
THETAS = np.array([30., 45.]) * np.pi / 180.
DATA = np.zeros((3, 3))
DATA[0:2, 1] = 1.
DATA[1, 0:2] = 1.
DATA[1, 1] = 2.
# NOTE: the fitting routines in astropy use scipy.optimize
@pytest.mark.skipif('not HAS_SCIPY')
@pytest.mark.parametrize(('x_std', 'y_std', 'theta'),
list(itertools.product(XSTDS, YSTDS, THETAS)))
def test_centroids(x_std, y_std, theta):
model = Gaussian2D(2.4, XCEN, YCEN, x_stddev=x_std, y_stddev=y_std,
theta=theta)
y, x = np.mgrid[0:50, 0:47]
data = model(x, y)
xc, yc = centroid_1dg(data)
assert_allclose((xc, yc), (XCEN, YCEN), rtol=0, atol=1.e-3)
xc, yc = centroid_2dg(data)
assert_allclose((xc, yc), (XCEN, YCEN), rtol=0, atol=1.e-3)
# test with errors
error = np.sqrt(data)
xc, yc = centroid_1dg(data, error=error)
assert_allclose((xc, yc), (XCEN, YCEN), rtol=0, atol=1.e-3)
xc, yc = centroid_2dg(data, error=error)
assert_allclose((xc, yc), (XCEN, YCEN), rtol=0, atol=1.e-3)
# test with mask
mask = np.zeros(data.shape, dtype=bool)
data[10, 10] = 1.e5
mask[10, 10] = True
xc, yc = centroid_1dg(data, mask=mask)
assert_allclose((xc, yc), (XCEN, YCEN), rtol=0, atol=1.e-3)
xc, yc = centroid_2dg(data, mask=mask)
assert_allclose((xc, yc), (XCEN, YCEN), rtol=0, atol=1.e-3)
@pytest.mark.skipif('not HAS_SCIPY')
@pytest.mark.parametrize('use_mask', [True, False])
def test_centroids_nan_withmask(use_mask):
xc_ref = 24.7
yc_ref = 25.2
model = Gaussian2D(2.4, xc_ref, yc_ref, x_stddev=5.0, y_stddev=5.0)
y, x = np.mgrid[0:50, 0:50]
data = model(x, y)
data[20, :] = np.nan
if use_mask:
mask = np.zeros(data.shape, dtype=bool)
mask[20, :] = True
nwarn = 0
ctx = nullcontext()
else:
mask = None
nwarn = 1
ctx = pytest.warns(AstropyUserWarning,
match='Input data contains non-finite values')
with ctx as warnlist:
xc, yc = centroid_1dg(data, mask=mask)
assert_allclose([xc, yc], [xc_ref, yc_ref], rtol=0, atol=1.e-3)
if nwarn == 1:
assert len(warnlist) == nwarn
with ctx as warnlist:
xc, yc = centroid_2dg(data, mask=mask)
assert_allclose([xc, yc], [xc_ref, yc_ref], rtol=0, atol=1.e-3)
if nwarn == 1:
assert len(warnlist) == nwarn
@pytest.mark.skipif('not HAS_SCIPY')
def test_invalid_mask_shape():
data = np.zeros((4, 4))
mask = np.zeros((2, 2), dtype=bool)
with pytest.raises(ValueError):
centroid_1dg(data, mask=mask)
with pytest.raises(ValueError):
centroid_2dg(data, mask=mask)
with pytest.raises(ValueError):
_gaussian1d_moments(data, mask=mask)
@pytest.mark.skipif('not HAS_SCIPY')
def test_invalid_error_shape():
error = np.zeros((2, 2), dtype=bool)
with pytest.raises(ValueError):
centroid_1dg(np.zeros((4, 4)), error=error)
with pytest.raises(ValueError):
centroid_2dg(np.zeros((4, 4)), error=error)
@pytest.mark.skipif('not HAS_SCIPY')
def test_centroid_2dg_dof():
data = np.ones((2, 2))
with pytest.raises(ValueError):
centroid_2dg(data)
def test_gaussian1d_moments():
x = np.arange(100)
desired = (75, 50, 5)
g = Gaussian1D(*desired)
data = g(x)
result = _gaussian1d_moments(data)
assert_allclose(result, desired, rtol=0, atol=1.e-6)
data[0] = 1.e5
mask = np.zeros(data.shape).astype(bool)
mask[0] = True
result = _gaussian1d_moments(data, mask=mask)
assert_allclose(result, desired, rtol=0, atol=1.e-6)
data[0] = np.nan
mask = np.zeros(data.shape).astype(bool)
mask[0] = True
with pytest.warns(AstropyUserWarning) as warnlist:
result = _gaussian1d_moments(data, mask=mask)
assert_allclose(result, desired, rtol=0, atol=1.e-6)
assert len(warnlist) == 1
|
SEMAFORInformatik/femagtools
|
femagtools/plot.py
|
Python
|
bsd-2-clause
| 54,354
| 0.000515
|
# -*- coding: utf-8 -*-
"""
femagtools.plot
~~~~~~~~~~~~~~~
Creating plots
"""
import numpy as np
import scipy.interpolate as ip
import logging
try:
import matplotlib
import matplotlib.pyplot as plt
import matplotlib.cm as cm
from mpl_toolkits.mplot3d import Axes3D
matplotlibversion = matplotlib.__version__
except ImportError: # ModuleNotFoundError:
matplotlibversion = 0
logger = logging.getLogger("femagtools.plot")
def _create_3d_axis():
"""creates a subplot with 3d projection if one does not already exist"""
from matplotlib.projections import get_projection_class
from matplotlib import _pylab_helpers
create_axis = True
if _pylab_helpers.Gcf.get_active() is not None:
if isinstance(plt.gca(), get_projection_class('3d')):
create_axis = False
if create_axis:
plt.figure()
plt.subplot(111, projection='3d')
def _plot_surface(ax, x, y, z, labels, azim=None):
"""helper function for surface plots"""
# ax.tick_params(axis='both', which='major', pad=-3)
assert np.size(x) > 1 and np.size(y) > 1 and np.size(z) > 1
if azim is not None:
ax.azim = azim
X, Y = np.meshgrid(x, y)
Z = np.ma.masked_invalid(z)
ax.plot_surface(X, Y, Z,
rstride=1, cstride=1,
cmap=cm.viridis, alpha=0.85,
vmin=np.nanmin(z), vmax=np.nanmax(z),
linewidth=0, antialiased=True)
# edgecolor=(0, 0, 0, 0))
# ax.set_xticks(xticks)
# ax.set_yticks(yticks)
# ax.set_zticks(zticks)
ax.set_xlabel(labels[0])
ax.set_ylabel(labels[1])
ax.set_title(labels[2])
# plt.subplots_adjust(left=0, bottom=0, right=1, top=1, wspace=0, hspace=0)
def __phasor_plot(ax, up, idq, uxdq):
uref = max(up, uxdq[0])
uxd = uxdq[0]/uref
uxq = uxdq[1]/uref
u1d, u1q = (uxd, 1+uxq)
u1 = np.sqrt(u1d**2 + u1q**2)*uref
i1 = np.linalg.norm(idq)
i1d, i1q = (idq[0]/i1, idq[1]/i1)
qhw = 6 # width arrow head
qhl = 15 # length arrow head
qlw = 2 # line width
qts = 10 # textsize
    # Length of the current, adjusted relative to Ud: initially 0.9, Maier(Oswald) = 0.5
curfac = max(0.9, 1.5*i1q/up)
def label_line(ax, X, Y, U, V, label, color='k', size=8):
"""Add a label to a line, at the proper angle.
Arguments
---------
line : matplotlib.lines.Line2D object,
label : str
x : float
            x-position to place center of text (in data coordinates)
y : float
y-position to place center of text (in data coordinates)
color : str
size : float
"""
x1, x2 = X, X + U
y1, y2 = Y, Y + V
if y2 == 0:
y2 = y1
if x2 == 0:
x2 = x1
x = (x1 + x2) / 2
y = (y1 + y2) / 2
        slope_degrees = np.rad2deg(np.angle(U + V * 1j))
if slope_degrees < 0:
slope_degrees += 180
if 90 < slope_degrees <= 270:
slope_degrees += 180
x_offset = np.sin(np.deg2rad(slope_degrees))
y_offset = np.cos(np.deg2rad(slope_degrees))
bbox_props = dict(boxstyle="Round4, pad=0.1", fc="white", lw=0)
text = ax.annotate(label, xy=(x, y), xytext=(x_offset * 10, y_offset * 8),
textcoords='offset points',
size=size, color=color,
horizontalalignment='center',
verticalalignment='center',
fontfamily='monospace', fontweight='bold', bbox=bbox_props)
text.set_rotation(slope_degrees)
return text
if ax == 0:
ax = plt.gca()
ax.axes.xaxis.set_ticklabels([])
ax.axes.yaxis.set_ticklabels([])
# ax.set_aspect('equal')
ax.set_title(
r'$U_1$={0} V, $I_1$={1} A, $U_p$={2} V'.format(
round(u1, 1), round(i1, 1), round(up, 1)), fontsize=14)
up /= uref
ax.quiver(0, 0, 0, up, angles='xy', scale_units='xy', scale=1, units='dots',
headwidth=qhw/2, headlength=qhl/2, headaxislength=qhl/2, width=qlw*2, color='k')
label_line(ax, 0, 0, 0, up, '$U_p$', 'k', qts)
ax.quiver(0, 0, u1d, u1q, angles='xy', scale_units='xy', scale=1, units='dots',
headwidth=qhw, headlength=qhl, headaxislength=qhl, width=qlw, color='r')
label_line(ax, 0, 0, u1d, u1q, '$U_1$', 'r', qts)
ax.quiver(0, 1, uxd, 0, angles='xy', scale_units='xy', scale=1, units='dots',
headwidth=qhw, headlength=qhl, headaxislength=qhl, width=qlw, color='g')
label_line(ax, 0, 1, uxd, 0, '$U_d$', 'g', qts)
ax.quiver(uxd, 1, 0, uxq, angles='xy', scale_units='xy', scale=1, units='dots',
headwidth=qhw, headlength=qhl, headaxislength=qhl, width=qlw, color='g')
label_line(ax, uxd, 1, 0, uxq, '$U_q$', 'g', qts)
ax.quiver(0, 0, curfac*i1d, curfac*i1q, angles='xy', scale_units='xy', scale=1,
units='dots', headwidth=qhw, headlength=qhl, headaxislength=qhl, width=qlw, color='b')
label_line(ax, 0, 0, curfac*i1d, curfac*i1q, '$I_1$', 'b', qts)
xmin, xmax = (min(0, uxd, i1d), max(0, i1d, uxd))
ymin, ymax = (min(0, i1q, 1-uxq), max(1, i1q, 1+uxq))
ax.set_xlim([xmin-0.1, xmax+0.1])
ax.set_ylim([ymin-0.1, ymax+0.1])
ax.grid(True)
def i1beta_phasor(up, i1, beta, r1, xd, xq, ax=0):
"""creates a phasor plot
up: internal voltage
i1: current
beta: angle i1 vs up [deg]
r1: resistance
xd: reactance in direct axis
xq: reactance in quadrature axis"""
i1d, i1q = (i1*np.sin(beta/180*np.pi), i1*np.cos(beta/180*np.pi))
uxdq = ((r1*i1d - xq*i1q), (r1*i1q + xd*i1d))
__phasor_plot(ax, up, (i1d, i1q), uxdq)
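# Minimal usage sketch (illustrative machine values only, not from a real
# design; assumes matplotlib is available, since the phasors are drawn on the
# current axes):
#
#     import matplotlib.pyplot as plt
#     i1beta_phasor(up=220.0, i1=40.0, beta=15.0, r1=0.05, xd=1.2, xq=2.4)
#     plt.show()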
def iqd_phasor(up, iqd, uqd, ax=0):
"""creates a phasor plot
up: internal voltage
iqd: current
uqd: terminal voltage"""
uxdq = (uqd[1]/np.sqrt(2), (uqd[0]/np.sqrt(2)-up))
__phasor_plot(ax, up, (iqd[1]/np.sqrt(2), iqd[0]/np.sqrt(2)), uxdq)
def phasor(bch, ax=0):
"""create phasor plot from bch"""
f1 = bch.machine['p']*bch.dqPar['speed']
w1 = 2*np.pi*f1
xd = w1*bch.dqPar['ld'][-1]
xq = w1*bch.dqPar['lq'][-1]
r1 = bch.machine['r1']
i1beta_phasor(bch.dqPar['up'][-1],
bch.dqPar['i1'][-1], bch.dqPar['beta'][-1],
r1, xd, xq, ax)
def airgap(airgap, ax=0):
"""creates plot of flux density in airgap"""
if ax == 0:
ax = plt.gca()
ax.set_title('Airgap Flux Density [T]')
ax.plot(airgap['pos'], airgap['B'],
label='Max {:4.2f} T'.format(max(airgap['B'])))
ax.plot(airgap['pos'], airgap['B_fft'],
label='Base Ampl {:4.2f} T'.format(airgap['Bamp']))
ax.set_xlabel('Position/°')
ax.legend()
ax.grid(True)
def airgap_fft(airgap, bmin=1e-2, ax=0):
"""plot airgap harmonics"""
unit = 'T'
if ax == 0:
ax = plt.gca()
ax.set_title('Airgap Flux Density Harmonics / {}'.format(unit))
ax.grid(True)
order, fluxdens = np.array([(n, b) for n, b in zip(airgap['nue'],
airgap['B_nue']) if b > bmin]).T
try:
markerline1, stemlines1, _ = ax.stem(order, fluxdens, '-.', basefmt=" ",
use_line_collection=True)
ax.set_xticks(order)
except ValueError: # empty sequence
pass
def torque(pos, torque, ax=0):
"""creates plot from torque vs position"""
k = 20
alpha = np.linspace(pos[0], pos[-1],
k*len(torque))
f = ip.interp1d(pos, torque, kind='quadratic')
unit = 'Nm'
scale = 1
if np.min(torque) < -9.9e3 or np.max(torque) > 9.9e3:
scale = 1e-3
unit = 'kNm'
if ax == 0:
ax = plt.gca()
ax.set_title('Torque / {}'.format(unit))
ax.grid(True)
ax.plot(pos, [scale*t for t in torque], 'go')
ax.plot(alpha, scale*f(alpha))
if np.min(torque) > 0 and np.max(torque) > 0:
ax.set_ylim(bottom=0)
elif np.min(torque) < 0 and np.max(torque) < 0:
ax.set_ylim(top=0)
def torque_fft(order, torque, ax=
|
nke001/attention-lvcsr
|
libs/Theano/theano/gof/__init__.py
|
Python
|
mit
| 2,704
| 0
|
"""
gof.py
gof stands for Graph Optimization Framework
The gof submodule of theano implements a framework
for manipulating programs described as graphs. The
gof module defines basic theano graph concepts:
-Apply nodes, which represent the application
of an Op to Variables. Together these make up a
graph.
-The Type, needed for Variables to make sense
-The FunctionGraph, which defines how a subgraph
should be interpreted to implement a function
    -The Thunk, a callable object that becomes part
of the executable emitted by theano
-Linkers/VMs, the objects that call Thunks in
sequence in order to execute a theano program
Conceptually, gof is intended to be sufficiently abstract
that it could be used to implement a language other than
theano. ie, theano is a domain-specific language for
numerical computation, created by implementing
tensor Variables and Ops that perform mathematical functions.
A different kind of domain-specific language could be
made by using gof with different Variables and Ops.
In practice, gof and the rest of theano are somewhat more
tightly intertwined.
Currently, gof also contains much of the C compilation
functionality. Ideally this should be refactored into
a different submodule.
For more details and discussion, see the theano-dev
e-mail thread "What is gof?"
"""
from theano.gof.cc import \
CLinker, OpWiseCLinker, DualLinker, HideC
# Also adds config vars
from theano.gof.compiledir import \
local_bitwidth, python_int_bitwidth
from theano.gof.fg import \
CachedConstantError, InconsistencyError, MissingInputError, FunctionGraph
from theano.gof.destroyhandler import \
DestroyHandler
from theano.gof.graph import \
Apply, Variable, Constant, view_roots
from theano.gof.link import \
    Container, Linker, LocalLinker, PerformLinker, WrapLinker, WrapLinkerMany
from theano.gof.op import \
Op, OpenMPOp, PureOp, COp, ops_with_inner_function
from theano.gof.opt import (
Optimizer,
optimizer, inplace_optimizer,
SeqOptimizer,
MergeOptimizer,
LocalOptimizer, local_optimizer, LocalOptGroup,
OpSub, OpRemove, PatternSub,
NavigatorOptimizer, TopoOptimizer, EquilibriumOptimizer,
OpKeyOptimizer)
from theano.gof.optdb import \
DB, Query, \
EquilibriumDB, SequenceDB, ProxyDB
from theano.gof.toolbox import \
Feature, \
Bookkeeper, History, Validator, ReplaceValidate, NodeFinder,\
PrintListener, ReplacementDidntRemovedError, NoOutputFromInplace
from theano.gof.type import \
Type, Generic, generic
from theano.gof.utils import \
hashtype, object2, MethodNotDefined
import theano
if theano.config.cmodule.preload_cache:
cc.get_module_cache()
|
appop/bitcoin
|
qa/rpc-tests/test_framework/siphash.py
|
Python
|
mit
| 2,010
| 0.001493
|
#!/usr/bin/env python3
# Copyright (c) 2016 The nealcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Specialized SipHash-2-4 implementations.
This implements SipHash-2-4 for 256-bit integers.
"""
def rotl64(n, b):
return n >> (64 - b) | (n & ((1 << (64 - b)) - 1)) << b
def siphash_round(v0, v1, v2, v3):
v0 = (v0 + v1) & ((1 << 64) - 1)
v1 = rotl64(v1, 13)
v1 ^= v0
v0 = rotl64(v0, 32)
v2 = (v2 + v3) & ((1 << 64) - 1)
v3 = rotl64(v3, 16)
v3 ^= v2
v0 = (v0 + v3) & ((1 << 64) - 1)
v3 = rotl64(v3, 21)
v3 ^= v0
v2 = (v2 + v1) & ((1 << 64) - 1)
v1 = rotl64(v1, 17)
v1 ^= v2
v2 = rotl64(v2, 32)
return (v0, v1, v2, v3)
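# Note: siphash_round above is a single SipRound of the SipHash add-rotate-xor
# permutation over four 64-bit lanes. siphash256 below applies it in the 2-4
# pattern: two rounds per 64-bit message word (including the length word),
# then four finalization rounds.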
def siphash256(k0, k1, h):
n0 = h & ((1 << 64) - 1)
n1 = (h >> 64) & ((1 << 64) - 1)
n2 = (h >> 128) & ((1 << 64) - 1)
n3 = (h >> 192) & ((1 << 64) - 1)
v0 = 0x736f6d6570736575 ^ k0
v1 = 0x646f72616e646f6d ^ k1
v2 = 0x6c7967656e657261 ^ k0
v3 = 0x7465646279746573 ^ k1 ^ n0
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0 ^= n0
v3 ^= n1
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0 ^= n1
v3 ^= n2
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
    v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0 ^= n2
v3 ^= n3
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0 ^= n3
v3 ^= 0x2000000000000000
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0 ^= 0x2000000000000000
v2 ^= 0xFF
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
    v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
return v0 ^ v1 ^ v2 ^ v3
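# Illustrative usage sketch (not part of the original helper; the key halves
# and the 256-bit message below are arbitrary example values):
if __name__ == '__main__':
    k0 = 0x0706050403020100
    k1 = 0x0F0E0D0C0B0A0908
    h = int.from_bytes(bytes(range(32)), 'little')  # arbitrary 256-bit input
    print(hex(siphash256(k0, k1, h)))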
|
Teamxrtc/webrtc-streaming-node
|
third_party/webrtc/src/chromium/src/third_party/closure_compiler/processor_test.py
|
Python
|
mit
| 3,825
| 0.00366
|
#!/usr/bin/env python
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Test resources processing, i.e. <if> and <include> tag handling."""
import unittest
from processor import FileCache, Processor, LineNumber
class ProcessorTest(unittest.TestCase):
"""Test <include> tag processing logic."""
def __init__(self, *args, **kwargs):
unittest.TestCase.__init__(self, *args, **kwargs)
self.maxDiff = None
def setUp(self):
FileCache._cache["/debug.js"] = """
// Copyright 2002 Older Chromium Author dudes.
function debug(msg) { if (window.DEBUG) alert(msg); }
""".strip()
FileCache._cache["/global.js"] = """
// Copyright 2014 Old Chromium Author dudes.
<include src="/debug.js">
var global = 'type checking!';
""".strip()
FileCache._cache["/checked.js"] = """
// Copyright 2028 Future Chromium Author dudes.
/**
* @fileoverview Coolest app ever.
* @author Douglas Crockford (douglas@crockford.com)
*/
<include src="/global.js">
debug(global);
// Here continues checked.js, a swell file.
""".strip()
FileCache._cache["/double-debug.js"] = """
<include src="/debug.js">
<include src="/debug.js">
""".strip()
self._processor = Processor("/checked.js")
def testInline(self):
self.assertMultiLineEqual("""
// Copyright 2028 Future Chromium Author dudes.
/**
* @fileoverview Coolest app ever.
* @author Douglas Crockford (douglas@crockford.com)
*/
// Copyright 2014 Old Chromium Author dudes.
// Copyright 2002 Older Chromium Author dudes.
function debug(msg) { if (window.DEBUG) alert(msg); }
var global = 'type checking!';
debug(global);
// Here continues checked.js, a swell file.
""".strip(), self._processor.contents)
  def assertLineNumber(self, abs_line, expected_line):
    actual_line = self._processor.get_file_from_line(abs_line)
    self.assertEqual(expected_line.file, actual_line.file)
    self.assertEqual(expected_line.line_number, actual_line.line_number)
def testGetFileFromLine(self):
"""Verify that inlined files retain their original line info."""
self.assertLineNumber(1, LineNumber("/checked.js", 1))
self.assertLineNumber(5, LineNumber("/checked.js", 5))
self.assertLineNumber(6, LineNumber("/global.js", 1))
self.assertLineNumber(7, LineNumber("/debug.js", 1))
self.assertLineNumber(8, LineNumber("/debug.js", 2))
self.assertLineNumber(9, LineNumber("/global.js", 3))
self.assertLineNumber(10, LineNumber("/checked.js", 7))
self.assertLineNumber(11, LineNumber("/checked.js", 8))
def testIncludedFiles(self):
"""Verify that files are tracked correctly as they're inlined."""
self.assertEquals(set(["/global.js", "/debug.js"]),
self._processor.included_files)
def testDoubleIncludedSkipped(self):
"""Verify that doubly included files are skipped."""
processor = Processor("/double-debug.js")
self.assertEquals(set(["/debug.js"]), processor.included_files)
self.assertEquals(FileCache.read("/debug.js") + "\n", processor.contents)
class IfStrippingTest(unittest.TestCase):
"""Test that the contents of XML <if> blocks are stripped."""
def __init__(self, *args, **kwargs):
unittest.TestCase.__init__(self, *args, **kwargs)
self.maxDiff = None
def setUp(self):
FileCache._cache["/century.js"] = """
function getCurrentCentury() {
<if expr="netscape_os">
alert("Oh wow!");
return "XX";
</if>
return "XXI";
}
""".strip()
self.processor_ = Processor("/century.js")
def testIfStripping(self):
self.assertMultiLineEqual("""
function getCurrentCentury() {
alert("Oh wow!");
return "XX";
return "XXI";
}
""".strip(), self.processor_.contents)
if __name__ == '__main__':
unittest.main()
|
kreatorkodi/repository.torrentbr
|
plugin.video.youtube/resources/lib/youtube_plugin/kodion/utils/access_manager.py
|
Python
|
gpl-2.0
| 12,104
| 0.003057
|
import uuid
import time
from hashlib import md5
from ..json_store import LoginTokenStore
__author__ = 'bromix'
class AccessManager(object):
def __init__(self, context):
self._settings = context.get_settings()
self._jstore = LoginTokenStore()
self._json = self._jstore.get_data()
self._user = self._json['access_manager'].get('current_user', '0')
self._last_origin = self._json['access_manager'].get('last_origin', 'plugin.video.youtube')
def get_current_user_id(self):
"""
:return: uuid of the current user
"""
self._json = self._jstore.get_data()
return self._json['access_manager']['users'][self.get_user()]['id']
def get_new_user(self, user_name='', addon_id=''):
"""
:param user_name: string, users name
:param addon_id: string, addon id
:return: a new user dict
"""
uuids = list()
new_uuid = uuid.uuid4().hex
for k in list(self._json['access_manager']['users'].keys()):
user_uuid = self._json['access_manager']['users'][k].get('id')
if user_uuid:
uuids.append(user_uuid)
while new_uuid in uuids:
new_uuid = uuid.uuid4().hex
return {'access_token': '', 'refresh_token': '', 'token_expires': -1, 'last_key_hash': '',
'name': user_name, 'id': new_uuid, 'watch_later': ' WL', 'watch_history': 'HL'}
def get_users(self):
"""
Returns users
:return: users
"""
return self._json['access_manager'].get('users', {})
def set_users(self, users):
"""
Updates the users
:param users: dict, users
:return:
"""
self._json = self._jstore.get_data()
self._json['access_manager']['users'] = users
self._jstore.save(self._json)
def set_user(self, user, switch_to=False):
"""
Updates the user
:param user: string, username
:param switch_to: boolean, change current user
:return:
"""
self._user = user
if switch_to:
self._json = self._jstore.get_data()
self._json['access_manager']['current_user'] = user
self._jstore.save(self._json)
def get_user(self):
"""
Returns the current user
:return: user
"""
return self._user
def get_watch_later_id(self):
"""
        Returns the current user's watch later playlist id
        :return: the current user's watch later playlist id
"""
self._json = self._jstore.get_data()
current_playlist_id = self._json['access_manager']['users'].get(self._user, {}).get('watch_later', ' WL')
settings_playlist_id = self._settings.get_string('youtube.folder.watch_later.playlist', '').strip()
if settings_playlist_id and (current_playlist_id != settings_playlist_id):
            self._json['access_manager']['users'][self._user]['watch_later'] = settings_playlist_id
            self._jstore.save(self._json)
self._settings.set_string('youtube.folder.watch_later.playlist', '')
return self._json['access_manager']['users'].get(self._user, {}).get('watch_later', ' WL')
def set_watch_later_id(self, playlist_id):
"""
        Sets the current user's watch later playlist id
:param playlist_id: string, watch later playlist id
:return:
"""
self._json = self._jstore.get_data()
self._json['access_manager']['users'][self._user]['watch_later'] = playlist_id
self._settings.set_string('youtube.folder.watch_later.playlist', '')
self._jstore.save(self._json)
def get_watch_history_id(self):
"""
        Returns the current user's watch history playlist id
        :return: the current user's watch history playlist id
"""
self._json = self._jstore.get_data()
current_playlist_id = self._json['access_manager']['users'].get(self._user, {}).get('watch_history', 'HL')
settings_playlist_id = self._settings.get_string('youtube.folder.history.playlist', '').strip()
if settings_playlist_id and (current_playlist_id != settings_playlist_id):
self._json['access_manager']['users'][self._user]['watch_history'] = settings_playlist_id
self._jstore.save(self._json)
self._settings.set_string('youtube.folder.history.playlist', '')
return self._json['access_manager']['users'].get(self._user, {}).get('watch_history', 'HL')
def set_watch_history_id(self, playlist_id):
"""
        Sets the current user's watch history playlist id
:param playlist_id: string, watch history playlist id
:return:
"""
self._json = self._jstore.get_data()
self._json['access_manager']['users'][self._user]['watch_history'] = playlist_id
self._settings.set_string('youtube.folder.history.playlist', '')
self._jstore.save(self._json)
def set_last_origin(self, origin):
"""
        Updates the last origin
        :param origin: string, origin identifier
        :return:
"""
self._last_origin = origin
self._json = self._jstore.get_data()
self._json['access_manager']['last_origin'] = origin
self._jstore.save(self._json)
def get_last_origin(self):
"""
Returns the last origin
:return:
"""
return self._last_origin
def get_access_token(self):
"""
Returns the access token for some API
:return: access_token
"""
self._json = self._jstore.get_data()
return self._json['access_manager']['users'].get(self._user, {}).get('access_token', '')
def get_refresh_token(self):
"""
Returns the refresh token
:return: refresh token
"""
self._json = self._jstore.get_data()
return self._json['access_manager']['users'].get(self._user, {}).get('refresh_token', '')
def has_refresh_token(self):
return self.get_refresh_token() != ''
def is_access_token_expired(self):
"""
Returns True if the access_token is expired otherwise False.
        If no expiration date was provided and an access_token exists,
        this method will always return False
:return:
"""
self._json = self._jstore.get_data()
access_token = self._json['access_manager']['users'].get(self._user, {}).get('access_token', '')
expires = int(self._json['access_manager']['users'].get(self._user, {}).get('token_expires', -1))
# with no access_token it must be expired
if not access_token:
return True
# in this case no expiration date was set
if expires == -1:
return False
now = int(time.time())
return expires <= now
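        # Decision rule above in isolation (illustrative): no access token
        # means expired; a token with no recorded expiry (-1) is treated as
        # still valid; otherwise it is expired exactly when the stored
        # timestamp is not in the future, e.g. expires = now - 10 -> expired,
        # expires = now + 3600 -> still valid.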
def update_access_token(self, access_token, unix_timestamp=None, refresh_token=None):
"""
Updates the old access token with the new one.
:param access_token:
:param unix_timestamp:
:param refresh_token:
:return:
"""
self._json = self._jstore.get_data()
self._json['access_manager']['users'][self._user]['access_token'] = access_token
if unix_timestamp is not None:
self._json['access_manager']['users'][self._user]['token_expires'] = int(unix_timestamp)
if refresh_token is not None:
self._json['access_manager']['users'][self._user]['refresh_token'] = refresh_token
self._jstore.save(self._json)
def get_new_developer(self, addon_id):
"""
:param addon_id: string, addon id
:return: a new developer dict
"""
return {'access_token': '', 'refresh_token': '', 'token_expires': -1, 'last_key_hash': ''}
def get_developers(self):
"""
Returns developers
:return: dict, developers
"""
return self._json['access_manager'].get('developers', {})
def set_developers(self, developers):
"""
Updates the
|