| repo_name (stringlengths 5–100) | path (stringlengths 4–231) | language (stringclasses: 1 value) | license (stringclasses: 15 values) | size (int64: 6–947k) | score (float64: 0–0.34) | prefix (stringlengths 0–8.16k) | middle (stringlengths 3–512) | suffix (stringlengths 0–8.17k) |
|---|---|---|---|---|---|---|---|---|
knappador/kivy-netcheck | src/netcheck/mockconn.py | Python | mit | 1,385 | 0.005054
from kivy.logger import Logger
''' Mock for checking the connection. Set success to test '''
class Netcheck():
def __init__(self, prompt=None):
if prompt is None:
prompt = self._no_prompt
self._prompt = prompt
self.MOCK_RESULT=False
self.MOCK_SETTINGS_RESULT=True
def set_prompt(self, fn):
self._prompt = fn
def connection_available(self):
Logger.info('Mock connection check {}'.format(self.MOCK_RESULT))
return self.MOCK_RESULT
def ask_connect(self, callback=None):
callback = callback if callback else lambda *args, **kwargs: None
if self.connection_available():
callback(True)
else:
self._callback = callback
self._prompt(self._open_settings)
def _open_settings(self, try_connect):
        Logger.info('in ask connect callback ' + str(try_connect))
if try_connect:
self._settings_callback()
else:
self._callback(False)
def _settings_callback(self):
#self.MOCK_RESULT=self.MOCK_SETTINGS_RESULT
self._callback(self.MOCK_SETTINGS_RESULT)
def _no_prompt(self, callback):
Logger.warning('No network prompt was set. Cannot ask to connect')
def _set_debug(self, **kwargs):
for k in kwargs:
            setattr(self, k, kwargs[k])
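A minimal usage sketch for the mock above (the fake prompt and the logging callback are invented for illustration): wire in a prompt, simulate a failed first check that succeeds after the user visits settings, and watch the callback fire.

def fake_prompt(open_settings):
    # Pretend the user agreed to open the network settings.
    open_settings(True)

nc = Netcheck(prompt=fake_prompt)
nc.MOCK_RESULT = False          # no connection on the first check
nc.MOCK_SETTINGS_RESULT = True  # connection appears after "settings"
nc.ask_connect(lambda ok: Logger.info('connected: {}'.format(ok)))  # logs True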
EduPepperPDTesting/pepper2013-testing | lms/lib/comment_client/comment_client.py | Python | agpl-3.0 | 1,852 | 0.00486
# Import other classes here so they can be imported from here.
# pylint: disable=W0611
from .comment import Comment
from .thread import Thread
from .user import User
from .commentable import Commentable
from .utils import perform_request
import settings
def search_similar_threads(course_id, recursive=False, query_params={}, *args, **kwargs):
default_params = {'course_id': course_id, 'recursive': recursive}
attributes = dict(default_params.items() + query_params.items())
return perform_request('get', _url_for_search_similar_threads(), attributes, *args, **kwargs)
def search_recent_active_threads(course_id, recursive=False, query_params={}, *args, **kwargs):
default_params = {'course_id': course_id, 'recursive': recursive}
attributes = dict(default_params.items() + query_params.items())
return perform_request('get', _url_for_search_recent_active_threads(), attributes, *args, **kwargs)
def search_trending_tags(course_id, query_params={}, *args, **kwargs):
default_params = {'course_id': course_id}
attributes = dict(default_params.items() + query_params.items())
return perform_request('get', _url_for_search_trending_tags(), attributes, *args, **kwargs)
def tags_autocomplete(value, *args, **kwargs):
return perform_request('get', _url_for_threads_tags_autocomplete(), {'value': value}, *args, **kwargs)
def _url_for_search_similar_threads():
return "{prefix}/search/threads/more_like_this".format(prefix=settings.PREFIX)
def _url_for_search_recent_active_threads():
return "{prefix}/search/threads/recent_active".format(prefix=settings.PREFIX)
def _url_for_search_trending_tags():
return "{prefix}/search/tags/trending".format(prefix=settings.PREFIX)
def _url_for_threads_tags_autocomplete():
return "{prefix}/threads/tags/autocomplete".format(prefix=settings.PREFIX)
pymedusa/Medusa | tests/test_should_process.py | Python | gpl-3.0 | 2,950 | 0.000339
# coding=utf-8
"""Tests for medusa/test_should_process.py."""
from __future__ import unicode_literals
from medusa.common import Quality
from medusa.post_processor import PostProcessor
import pytest
@pytest.mark.parametrize('p', [
{ # p0: New allowed quality higher than current allowed: yes
'cur_quality': Quality.HDTV,
'new_quality': Quality.HDBLURAY,
'allowed_qualities': [Quality.HDTV, Quality.HDWEBDL, Quality.HDBLURAY],
'preferred_qualities': [],
'expected': True
},
{ # p1: New quality not in allowed qualities: no
'cur_quality': Quality.HDTV,
'new_quality': Quality.HDBLURAY,
'allowed_qualities': [Quality.HDTV, Quality.HDWEBDL],
'preferred_qualities': [],
'expected': False
},
{ # p2: New allowed quality lower than current allowed: no
'cur_quality': Quality.HDBLURAY,
'new_quality': Quality.HDTV,
'allowed_qualities': [Quality.HDTV, Quality.HDWEBDL, Quality.HDBLURAY],
'preferred_qualities': [],
'expected': False
},
{ # p3: Preferred quality replacing current allowed: yes
'cur_quality': Quality.HDTV,
'new_quality': Quality.HDBLURAY,
'allowed_qualities': [Quality.HDTV, Quality.HDWEBDL],
'preferred_qualities': [Quality.HDBLURAY],
'expected': True
},
{ # p4: Preferred quality better than current preferred: yes
'cur_quality': Quality.HDWEBDL,
'new_quality': Quality.HDBLURAY,
'allowed_qualities': [Quality.HDTV],
'preferred_qualities': [Quality.HDWEBDL, Quality.HDBLURAY],
'expected': True
},
{ # p5: New quality not in quality system: no
'cur_quality': Quality.HDTV,
'new_quality': Quality.SDTV,
'allowed_qualities': [Quality.HDTV],
'preferred_qualities': [Quality.HDWEBDL, Quality.HDBLURAY],
'expected': False
},
{ # p6: Preferred lower quality replacing current higher allowed: yes
'cur_quality': Quality.HDWEBDL,
'new_quality': Quality.HDTV,
'allowed_qualities': [Quality.HDWEBDL],
'preferred_qualities': [Quality.HDTV],
'expected': True
},
{ # p7: Current quality is NA: yes
        'cur_quality': Quality.NA,
'new_quality': Quality.HDTV,
        'allowed_qualities': [Quality.HDWEBDL],
'preferred_qualities': [Quality.HDTV],
'expected': True
},
])
def test_should_process(p):
"""Run the test."""
# Given
current_quality = p['cur_quality']
new_quality = p['new_quality']
allowed_qualities = p['allowed_qualities']
preferred_qualities = p['preferred_qualities']
expected = p['expected']
# When
replace, msg = PostProcessor._should_process(current_quality, new_quality, allowed_qualities, preferred_qualities)
actual = replace
# Then
if expected != actual:
print(msg)
assert expected == actual
googleads/google-ads-python | google/ads/googleads/v9/services/services/campaign_bid_modifier_service/transports/__init__.py | Python | apache-2.0 | 1,099 | 0
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
from typing import Dict, Type
from .base import CampaignBidModifierServiceTransport
from .grpc import CampaignBidModifierServiceGrpcTransport
# Compile a registry of transports.
_transport_registry = (
OrderedDict()
) # type: Dict[str, Type[CampaignBidModifierServiceTransport]]
_transport_registry["grpc"] = CampaignBidModifierServiceGrpcTransport
__all__ = (
"CampaignBidModifierServiceTransport",
"CampaignBidModifierServiceGrpcTransport",
)
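A small sketch (not part of the source file) of how such a registry is typically consumed: resolve the transport class by its registry key before instantiating it with credentials.

transport_cls = _transport_registry["grpc"]
assert transport_cls is CampaignBidModifierServiceGrpcTransport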
imjonsnooow/vivisect | vivisect/qt/remote.py | Python | apache-2.0 | 7,037 | 0.005684
from PyQt4 import Qt, QtCore, QtGui
import vqt.main as vq_main
import vqt.tree as vq_tree
import envi.threads as e_threads
import cobra.remoteapp as c_remoteapp
import vivisect.remote.server as viv_server
from vqt.basics import *
class WorkspaceListModel(vq_tree.VQTreeModel):
columns = ('Name',)
class WorkspaceListView(vq_tree.VQTreeView):
def __init__(self, workspaces, parent=None):
vq_tree.VQTreeView.__init__(self, parent=parent)
model = WorkspaceListModel(parent=self)
self.setModel(model)
for wsname in workspaces:
model.append((wsname,))
class VivServerDialog(QtGui.QDialog):
def __init__(self, workspaces, parent=None):
QtGui.QDialog.__init__(self, parent=parent)
self.setWindowTitle('Select a workspace...')
self.wsname = None
self.wslist = WorkspaceListView(workspaces, parent=self)
self.buttons = QtGui.QDialogButtonBox(QtGui.QDialogButtonBox.Ok | QtGui.QDialogButtonBox.Cancel)
self.buttons.accepted.connect( self.accept )
self.buttons.rejected.connect( self.reject )
layout = VBox()
layout.addWidget(self.wslist)
layout.addWidget(self.buttons)
self.setLayout(layout)
self.wslist.doubleClicked.connect( self.workspaceActivated )
def getWorkspaceName(self):
self.exec_()
return self.wsname
def workspaceActivated(self, idx):
self.accept()
def accept(self):
for idx in self.wslist.selectedIndexes():
row = idx.internalPointer()
if row:
self.wsname = row.rowdata[0]
break
return QtGui.QDialog.accept(self)
class VivSaveServerDialog(QtGui.QDialog):
def __init__(self, vw, parent=None):
QtGui.QDialog.__init__(self, parent=parent)
self.setWindowTitle('Save to Workspace Server...')
self.vw = vw
try:
server = vw.config.remote.server
except AttributeError:
server = "visi.kenshoto.com"
self.wsname = QtGui.QLineEdit(vw.getMeta('StorageName',''), parent=self)
self.wsserver = QtGui.QLineEdit(server, parent=self)
self.setdef = QtGui.QCheckBox(parent=self)
self.buttons = QtGui.QDialogButtonBox(QtGui.QDialogButtonBox.Ok | QtGui.QDialogButtonBox.Cancel)
self.buttons.accepted.connect( self.accept )
self.buttons.rejected.connect( self.reject )
serverlayout = QtGui.QHBoxLayout()
serverlayout.addWidget(self.wsserver)
serverlayout.addWidget(QtGui.QLabel('Make Default:'))
serverlayout.addWidget(self.setdef)
layout = QtGui.QFormLayout()
layout.addRow('Workspace Name', self.wsname)
layout.addRow('Workspace Server', serverlayout)
layout.addWidget(self.buttons)
self.setLayout(layout)
def getNameAndServer(self):
if not self.exec_():
return (None,None)
wsname = str(self.wsname.text())
wsserver = str(self.wsserver.text())
return (wsname,wsserver)
def accept(self, *args, **kwargs):
QtGui.QDialog.accept(self, *args, **kwargs)
if self.setdef.isChecked():
cfg = self.vw.config.getSubConfig("remote")
cfg['server'] = str(self.wsserver.text())
self.vw.config.saveConfigFile()
# FIXME: should we combine the VivConnectServerDialog with the VivSaveServerDialog? there are like 10 lines different.
class VivConnectServerDialog(QtGui.QDialog):
def __init__(self, vw, parent=None):
QtGui.QDialog.__init__(self, parent=parent)
self.setWindowTitle('Workspace Server...')
self.vw = vw
try:
server = vw.config.remote.server
except AttributeError:
server = "visi.kenshoto.com"
        self.wsserver = QtGui.QLineEdit(server, parent=self)
self.setdef = QtGui.QCheckBox(parent=self)
self.buttons = QtGui.QDialogButtonBox(QtGui.QDialogButtonBox.Ok | QtGui.QDialogButtonBox.Cancel)
self.buttons.accepted.connect( self.accept )
self.buttons.rejected.connect( self.reject )
serverlayout = QtGui.QHBoxLayout()
serverlayout.addWidget(self.wsserver)
        serverlayout.addWidget(QtGui.QLabel('Make Default:'))
serverlayout.addWidget(self.setdef)
layout = QtGui.QFormLayout()
layout.addRow('Workspace Server', serverlayout)
layout.addWidget(self.buttons)
self.setLayout(layout)
def getServer(self):
if not self.exec_():
return None
wsserver = str(self.wsserver.text())
return wsserver
def accept(self, *args, **kwargs):
QtGui.QDialog.accept(self, *args, **kwargs)
if self.setdef.isChecked():
cfg = self.vw.config.getSubConfig("remote")
cfg['server'] = str(self.wsserver.text())
self.vw.config.saveConfigFile()
@vq_main.idlethread
def openServerAndWorkspace(vw, parent=None):
dia = VivConnectServerDialog(vw, parent=parent)
host = dia.getServer()
if host == None:
return
connServerAndWorkspace(vw, str(host), parent=parent)
@vq_main.workthread
def connServerAndWorkspace(vw, host,parent=None):
# NOTE: do *not* touch parent (or qt) in here!
try:
server = viv_server.connectToServer(host)
wslist = server.listWorkspaces()
selectServerWorkspace(vw, server, wslist, parent=parent)
except Exception, e:
vw.vprint('Server Error: %s' % e)
return
@vq_main.idlethread
def selectServerWorkspace(vw, server, workspaces, parent=None):
dia = VivServerDialog(workspaces, parent=parent)
workspace = dia.getWorkspaceName()
if workspace == None:
return
loadServerWorkspace(vw, server, workspace)
@vq_main.workthread
def loadServerWorkspace(oldvw, server, workspace):
oldvw.vprint('Loading Workspace: %s' % workspace)
vw = viv_server.getServerWorkspace(server, workspace)
import vivisect.qt.main as viv_q_main
viv_q_main.runqt(vw, closeme=oldvw.getVivGui())
@vq_main.idlethread
def saveToServer(vw, parent=None):
dia = VivSaveServerDialog(vw, parent=parent)
wsname,wsserver = dia.getNameAndServer()
vw.vprint('Saving to Workspace Server: %s (%s)' % (wsserver,wsname))
sendServerWorkspace(vw, wsname, wsserver)
@e_threads.firethread
def sendServerWorkspace(vw, wsname, wsserver):
try:
events = vw.exportWorkspace()
server = viv_server.connectToServer(wsserver)
server.addNewWorkspace(wsname, events)
except Exception, e:
vw.vprint('Workspace Server Error: %s' % e)
return
vw.setMeta('WorkspaceServer', wsserver)
def openSharedWorkspace(vw, parent=None):
'''
Open a workspace shared by a vivisect peer.
'''
hostport, ok = QtGui.QInputDialog.getText(parent, 'Shared Workspace...', 'host:port')
if not ok:
return
uri = 'cobra://%s/vivisect.remote.client?msgpack=1' % hostport
c_remoteapp.execRemoteApp(uri)
zhuzhidong/StaticAnalysisforCI | script/klocwork/__init__.py | Python | mit | 74 | 0
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
__author__ = "zhuzhidong"
Bauble/bauble.api | test/spec/test_user.py | Python | bsd-3-clause | 1,763 | 0.002269
#import pytest
#import sys
import test.api as api
#import bauble.db as db
from bauble.model.user import User
def xtest_user_json(session):
username = 'test_user_json'
password = username
users = session.query(User).filter_by(username=username)
for user in users:
session.delete(user)
session.commit()
user = User(username=username, password=password)
session.add(user)
session.commit()
def xtest_get_schema():
schema = api.get_resource("/user/schema")
def xtest_resource(session):
"""
    Test that the server properly handles /family resources
"""
return
db.set_session_schema(session, session.merge(organization).pg_schema)
families = session.query(Family)
# create a family family
first_family = api.create_resource('/family', {'family': api.get_random_name()})
# get the family
    first_family = api.get_resource(first_family['ref'])
# query for families
response_json = api.query_resource('/family', q=second_family['family'])
second_family = response_json[0] # we're assuming there's only one
assert second_family['ref'] == second_ref
# delete the created resources
api.delete_resource(first_family['ref'])
api.delete_resource(second_family['ref'])
def test_password(session):
username = api.get_random_name()
email = username + '@bauble.io'
password = api.get_random_name()
user = User(email=email, username=username, password=password)
session.add(user)
session.commit()
# test the password isn't stored in plain text
assert user._password != password
    # test that we can compare the password against a plain text password
assert user.password == password
session.delete(user)
session.commit()
juniwang/open-hackathon | open-hackathon-client/src/client/config_docker.py | Python | mit | 7,716 | 0.001426
# -*- coding: utf-8 -*-
"""
This file is covered by the LICENSING file in the root of this project.
"""
import os
# "javascript" section for javascript. see @app.route('/config.js') in app/views.py
# NOTE: all following key/secrets for test purpose.
ENDPOINT_WEB = os.getenv("ENDPOINT_WEB", "http://localhost") # host name of the UI site
ENDPOINT_HACKATHON_API = "http://" + os.environ["HACKATHON_SERVER"] + ":" + os.environ["HACKATHON_SERVER_PORT"]
GITHUB_CLIENT_ID = os.getenv("GITHUB_CLIENT_ID", "b44f3d47bdeb26b9c4e6")
QQ_CLIENT_ID = os.getenv("QQ_CLIENT_ID", "101200890")
QQ_OAUTH_STATE = os.getenv("QQ_OAUTH_STATE", "openhackathon")  # todo: the state should not be constant; it should be unguessable to prevent CSRF
WECHAT_APP_ID = os.getenv("WECHAT_APP_ID", "wxe75b8aef71c2059f")
WECHAT_OAUTH_STATE = os.getenv("WECHAT_OAUTH_STATE", "openhackathon")  # NOTE: maybe this should be the same as QQ_OAUTH_STATE?
WEIBO_CLIENT_ID = os.getenv("WEIBO_CLIENT_ID", "479757037")
LIVE_CLIENT_ID = os.getenv("LIVE_CLIENT_ID", "000000004414E0A6")
Config = {
"environment": "local",
"app": {
"secret_key": "secret_key"
},
"login": {
"provider_enabled": ["github", "wechat"],
"session_valid_time_minutes": 60
},
"endpoint": {
"hackathon_web": ENDPOINT_WEB,
"hackathon_api": ENDPOINT_HACKATHON_API
},
"javascript": {
"github": {
"authorize_url": "https://github.com/login/oauth/authorize?client_id=%s&redirect_uri=%s/github&scope=user" % (
GITHUB_CLIENT_ID, ENDPOINT_WEB)
},
"weibo": {
"authorize_url": "https://api.weibo.com/oauth2/authorize?client_id=%s&redirect_uri=%s/weibo&scope=all" % (
WEIBO_CLIENT_ID, ENDPOINT_WEB)
},
"qq": {
"authorize_url": "https://graph.qq.com/oauth2.0/authorize?client_id=%s&redirect_uri=%s/qq&scope=get_user_info&state=%s&response_type=code" % (
QQ_CLIENT_ID, ENDPOINT_WEB, QQ_OAUTH_STATE)
},
"wechat": {
"authorize_url": "https://open.weixin.qq.com/connect/qrconnect?appid=%s&redirect_uri=%s/wechat&response_type=code&scope=snsapi_login&state=%s#wechat_redirect" % (
WECHAT_APP_ID, ENDPOINT_WEB, WECHAT_OAUTH_STATE)
},
"wechat_mobile": {
"authorize_url": "https://open.weixin.qq.com/connect/oauth2/authorize?appid=%s&redirect_uri=%s/wechat&response_type=code&scope=snsapi_base&state=%s#wechat_redirect" % (
WECHAT_APP_ID, ENDPOINT_WEB, WECHAT_OAUTH_STATE)
},
"live": {
"authorize_url": "https://login.live.com/oauth20_authorize.srf?client_id=%s&scope=wl.basic+,wl.emails&response_type=code&redirect_uri=%s/live" % (
LIVE_CLIENT_ID, ENDPOINT_WEB)
},
"hackathon": {
"endpoint": ENDPOINT_HACKATHON_API
},
"apiconfig": {
"proxy": ENDPOINT_HACKATHON_API,
"api": {
"admin": {
"hackathon": {
"": ["get", "post", "put", "delete"],
"checkname": ["get"],
"list": ["get"],
"online": ["post"],
"applyonline": ["post"],
"offline": ["post"],
"tags": ["get", "post", "put", "delete"],
"config": ["get", "post", "put", "delete"],
"administrator": {
"": ["put", "post", "delete"],
"list": ["get"]
},
"template": {
"": ["post", "delete"],
"list": ["get"],
"check": ["get"]
},
"organizer": {
"": ["get", "post", "put", "delete"]
},
"award": {
"": ["get", "post", "put", "delete"],
"list": ["get"]
},
"notice": {
"": ["get", "post", "put", "delete"]
}
},
"registration": {
"": ["get", "post", "delete", "put"],
"list": ["get"]
},
"experiment": {
"list": ["get"],
"": ["post", "put", "delete"]
},
"team": {
"list": ["get"],
"score": {
"list": ["get"]
},
"award": ["get", "post", "delete"]
},
"user": {
"list": ["get"]
},
"hostserver": {
"": ["get", "post", "delete", "put"],
"list": ["get"]
}
},
"template": {
"": ["get", "post", "delete", "put"],
"file": ["post"],
"list": ["get"],
"check": ["get"]
},
"user": {
"": ["get"],
"login": ["post", "delete"],
"experiment": {
"": ["get", "post", "delete", "put"]
},
"registration": {
"": ["put", "post", "ge
|
t"],
"checkemail": ["get"],
"list": ["get"]
},
"profile": {
"": ["post", "put"]
},
"picture": {
"": ["put"]
},
"team": {
"member": ["get"]
},
"hackathon": {
"like": ["get", "post", "delete"]
},
"notice": {
"read": ["put"]
},
"show": {
"list": ["get"]
},
"file": {
"": ["post"]
}
},
"hackathon": {
"": ["get"],
"list": ["get"],
"stat": ["get"],
"template": ["get"],
"team": {
"list": ["get"]
},
"registration": {
"list": ["get"]
},
"show": {
"list": ["get"]
},
"grantedawards": ["get"],
"notice": {
"list": ["get"]
}
},
"team": {
"": ["get", "post", "put", "delete"],
"score": ["get", "post", "put"],
"member": {
"": ["post", "put", "delete"],
"list": ["get"]
},
"show": ["get", "post", "delete"],
"template": ["post", "delete"],
"email": ["put"]
},
"talent": {
"list": ["get"]
},
"grantedawards": ["get"]
}
}
}
}
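Illustrative lookup (not part of the source file): the nested "apiconfig" mapping associates URL path segments with allowed HTTP verbs, so a permission check can walk it segment by segment.

assert Config["javascript"]["apiconfig"]["api"]["user"]["login"] == ["post", "delete"]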
5nizza/party-elli | helpers/main_helper.py | Python | mit | 2,935 | 0.007155
import logging
import os
import sys
from typing import List
from logging import FileHandler
from synthesis.z3_via_files import Z3NonInteractiveViaFiles, FakeSolver
from synthesis.z3_via_pipe import Z3InteractiveViaPipes
from third_party.ansistrm import ColorizingStreamHandler
from interfaces.solver_interface import SolverInterface
def get_root_dir() -> str:
#make paths independent of current working directory
rel_path = str(os.path.relpath(__file__))
bosy_dir_toks = ['./'] + rel_path.split(os.sep) # abspath returns 'windows' (not cygwin) path
root_dir = ('/'.join(bosy_dir_toks[:-1]) + '/../') # root dir is two levels up compared to helpers/.
return root_dir
def setup_logging(verbose_level:int=0, filename:str=None, name_processes:bool=False):
level = None
if verbose_level == -1:
level = logging.ERROR
    elif verbose_level == 0:
level = logging.INFO
elif verbose_level >= 1:
level = logging.DEBUG
if name_processes:
formatter = logging.Formatter(fmt="%(processName)-16s -- %(asctime)-10s%(message)s", datefmt="%H:%M:%S")
else:
formatter = logging.Formatter(fmt="%(asctime)-10s%(message)s", datefmt="%H:%M:%S")
stdout_handler = ColorizingStreamHandler()
    stdout_handler.setFormatter(formatter)
stdout_handler.stream = sys.stdout
if not filename:
filename = 'last.log'
file_handler = FileHandler(filename=filename, mode='w')
file_handler.setFormatter(formatter)
root = logging.getLogger()
root.addHandler(stdout_handler)
root.addHandler(file_handler)
root.setLevel(level)
return logging.getLogger(__name__)
class Z3SolverFactory:
def __init__(self,
                 smt_tmp_files_prefix:str,
                 z3_path:str,
is_incremental:bool,
generate_queries_only:bool,
remove_files:bool):
self.smt_tmp_files_prefix = smt_tmp_files_prefix
self.z3_path = z3_path
self.is_incremental = is_incremental
self.generate_queries = generate_queries_only
self.remove_files = remove_files
assert not (self.is_incremental and self.generate_queries)
self.solvers = [] # type: List[SolverInterface]
self.seed = 0
def create(self) -> SolverInterface:
self.seed += 1
if self.is_incremental:
solver = Z3InteractiveViaPipes(self.z3_path)
elif self.generate_queries:
solver = FakeSolver(self.smt_tmp_files_prefix+str(self.seed),
self.z3_path)
else:
solver = Z3NonInteractiveViaFiles(self.smt_tmp_files_prefix+str(self.seed),
self.z3_path,
self.remove_files)
self.solvers.append(solver)
return solver
def down_solvers(self):
for s in self.solvers:
s.die()
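A hypothetical usage sketch (the file prefix and z3 path below are invented): build the factory, hand out a solver, and tear everything down when finished.

factory = Z3SolverFactory(smt_tmp_files_prefix='/tmp/query_',
                          z3_path='/usr/bin/z3',
                          is_incremental=True,
                          generate_queries_only=False,
                          remove_files=True)
solver = factory.create()  # a Z3InteractiveViaPipes in incremental mode
# ... run synthesis queries through `solver` ...
factory.down_solvers()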
franklingu/leetcode-solutions | questions/partition-array-into-disjoint-intervals/Solution.py | Python | mit | 1,320 | 0.003788
"""
Given an array nums, partition it into two (contiguous) subarrays left and right so that:
Every element in left is less than or equal to every element in right.
left and right are non-empty.
left has the smallest possible size.
Return the length of left after such a partitioning. It is guaranteed that such a partitioning exists.
Example 1:
Input: nums = [5,0,3,8,6]
Output: 3
Explanation: left = [5,0,3], right = [8,6]
Example 2:
Input: nums = [1,1,1,0,6,12]
Output: 4
Explanation: left = [1,1,1,0], right = [6,12]
Note:
2 <= nums.length <= 30000
0 <= nums[i] <= 10^6
It is guaranteed there is at least one way to partition nums as described.
"""
class Solution(object):
def partitionDisjoint(self, A):
"""
:type A: List[int]
:rtype: int
"""
mx, ms = [], []
for n in A:
if not mx:
mx.append(n)
else:
mx.append(max(mx[-1], n))
        for n in reversed(A):
if not ms:
ms.append(n)
else:
ms.append(min(ms[-1], n))
ms = list(reversed(ms))
for i, n in enumerate(mx):
if i >= len(A) - 1:
continue
n2 = ms[i + 1]
if n2 >= n:
return i + 1
return len(A)
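A quick check against the two examples quoted in the docstring above:

sol = Solution()
assert sol.partitionDisjoint([5, 0, 3, 8, 6]) == 3
assert sol.partitionDisjoint([1, 1, 1, 0, 6, 12]) == 4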
haoliangyu/basic-data-structure | Heap.py | Python | mit | 3,852 | 0.001298
class Heap(object):
    def __init__(self, data=None):
        # Use None instead of a mutable default argument so that separate
        # Heap instances never share the same backing list.
        if not data:
            self.data = [None] * 100
        else:
            self.data = data
self.__size = sum([1 if item is not None else 0 for item in self.data])
self.__heapify()
def size(self):
return self.__size
def empty(self):
return self.__size == 0
def get_max(self):
return self.data[0]
def delete_max(self):
max_data = self.data[0]
self.__swap(0, self.__size - 1)
self.data[self.__size - 1] = None
self.__size -= 1
self.__percolate_down(0)
return max_data
def insert(self, number):
if self.__size == len(self.data):
self.__expand()
self.__size += 1
self.data[self.__size - 1] = number
return self.__percolate_up(self.__size - 1)
@staticmethod
def heap_sort(data):
heap = Heap(data)
index = heap.size() - 1
while not heap.empty():
heap.data[index] = heap.delete_max()
index -= 1
return heap.data
    def __percolate_down(self, i):
        # Sift the value at index i downward: move the larger child up
        # until neither child exceeds the value being placed.
        initial_value = self.data[i]
        current_index = i
        while self.__has_left_child(current_index):
            child = self.__left_child(current_index)
            if self.__has_right_child(current_index):
                child = self.__bigger(child, self.__right_child(current_index))
            if self.data[child] <= initial_value:
                break
            self.data[current_index] = self.data[child]
            current_index = child
        self.data[current_index] = initial_value
        return current_index
    def __percolate_up(self, i):
        # Swap the value upward while it is larger than its parent and
        # return the index where it settles.
        while self.__has_parent(i) and self.data[self.__parent(i)] < self.data[i]:
            self.__swap(i, self.__parent(i))
            i = self.__parent(i)
        return i
def __expand(self):
new_data = [None] * (self.__size * 2)
for i in range(self.__size):
new_data[i] = self.data[i]
self.data = new_data
def __heapify(self):
i = self.__last_internal()
while self.__in_heap(i):
self.__percolate_down(i)
i -= 1
def __swap(self, i , j):
temp = self.data[i]
self.data[i] = self.data[j]
self.data[j] = temp
def __in_heap(self, i):
return 0 <= i < self.size()
def __parent(self, i):
return (i - 1) >> 1
def __last_internal(self):
return self.__parent(self.size() - 1)
def __left_child(self, i):
return (i << 1) + 1
def __right_child(self, i):
return (i + 1) << 1
def __has_parent(self, i):
return 0 < i
def __has_left_child(self, i):
return self.__in_heap(self.__left_child(i))
def __has_right_child(self, i):
return self.__in_heap(self.__right_child(i))
def __bigger(self, i, j):
return i if self.data[i] > self.data[j] else j
def __proper_parent(self, i):
return self.__bigger(self.__bigger(self.__left_child(i), self.__right_child(i)), i) if self.__has_right_child(i) else \
self.__bigger(self.__left_child(i), i) if self.__has_left_child(i) else \
i
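A short usage sketch (not from the source) exercising insert, delete_max, and the heap_sort helper:

h = Heap()
for n in [5, 1, 9, 3]:
    h.insert(n)
assert h.get_max() == 9
assert h.delete_max() == 9 and h.size() == 3
print(Heap.heap_sort([4, 2, 7, 1]))  # [1, 2, 4, 7]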
GovReady/govready-q | controls/migrations/0003_auto_20200417_1418.py | Python | gpl-3.0 | 598 | 0.001672
# Generated by Django 2.2.12 on 2020-04-17 14:18
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('controls', '0002_commoncontrol_common_control_provider'),
]
operations = [
migrations.RemoveField(
model_name='commoncontrol',
name='legacy_impl_smt',
),
migrations.AddField(
model_name='commoncontrol',
name='legacy_imp_stm',
field=models.TextField(blank=True, help_text='Legacy large implementation statement', null=True),
),
]
wetneb/dissemin | deposit/osf/migrations/0001_initial.py | Python | agpl-3.0 | 1,150 | 0.003478
# -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2017-08-28 11:43
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('deposit', '0010_email_on_preferences'),
]
operations = [
migrations.CreateModel(
name='OSFDepositPreferences',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('on_behalf_of', models.CharField(blank=True, help_text='If set, deposits will be associated to this OSF account.', max_length=128, null=True, verbose_name='OSF username')),
('repository', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='deposit.Repository')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
options={
'abstract': False,
},
),
]
tkupek/tkupek-elearning | tkupek_elearning/elearning/migrations/0001_initial.py | Python | gpl-3.0 | 1,728 | 0.001736
# -*- coding: utf-8 -*-
# Generated by Django 1.9.1 on 2016-01-31 17:13
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Option',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('text', models.CharField(max_length=100, null=True)),
('correct', models.BooleanField(default=False)),
],
),
migrations.CreateModel(
name='Question',
fields=[
('id', models.AutoField(primary_key=True, serialize=False)),
('title', models.CharField(max_length=100, null=True)),
('text', models.TextField(null=True)),
('explanation', models.TextField(null=True)),
],
),
migrations.CreateModel(
name='Setting',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=100, null=True)),
('message', models.TextField(null=True)),
('footer', models.TextField(null=True)),
('active', models.BooleanField(default=False, unique=True)),
],
),
migrations.AddField(
model_name='option',
name='question',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='elearning.Question'),
),
]
dcramer/logan | tests/logan/settings/tests.py | Python | apache-2.0 | 1,921 | 0.001562
from unittest import TestCase
import mock
from logan.settings import add_settings
class AddSettingsTestCase(TestCase):
def test_does_add_settings(self):
class NewSettings(object):
FOO = 'bar'
BAR = 'baz'
settings = mock.Mock()
new_settings = NewSettings()
add_settings(new_settings, settings=settings)
self.assertEquals(getattr(settings, 'FOO', None), 'bar')
self.assertEquals(getattr(settings, 'BAR', None), 'baz')
def test_extra_settings_dont_get_set(self):
        class NewSettings(object):
EXTRA_FOO = ('lulz',)
settings = mock.Mock()
settings.FOO = ('foo', 'bar')
new_settings = NewSettings()
add_settings(new_settings, settings=settings)
self.assertFalse(settings.EXTRA_FOO.called)
def test_extra_settings_work_on_tuple(self):
class NewSettings(object):
EXTRA_FOO = ('lulz',)
settings = mock.Mock()
settings.FOO = ('foo', 'bar')
new_settings = NewSettings()
add_settings(new_settings, settings=settings)
self.assertEquals(getattr(settings, 'FOO', None), ('foo', 'bar', 'lulz'))
def test_extra_settings_work_on_list(self):
class NewSettings(object):
EXTRA_FOO = ['lulz']
settings = mock.Mock()
settings.FOO = ['foo', 'bar']
new_settings = NewSettings()
add_settings(new_settings, settings=settings)
self.assertEquals(getattr(settings, 'FOO', None), ['foo', 'bar', 'lulz'])
def test_extra_settings_work_on_mixed_iterables(self):
class NewSettings(object):
EXTRA_FOO = ('lulz',)
settings = mock.Mock()
settings.FOO = ['foo', 'bar']
new_settings = NewSettings()
add_settings(new_settings, settings=settings)
self.assertEquals(getattr(settings, 'FOO', None), ['foo', 'bar', 'lulz'])
mbauskar/tele-frappe | frappe/utils/verified_command.py | Python | mit | 2,109 | 0.02276
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import hmac
import urllib
from frappe import _
import frappe
import frappe.utils
def get_signed_params(params):
"""Sign a url by appending `&_signature=xxxxx` to given params (string or dict).
:param params: String or dict of parameters."""
    if not isinstance(params, basestring):
params = urllib.urlencode(params)
signature = hmac.new(params)
signature.update(get_secret())
return params + "&_signature=" + signature.hexdigest()
def get_secret():
    return frappe.local.conf.get("secret") or str(frappe.db.get_value("User", "Administrator", "creation"))
def verify_request():
"""Verify if the incoming signed request if it is correct."""
query_string = frappe.request.query_string if hasattr(frappe.request, "query_string") \
else frappe.local.flags.signed_query_string
params, signature = query_string.split("&_signature=")
given_signature = hmac.new(params.encode("utf-8"))
given_signature.update(get_secret())
valid = signature == given_signature.hexdigest()
if not valid:
frappe.respond_as_web_page(_("Invalid Link"),
_("This link is invalid or expired. Please make sure you have pasted correctly."))
return valid
def get_url(cmd, params, nonce=None, secret=None):
if not nonce:
nonce = params
signature = get_signature(params, nonce, secret)
params['signature'] = signature
return frappe.utils.get_url("".join(['api/method/', cmd, '?', urllib.urlencode(params)]))
def get_signature(params, nonce, secret=None):
params = "".join((frappe.utils.cstr(p) for p in params.values()))
if not secret:
secret = frappe.local.conf.get("secret") or "secret"
signature = hmac.new(str(nonce))
signature.update(secret)
signature.update(params)
return signature.hexdigest()
def verify_using_doc(doc, signature, cmd):
params = doc.get_signature_params()
return signature == get_signature(params, doc.get_nonce())
def get_url_using_doc(doc, cmd):
params = doc.get_signature_params()
return get_url(cmd, params, doc.get_nonce())
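Illustrative flow (the parameter string is invented): get_signed_params appends a signature derived from the params plus the site secret, and verify_request later splits the query string on "&_signature=" and recomputes the digest for comparison.

# params = "email=test@example.com&name=demo"
# get_signed_params(params)
#   -> "email=test@example.com&name=demo&_signature=<hmac hexdigest>"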
AmandaCMS/amanda-cms | amanda/offer/models.py | Python | mit | 325 | 0.003077
from django.db import models
class Offer(models.Model):
"""
Describes an offer/advertisement
"""
image = models.URLField()
button_text = models.CharField(max_length=32, null=True, blank=True, help_text="Call to action/Button Text")
    url = models.URLField(help_text="Destination url for this offer")
veselosky/schemazoid | schemazoid/__init__.py | Python | apache-2.0 | 221 | 0
"""
schemazoid package
"""
# This file should NEVER import anything (unless it is from the standard
# library). It MUST remain importable by setup.py before any requirements
# have been installed.
__version__ = '0.1.0'
OSgroup-wwzz/DFS | sync.py | Python | mit | 2,802 | 0.00571
import os
import subprocess
import json
from multiprocessing import Process,Lock
"""
Upload and download, API handling services provided by rclone.org
VERY IMPORTANT:
it reads config from a JSON file: config.json
drawback: no exception catching
Example:
from:**here**:/home/exampleuser/examplerfolder
to:**there**:
filename(type EOF to stop):myfile
**here** is the name of the local drive you created in the rclone
**there** is another drive
"""
config = json.load(open('config/sync.json'))
class CopyTask:
def __init__(self, filename, frompath, topath):
self.filename = filename
self.frompath = frompath
self.topath = topath
"""
return a nonzero value for success
needs revision
"""
def copy(copytask):
status = subprocess.call(["rclone", "copy", f"{copytask.frompath}/{copytask.filename}", f"{copytask.topath}"])
if status < 0:
print(f"Copy process terminated abnormally(status {status})")
return 0
else:
print(f"Copy from {copytask.frompath}/{copytask.filename} to {copytask.topath}/{copytask.filename} comple
|
ted successfully")
return 1
"""
Concurrently manage copy processes which run in parallel
Remove it?
"""
def batch_copy(file_list):
count = 0
count_lock = Lock()
# alive indicates if any process is alive
alive = False
proc_list = [None] * config["MAX_PROCESSES"]
while count < len(file_list) or alive:
alive = False
for i in range(config["MAX_PROCESSES"]):
try:
# Maybe here will be a TypeError or something
if proc_list[i] and proc_list[i].is_alive():
alive = True
continue
else:
proc_list[i].join(0)
count_lock.acquire()
if count < len(file_list):
                        proc_list[i] = Process(target=copy, args=(file_list[count],))  # args must be a tuple
proc_list[i].start()
alive = True
count += 1
count_lock.release()
except Exception:
count_lock.acquire()
if count < len(file_list):
                    proc_list[i] = Process(target=copy, args=(file_list[count],))  # args must be a tuple
alive = True
count += 1
proc_list[i].start()
count_lock.release()
print("Batch copy complete")
if __name__=="__main__":
frompath = input("from:")
topath = input("to:")
filelist = []
try:
filename = input("filename(type EOF to stop):")
if filename:
filelist.append(CopyTask(filename, frompath, topath))
except EOFError:
pass
batch_copy(filelist)
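A programmatic usage sketch (drive names follow the here:/there: convention from the module docstring; the file names are invented):

tasks = [CopyTask("report.pdf", "here:/home/user/docs", "there:"),
         CopyTask("notes.txt", "here:/home/user/docs", "there:")]
batch_copy(tasks)  # runs up to config["MAX_PROCESSES"] rclone copies in parallel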
rew4332/tensorflow | tensorflow/contrib/layers/python/layers/feature_column_ops_test.py | Python | apache-2.0 | 66,277 | 0.00433
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for layers.feature_column_ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import tensorflow as tf
from tensorflow.contrib.layers.python.layers import feature_column_ops
from tensorflow.python.ops import init_ops
class TransformerTest(tf.test.TestCase):
def testRealValuedColumnIsIdentityTransformation(self):
real_valued = tf.contrib.layers.real_valued_column("price")
features = {"price": tf.constant([[20.], [110], [-3]])}
output = feature_column_ops._Transformer(features).transform(real_valued)
with self.test_session():
self.assertAllEqual(output.eval(), [[20.], [110], [-3]])
def testBucketizedColumn(self):
bucket = tf.contrib.layers.bucketized_column(
tf.contrib.layers.real_valued_column("price"),
boundaries=[0., 10., 100.])
# buckets 2, 3, 0
features = {"price": tf.constant([[20.], [110], [-3]])}
output = feature_column_ops._Transformer(features).transform(bucket)
with self.test_session():
self.assertAllEqual(output.eval(), [[2], [3], [0]])
def testBucketizedColumnWithMultiDimensions(self):
bucket = tf.contrib.layers.bucketized_column(
tf.contrib.layers.real_valued_column("price", 2),
boundaries=[0., 10., 100.])
# buckets 2, 3, 0
features = {"price": tf.constant([[20., 110], [110., 20], [-3, -3]])}
output = feature_column_ops._Transformer(features).transform(bucket)
with self.test_session():
self.assertAllEqual(output.eval(), [[2, 3], [3, 2], [0, 0]])
def testCachedTransformation(self):
bucket = tf.contrib.layers.bucketized_column(
tf.contrib.layers.real_valued_column("price"),
boundaries=[0., 10., 100.])
# buckets 2, 3, 0
features = {"price": tf.constant([[20.], [110], [-3]])}
transformer = feature_column_ops._Transformer(features)
with self.test_session() as sess:
transformer.transform(bucket)
num_of_ops = len(sess.graph.get_operations())
# Verify that the second call to transform the same feature
# doesn't increase the number of ops.
transformer.transform(bucket)
self.assertEqual(num_of_ops, len(sess.graph.get_operations()))
def testSparseColumnWithHashBucket(self):
hashed_sparse = tf.contrib.layers.sparse_column_with_hash_bucket("wire", 10)
wire_tensor = tf.SparseTensor(values=["omar", "stringer", "marlo"],
indices=[[0, 0], [1, 0], [1, 1]],
shape=[2, 2])
features = {"wire": wire_tensor}
output = feature_column_ops._Transformer(features).transform(hashed_sparse)
with self.test_session():
self.assertEqual(output.values.dtype, tf.int64)
self.assertTrue(all(x < 10 and x >= 0 for x in output.values.eval()))
self.assertAllEqual(output.indices.eval(), wire_tensor.indices.eval())
self.assertAllEqual(output.shape.eval(), wire_tensor.shape.eval())
def testEmbeddingColumn(self):
hashed_sparse = tf.contrib.layers.sparse_column_with_hash_bucket("wire", 10)
wire_tensor = tf.SparseTensor(values=["omar", "stringer", "marlo"],
indices=[[0, 0], [1, 0], [1, 1]],
shape=[2, 2])
features = {"wire": wire_tensor}
output = feature_column_ops._Transformer(features).transform(
tf.contrib.layers.embedding_column(hashed_sparse, 10))
expected = feature_column_ops._Transformer(features).transform(
hashed_sparse)
with self.test_session():
self.assertAllEqual(output.values.eval(), expected.values.eval())
self.assertAllEqual(output.indices.eval(), expected.indices.eval())
self.assertAllEqual(output.shape.eval(), expected.shape.eval())
def testSparseColumnWithKeys(self):
keys_sparse = tf.contrib.layers.sparse_column_with_keys(
"wire", ["marlo", "omar", "stringer"])
wire_tensor = tf.SparseTensor(values=["omar", "stringer", "marlo"],
indices=[[0, 0], [1, 0], [1, 1]],
shape=[2, 2])
features = {"wire": wire_tensor}
output = feature_column_ops._Transformer(features).transform(keys_sparse)
with self.test_session():
tf.initialize_all_tables().run()
self.assertEqual(output.values.dtype, tf.int64)
self.assertAllEqual(output.values.eval(), [1, 2, 0])
self.assertAllEqual(output.indices.eval(), wire_tensor.indices.eval())
self.assertAllEqual(output.shape.eval(), wire_tensor.shape.eval())
def testSparseColumnWithHashBucket_IsIntegerized(self):
hashed_sparse = tf.contrib.layers.sparse_column_with_integerized_feature(
"wire", 10)
    wire_tensor = tf.SparseTensor(values=[100, 1, 25],
                                  indices=[[0, 0], [1, 0], [1, 1]],
shape=[2, 2])
features = {"wire": wire_tensor}
output = feature_column_ops._Transformer(features).transform(hashed_sparse)
with self.test_session():
self.assertEqual(output.values.dtype, tf.int32)
self.assertTrue(all(x < 10 and x >= 0 for x in output.values.eval()))
self.assertAllEqual(output.indices.eval(), wire_tensor.indices.eval())
self.assertAllEqual(output.shape.eval(), wire_tensor.shape.eval())
def testWeightedSparseColumn(self):
ids = tf.contrib.layers.sparse_column_with_keys(
"ids", ["marlo", "omar", "stringer"])
ids_tensor = tf.SparseTensor(values=["stringer", "stringer", "marlo"],
indices=[[0, 0], [1, 0], [1, 1]],
shape=[2, 2])
weighted_ids = tf.contrib.layers.weighted_sparse_column(ids, "weights")
weights_tensor = tf.SparseTensor(values=[10.0, 20.0, 30.0],
indices=[[0, 0], [1, 0], [1, 1]],
shape=[2, 2])
features = {"ids": ids_tensor,
"weights": weights_tensor}
output = feature_column_ops._Transformer(features).transform(weighted_ids)
with self.test_session():
tf.initialize_all_tables().run()
self.assertAllEqual(output[0].shape.eval(), ids_tensor.shape.eval())
self.assertAllEqual(output[0].indices.eval(), ids_tensor.indices.eval())
self.assertAllEqual(output[0].values.eval(), [2, 2, 0])
self.assertAllEqual(output[1].shape.eval(), weights_tensor.shape.eval())
self.assertAllEqual(output[1].indices.eval(),
weights_tensor.indices.eval())
self.assertEqual(output[1].values.dtype, tf.float32)
self.assertAllEqual(output[1].values.eval(), weights_tensor.values.eval())
def testCrossColumn(self):
language = tf.contrib.layers.sparse_column_with_hash_bucket(
"language", hash_bucket_size=3)
country = tf.contrib.layers.sparse_column_with_hash_bucket(
"country", hash_bucket_size=5)
country_language = tf.contrib.layers.crossed_column(
[language, country], hash_bucket_size=15)
features = {
"language": tf.SparseTensor(values=["english", "spanish"],
indices=[[0, 0], [1, 0]],
shape=[2, 1]),
"country": tf.SparseTensor(values=["US", "SV"],
indices=[[0, 0], [1, 0]],
shape=[2, 1])
}
output = feature_column_ops._Transformer(features).transform(
        country_language)
baby-factory/baby-ai | main.py | Python | mit | 3,197 | 0.034696
# encoding: utf-8
# The main program and IO live here
from numpy import *
from utils.tools import loadvoc
from keras.models import Sequential,load_model,Model
from keras.layers import Input, Embedding, LSTM, Dense, merge, RepeatVector,TimeDistributed,Masking
from keras.optimizers import SGD,Adam
from keras.utils.np_utils import to_categorical
import threading
import time
rlock = threading.RLock()
# Encode and decode characters
#i2c, c2i = loadvoc()
ss="1234567890-=qwertyuiopasdfghjkl;'zxcvbnm,."
i2c={}
c2i={}
for i in range(len(ss)):
i2c[i]=ss[i]
c2i[ss[i]]=i
# Model parameter settings
VOC = len(i2c)     # maximum vocabulary size
SEN = 20           # maximum sentence length
INPUT = ['', '']   # buffer for the input sentences
SPEAK_OUTPUT = ''  # buffer for the spoken output
# Convert a sentence into an index array
def s2i(s,l=SEN):
idx=zeros([1,l],dtype=int32)
for i in range(min(l,len(s))):
idx[0,i]=c2i.get(s[i],0)
return idx
def i2s(idx):
    s = ''
    for i in idx[0, :]:
        if i > 0:
            s += i2c.get(i, '')  # accumulate the decoded characters
    return s
# Define the main model
# Input layer
main_input = Input(shape=(SEN,), dtype='int32', name='main_input')
# Character embedding layer
x = Masking(mask_value=0)(main_input)
x = Embedding(output_dim=VOC, input_dim=VOC, input_length=SEN)(x)
# LSTM layer
lstm_out = LSTM(128)(x)
# Body clock: the current time is fed in as [hr, min]
time_input = Input(shape=(2,), name='time_input')
# Body clock activation function
time_out = Dense(128, activation='sigmoid')(time_input)
# The body clock acts as a gating valve
x = merge([lstm_out, time_out], mode='mul')
# Deep network for language logic
x = Dense(128, activation='relu')(x)
# Sequential speech output
x = RepeatVector(SEN)(x)
speak_output = TimeDistributed(Dense(VOC, activation='sigmoid'),name='speak_output')(x)
#speak_output = LSTM(VOC,activation='softmax', name='speak_output',return_sequences=True)(x)
# Wrap the model
model = Sequential()
model.add(Model(input=[main_input, time_input], output=speak_output))
model.compile(loss='categorical_crossentropy', optimizer='rmsprop')
# Model training: loop control
POWER_OFF = False
def run():
global INPUT,SPEAK_OUTPUT,POWER_OFF
while not POWER_OFF:
# Read input data for training
if len(INPUT[0]) == 0:
with rlock:
INPUT[1] = INPUT[0]
INPUT[0] = SPEAK_OUTPUT
X = s2i(INPUT[1])
Y = s2i(INPUT[0])
# Read the system time
tm = time.localtime()
TIME_INPUT = asarray([[tm.tm_hour,tm.tm_min]],dtype=int32)
Y=zeros([1,SEN,VOC],dtype=int32)
Y[0]=to_categorical(X[0],VOC)
model.fit([X, TIME_INPUT],Y,
nb_epoch=1, batch_size=1,verbose=0)
SPEAK_OUTPUT=i2s(model.predict_classes([X,TIME_INPUT],verbose=0))
if len(SPEAK_OUTPUT)>0:
print('A: '+SPEAK_OUTPUT)
time.sleep(1)
def say():
global INPUT,SPEAK_OUTPUT,POWER_OFF
while not POWER_OFF:
a=raw_input('Q: ')
if a == u'end':
POWER_OFF = a
else:
INPUT[1] = INPUT[0]
INPUT[0] = a
threading.Thread(target = run, args = (), name = 'run').start()
threading.Thread(target = say, args = (), name = 'say').start()
minhphung171093/GreenERP_V9 | openerp/addons/base/__openerp__.py | Python | gpl-3.0 | 2,841 | 0.000352
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
{
'name': 'Base',
'version': '1.3',
'category': 'Hidden',
'description': """
The kernel of OpenERP, needed for all installation.
===================================================
""",
'author': 'OpenERP SA',
'maintainer': 'OpenERP SA',
'website': 'http://www.openerp.com',
'depends': [],
'data': [
'base_data.xml',
'res/res_currency_data.xml',
'res/res_country_data.xml',
'security/base_security.xml',
'base_menu.xml',
'res/res_config.xml',
'res/res.country.state.csv',
'ir/ir_actions.xml',
'ir/ir_config_parameter_view.xml',
'ir/ir_cron_view.xml',
'ir/ir_filters.xml',
'ir/ir_mail_server_view.xml',
'ir/ir_model_view.xml',
'ir/ir_attachment_view.xml',
'ir/ir_rule_view.xml',
'ir/ir_sequence_view.xml',
'ir/ir_translation_view.xml',
'ir/ir_ui_menu_view.xml',
'ir/ir_ui_view_view.xml',
'ir/ir_values_view.xml',
'ir/ir_autovacuum.xml',
'ir/ir_model_report.xml',
'ir/report_ir_model.xml',
'ir/ir_logging_view.xml',
'ir/ir_qweb.xml',
'workflow/workflow_view.xml',
'module/module_view.xml',
'module/module_data.xml',
'module/module_report.xml',
'module/report/report_ir_module_reference.xml',
'module/wizard/base_module_update_view.xml',
'module/wizard/base_language_install_view.xml',
'module/wizard/base_import_language_view.xml',
'module/wizard/base_module_upgrade_view.xml',
'module/wizard/base_module_configuration_view.xml',
'module/wizard/base_export_language_view.xml',
'module/wizard/base_update_translations_view.xml',
'module/wizard/base_module_immediate_install.xml',
'res/res_company_view.xml',
'res/res_request_view.xml',
'res/res_lang_view.xml',
'res/res_partner_view.xml',
'res/res_bank_view.xml',
'res/res_country_view.xml',
'res/res_currency_view.xml',
'res/res_users_view.xml',
        'res/res_partner_data.xml',
'res/ir_property_view.xml',
'res/res_security.xml',
'security/ir.model.access.csv',
],
'demo': [
'base_demo.xml',
'res/res_currency_demo.xml',
'res/res_bank_demo.xml',
'res/res_partner_demo.xml',
'res/res_partner_demo.yml',
'res/res_partner_image_demo.xml',
],
'test': [
'tests/base_test.yml',
'tests/test_osv_expression.yml',
        'tests/test_ir_rule.yml',  # <-- These tests modify/add/delete ir_rules.
],
'installable': True,
'auto_install': True,
'post_init_hook': 'post_init',
}
wegamekinglc/Finance-Python | PyFin/Math/Distributions/__init__.py | Python | mit | 429 | 0.002331
# -*- coding: utf-8 -*-
u"""
Created on 2015-7-23
@author: cheng.li
"""
from PyFin.Math.Distributions.NormalDistribution import NormalDistribution
from PyFin.Math.Distributions.NormalDistribution import CumulativeNormalDistribution
from PyFin.Math.Distributions.NormalDistribution import InverseCumulativeNormal
__all__ = ['NormalDistribution',
'CumulativeNormalDistribution',
'InverseCumulativeNormal']
ckuehnel/pycom | blink.py | Python | gpl-3.0 | 701 | 0.018545
#Hello World from pycom LoPy
import machine, pycom, time, sys, uos
pycom.heartbeat(False)
print("")
print("Hello World from pycom LoPy")
print("Running Python %s on %s" %(sys.version, uos.uname() [4]))
print("CPU clock = %d MHz" %(int(machine.freq()[0]/1000/1000)))
print("On-board RGB LED will bl
|
ink 10 times")
def blink():
pycom.rgbled(0x007f00) # green
time.sleep(0.15)
pycom.rgbled(0x7f7f00) # yellow
time.sleep(0.15)
pycom.rgbled(0x7f0000) # red
time.sleep(0.15)
pycom.rgbled(0) # off
time.sleep(.55)
return
for x in range(0, 10):
print("*", end="")
blink()
print("")
pycom.heartbeat(True)
iamweilee/pylearn | pickle-example-1.py | Python | mit | 509 | 0.003929
'''
The pickle module works like the marshal module, serializing data so that it
can be saved or transmitted. It is slower than marshal, but it can handle
class instances, shared elements, recursive data structures, and more.
'''
import pickle
value = (
"this is a string",
[1, 2, 3, 4],
("more tuples", 1.0, 2.3, 4.5),
"this is yet another string"
)
data = pickle.dumps(value)
# intermediate format
print type(data), len(data)
print "-"*50
print data
print "-"*50
print pickle.loads(data)
'''
On the other hand, pickle cannot handle code objects (see the copy_reg module for a way around this).
'''
antoinecarme/pyaf | tests/perf/test_long_cycles_nbrows_cycle_length_1000_440.py | Python | bsd-3-clause | 88 | 0.022727
import tests.perf.test_cycles_full_long_long as gen

gen.test_nbrows_cycle(1000, 440)
code-google-com/cortex-vfx | test/IECoreHoudini/ToHoudiniCurvesConverter.py | Python | bsd-3-clause | 45,988 | 0.070888
##########################################################################
#
# Copyright (c) 2010-2013, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# * Neither the name of Image Engine Design nor the names of any
# other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import hou
import IECore
import IECoreHoudini
import unittest
import os
class TestToHoudiniCurvesConverter( IECoreHoudini.TestCase ) :
__testScene = "test/converterTest.hip"
__curveCoordinates = [
IECore.V3fVectorData( [ IECore.V3f( 2.42892,0,-1.04096 ), IECore.V3f( 1.69011,0,-9.88746 ), IECore.V3f( 5.74288,0,-4.50183 ), IECore.V3f( 2.69113,0,-2.78439 ), IECore.V3f( 5.8923,0,1.53021 ), IECore.V3f( 6.20965,-9.53674e-07,2.03933 ), IECore.V3f( 2.72012,0,2.5738 ), IECore.V3f( 1.76971,0,-0.632637 ) ] ),
IECore.V3fVectorData( [ IECore.V3f( -0.560781,0,-1.04096 ), IECore.V3f( 2.21995,0,-6.31734 ), IECore.V3f( 4.77513,0,-6.61752 ), IECore.V3f( 4.10862,0,-2.78439 ), IECore.V3f( 4.29081,0,1.53021 ), IECore.V3f( 6.20965,-9.53674e-07,3.7489 ), IECore.V3f( -2.61584,0,2.5738 ), IECore.V3f( -1.45801,0,0.780965 ) ] ),
IECore.V3fVectorData( [ IECore.V3f( 2.42892,0,-1.04096 ), IECore.V3f( 2.21995,0,-4.51254 ), IECore.V3f( 4.77513,0,-4.50183 ), IECore.V3f( 6.32944,0,-2.78439 ), IECore.V3f( 7.231,0,1.53021 ), IECore.V3f( 6.20965,-9.53674e-07,3.7489 ), IECore.V3f( 2.72012,0,2.5738 ), IECore.V3f( 1.76971,0,0.780965 ) ] ),
IECore.V3fVectorData( [ IECore.V3f( 5.83427,0,-1.04096 ), IECore.V3f( 2.21995,0,-4.51254 ), IECore.V3f( 6.14141,0,-4.50183 ), IECore.V3f( 7.48932,0,-2.78439 ), IECore.V3f( 9.0197,0,1.53021 ), IECore.V3f( 6.20965,-9.53674e-07,1.2141 ), IECore.V3f( 2.72012,0,2.5738 ), IECore.V3f( 3.23728,0,0.780965 ) ] )
]
def curves( self, basis=IECore.CubicBasisf.linear(), periodic=False, numCurves=4 ) :
vertsPerCurve = IECore.IntVectorData()
pData = IECore.V3fVectorData()
pData.setInterpretation( IECore.GeometricData.Interpretation.Point )
for i in range( 0, numCurves ) :
p = TestToHoudiniCurvesConverter.__curveCoordinates[i%4]
if not periodic and basis == IECore.CubicBasisf.bSpline() :
vertsPerCurve.append( len(p) + 4 )
else :
vertsPerCurve.append( len(p) )
pData.extend( p )
curves = IECore.CurvesPrimitive( vertsPerCurve, basis, periodic )
floatData = IECore.FloatData( 1.5 )
v2fData = IECore.V2fData( IECore.V2f( 1.5, 2.5 ) )
v3fData = IECore.V3fData( IECore.V3f( 1.5, 2.5, 3.5 ) )
color3fData = IECore.Color3fData( IECore.Color3f( 1.5, 2.5, 3.5 ) )
intData = IECore.IntData( 1 )
v2iData = IECore.V2iData( IECore.V2i( 1, 2 ) )
v3iData = IECore.V3iData( IECore.V3i( 1, 2, 3 ) )
stringData = IECore.StringData( "this is a string" )
intRange = range( 1, pData.size()+1 )
floatVectorData = IECore.FloatVectorData( [ x+0.5 for x in intRange ] )
v2fVectorData = IECore.V2fVectorData( [ IECore.V2f( x, x+0.5 ) for x in intRange ] )
v3fVectorData = IECore.V3fVectorData( [ IECore.V3f( x, x+0.5, x+0.75 ) for x in intRange ] )
color3fVectorData = IECore.Color3fVectorData( [ IECore.Color3f( x, x+0.5, x+0.75 ) for x in intRange ] )
intVectorData = IECore.IntVectorData( intRange )
v2iVectorData = IECore.V2iVectorData( [ IECore.V2i( x, -x ) for x in intRange ] )
v3iVectorData = IECore.V3iVectorData( [ IECore.V3i( x, -x, x*2 ) for x in intRange ] )
stringVectorData = IECore.StringVectorData( [ "string number %d!" % x for x in intRange ] )
detailInterpolation = IECore.PrimitiveVariable.Interpolation.Constant
pointInterpolation = IECore.PrimitiveVariable.Interpolation.Vertex
primitiveInterpolation = IECore.PrimitiveVariable.Interpolation.Uniform
# add all valid detail attrib types
curves["floatDetail"] = IECore.PrimitiveVariable( detailInterpolation, floatData )
curves["v2fDetail"] = IECore.PrimitiveVariable( detailInterpolation, v2fData )
curves["v3fDetail"] = IECore.PrimitiveVariable( detailInterpolation, v3fData )
curves["color3fDetail"] = IECore.PrimitiveVariable( detailInterpolation, color3fData )
curves["intDetail"] = IECore.PrimitiveVariable( detailInterpolation, intData )
curves["v2iDetail"] = IECore.PrimitiveVariable( detailInterpolation, v2iData )
curves["v3iDetail"] = IECore.PrimitiveVariable( detailInterpola
|
tion, v3iData )
curves["stringDetail"] = IECore.PrimitiveVariable( detailInterpolation, stringData )
        # add all valid point attrib types
if not periodic and basis == IECore.CubicBasisf.bSpline() :
modPData = IECore.V3fVectorData()
modPData.setInterpretation( IECore.GeometricData.Interpretation.Point )
floatPointData = IECore.FloatVectorData()
v2fPointData = IECore.V2fVectorData()
v3fPointData = IECore.V3fVectorData()
color3fPointData = IECore.Color3fVectorData()
intPointData = IECore.IntVectorData()
v2iPointData = IECore.V2iVectorData()
v3iPointData = IECore.V3iVectorData()
stringPointData = IECore.StringVectorData()
datas = [ modPData, floatPointData, v2fPointData, v3fPointData, color3fPointData, intPointData, v2iPointData, v3iPointData, stringPointData ]
rawDatas = [ pData, floatVectorData, v2fVectorData, v3fVectorData, color3fVectorData, intVectorData, v2iVectorData, v3iVectorData, stringVectorData ]
pIndex = 0
for i in range( 0, numCurves ) :
for j in range( 0, len(datas) ) :
index = 8*i
datas[j].extend( [ rawDatas[j][index], rawDatas[j][index] ] )
datas[j].extend( rawDatas[j][index:index+8] )
datas[j].extend( [ rawDatas[j][index+7], rawDatas[j][index+7] ] )
curves["P"] = IECore.PrimitiveVariable( pointInterpolation, modPData )
curves["floatPoint"] = IECore.PrimitiveVariable( pointInterpolation, floatPointData )
curves["v2fPoint"] = IECore.PrimitiveVariable( pointInterpolation,v2fPointData )
curves["v3fPoint"] = IECore.PrimitiveVariable( pointInterpolation, v3fPointData )
curves["color3fPoint"] = IECore.PrimitiveVariable( pointInterpolation, color3fPointData )
curves["intPoint"] = IECore.PrimitiveVariable( pointInterpolation, intPointData )
curves["v2iPoint"] = IECore.PrimitiveVariable( pointInterpolation, v2iPointData )
curves["v3iPoint"] = IECore.PrimitiveVariable( pointInterpolation, v3iPointData )
else :
curves["P"] = IECore.PrimitiveVariable( pointInterpolation, pData )
curves["floatPoint"] = IECore.PrimitiveVariable( pointInterpolation, floatVectorData[:8*numCurves] )
curves["v2fPoint"] = IECore.PrimitiveVariable( pointInterpolation, v2fVectorData[:8*numCurves] )
curves["v3fPoint"] = IECore.PrimitiveVariable( pointInterpolation, v3fVectorData[:8*numCurves] )
curves["color3fPoint"] = IECore.PrimitiveVariable( pointInterpolation, color3fV
|
hal0x2328/neo-python
|
neo/Core/UInt256.py
|
Python
|
mit
| 688
| 0.002907
|
from neo.Core.UIntBase import UIntBase
class UInt256(UIntBase):
def __init__(self, data=None):
super(UInt256, self).__init__(num_bytes=32, data=data)
@staticmethod
def ParseString(value):
"""
Parse the input str `value` into UInt256
Raises:
ValueError: if the input `value` length (after '0x' if present) != 64
"""
if value[0:2] == '0x':
value = value[2:]
if not len(value) == 64:
raise ValueError(f"Invalid UInt256 input: {len(value)} chars != 64 chars")
|
reversed_data = bytearray.fromhex(value)
reversed_data.reverse()
return UInt256(data=reversed_data)
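# --- Illustrative usage sketch, not part of the original file; the hex
# strings are made-up placeholder values.
if __name__ == '__main__':
    h = UInt256.ParseString('0x' + 'ab' * 32)  # '0x' prefix is stripped
    raw = UInt256.ParseString('cd' * 32)       # bare 64-char hex also accepted
    assert isinstance(h, UInt256) and isinstance(raw, UInt256)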
|
olivierfriard/BORIS
|
boris/behavior_binary_table.py
|
Python
|
gpl-3.0
| 12,397
| 0.004195
|
"""
BORIS
Behavioral Observation Research Interactive Software
Copyright 2012-2022 Olivier Friard
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
MA 02110-1301, USA.
"""
import os
import pathlib
import re
import sys
from decimal import Decimal as dc
import tablib
from PyQt5.QtCore import Qt
from PyQt5.QtWidgets import (QFileDialog, QInputDialog, QMessageBox)
from boris import dialog
from boris import project_functions
from boris import select_observations
from boris import utilities
from boris.config import *
def create_behavior_binary_table(pj: dict, selected_observations: list, parameters_obs: dict,
time_interval: float) -> dict:
"""
create behavior binary table
Args:
pj (dict): project dictionary
selected_observations (list): list of selected observations
parameters_obs (dict): dict of parameters
time_interval (float): time interval (in seconds)
Returns:
dict: dictionary of tablib dataset
"""
results_df = {}
state_behavior_codes = [
x for x in utilities.state_behavior_codes(pj[ETHOGRAM]) if x in parameters_obs[SELECTED_BEHAVIORS]
]
point_behavior_codes = [
x for x in utilities.point_behavior_codes(pj[ETHOGRAM]) if x in parameters_obs[SELECTED_BEHAVIORS]
]
if not state_behavior_codes and not point_behavior_codes:
return {"error": True, "msg": "No state events selected"}
for obs_id in selected_observations:
start_time = parameters_obs[START_TIME]
end_time = parameters_obs[END_TIME]
# check observation interval
if parameters_obs["time"] == TIME_FULL_OBS:
max_obs_length, _ = project_functions.observation_length(pj, [obs_id])
start_time = dc("0.000")
end_time = dc(max_obs_length)
if parameters_obs["time"] == TIME_EVENTS:
try:
start_time = dc(pj[OBSERVATIONS][obs_id][EVENTS][0][0])
except Exception:
start_time = dc("0.000")
try:
end_time = dc(pj[OBSERVATIONS][obs_id][EVENTS][-1][0])
except Exception:
max_obs_length, _ = project_functions.observation_length(pj, [obs_id])
end_time = dc(max_obs_length)
if obs_id not in results_df:
results_df[obs_id] = {}
for subject in parameters_obs[SELECTED_SUBJECTS]:
# extract tuple (behavior, modifier)
behav_modif_list = [(idx[2], idx[3]) for idx in pj[OBSERVATIONS][obs_id][EVENTS] if idx[1] == (
subject if subject != NO_FOCAL_SUBJECT else "") and idx[2] in parameters_obs[SELECTED_BEHAVIORS]]
# extract observed subjects NOT USED at the moment
observed_subjects = [event[EVENT_SUBJECT_FIELD_IDX] for event in pj[OBSERVATIONS][obs_id][EVENTS]]
# add selected behavior if not found in (behavior, modifier)
if not parameters_obs[EXCLUDE_BEHAVIORS]:
#for behav in state_behavior_codes:
for behav in parameters_obs[SELECTED_BEHAVIORS]:
if behav not in [x[0] for x in behav_modif_list]:
behav_modif_list.append((behav, ""))
behav_modif_set = set(behav_modif_list)
observed_behav = [(x[0], x[1]) for x in sorted(behav_modif_set)]
if parameters_obs[INCLUDE_MODIFIERS]:
results_df[obs_id][subject] = tablib.Dataset(
headers=["time"] + [f"{x[0]}" + f" ({x[1]})" * (x[1] != "") for x in sorted(behav_modif_set)])
else:
results_df[obs_id][subject] = tablib.Dataset(headers=["time"] + [x[0] for x in sorted(behav_modif_set)])
if subject == NO_FOCAL_SUBJECT:
sel_subject_dict = {"": {SUBJECT_NAME: ""}}
else:
sel_subject_dict = dict([
(idx, pj[SUBJECTS][idx]) for idx in pj[SUBJECTS] if pj[SUBJECTS][idx][SUBJECT_NAME] == subject
])
|
row_idx = 0
t = start_time
while t <= end_time:
# state events
current_states = utilities.get_current_states_modifiers_by_subject_2(
state_behavior_codes, pj[OBSERVATIONS][obs_id][EVENTS], sel_subject_dict, t)
# point events
current_point = utilities.get_current_points_by_subject(point_behavior_codes,
pj[OBSERVATIONS][obs_id][EVENTS],
sel_subject_dict, t, time_interval)
cols = [float(t)] # time
for behav in observed_behav:
if behav[0] in state_behavior_codes:
cols.append(int(behav in current_states[list(current_states.keys())[0]]))
if behav[0] in point_behavior_codes:
cols.append(current_point[list(current_point.keys())[0]].count(behav))
results_df[obs_id][subject].append(cols)
t += time_interval
row_idx += 1
return results_df
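# --- Illustrative sketch, not part of BORIS: the sampling loop above,
# reduced to plain Python for a single behavior. 'intervals' holds
# (start, stop) pairs of state events; a sample is 1 when covered.
def _binary_row_sketch(intervals, t_max, step):
    t, rows = 0.0, []
    while t <= t_max:
        rows.append((t, int(any(start <= t < stop for start, stop in intervals))))
        t += step
    return rows
# _binary_row_sketch([(1.0, 3.0)], 5.0, 1.0) marks 1 only at t=1.0 and t=2.0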
def behavior_binary_table(pj: dict):
"""
ask user for parameters for behavior binary table
call create_behavior_binary_table
"""
_, selected_observations = select_observations.select_observations(
pj, MULTIPLE, "Select observations for the behavior binary table")
if not selected_observations:
return
# check if state events are paired
out = ""
not_paired_obs_list = []
for obs_id in selected_observations:
r, msg = project_functions.check_state_events_obs(obs_id, pj[ETHOGRAM], pj[OBSERVATIONS][obs_id])
if not r:
out += f"Observation: <strong>{obs_id}</strong><br>{msg}<br>"
not_paired_obs_list.append(obs_id)
if out:
out = f"The observations with UNPAIRED state events will be removed from the analysis<br><br>{out}"
results = dialog.Results_dialog()
results.setWindowTitle(f"{programName} - Check selected observations")
results.ptText.setReadOnly(True)
results.ptText.appendHtml(out)
results.pbSave.setVisible(False)
results.pbCancel.setVisible(True)
if not results.exec_():
return
selected_observations = [x for x in selected_observations if x not in not_paired_obs_list]
if not selected_observations:
return
max_obs_length, _ = project_functions.observation_length(pj, selected_observations)
if max_obs_length == -1: # media length not available, user choose to not use events
return
parameters = dialog.choose_obs_subj_behav_category(pj,
selected_observations,
maxTime=max_obs_length,
flagShowIncludeModifiers=True,
flagShowExcludeBehaviorsWoEvents=True,
by_category=False)
if not parameters[SELECTED_SUBJECTS] or not parameters[SELECTED_BEHAVIORS]:
QMessageBox.warning(None, programName, "Select subject(s) and behavior(s) to analyze")
return
# ask for time interval
i, ok = QInputDialog.getDouble(None, "Behavior binary table", "Time interval (in seconds):", 1.0, 0.001, 86400,
|
umutgultepe/spoff
|
yahoo/oauth.py
|
Python
|
gpl-3.0
| 7,530
| 0.008499
|
"""
Yahoo! Python SDK
* Yahoo! Query Language
* Yahoo! Social API
Find documentation and support on Yahoo! Developer Network: http://developer.yahoo.com
Hosted on GitHub: http://github.com/yahoo/yos-social-python/tree/master
@copyright: Copyrights for code authored by Yahoo! Inc. is licensed under the following terms:
@license: BSD Open Source License
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
__author__ = 'Dustin Whittle <dustin@yahoo-inc.com>'
__version__ = '0.1'
import httplib, urllib, urlparse, cgi, oauthlib.oauth
# Yahoo! OAuth APIs
REQUEST_TOKEN_API_URL = 'https://api.login.yahoo.com/oauth/v2/get_request_token'
AUTHORIZATION_API_URL = 'https://api.login.yahoo.com/oauth/v2/request_auth'
ACCESS_TOKEN_API_URL = 'https://api.login.yahoo.com/oauth/v2/get_token'
# http://developer.yahoo.com/oauth/guide/oauth-auth-flow.html
class RequestToken(oauthlib.oauth.OAuthToken):
"""
RequestToken is a data type that represents an end user via a request token.
key -- the token
secret -- the token secret
expires_in -- authorization expiration from issue
request_auth_url -- request token authorization url
"""
key = None
secret = None
expires_in = None
request_auth_url = None
def __init__(self, key, secret, expires_in=None, request_auth_url=None):
self.key = key
self.secret = secret
self.expires_in = expires_in
self.request_auth_url = request_auth_url
def to_string(self):
return urllib.urlencode({'oauth_token': self.key,
'oauth_token_secret': self.secret,
'oauth_expires_in': self.expires_in,
'xoauth_request_auth_url': self.request_auth_url
})
def from_string(s):
"""
Returns a token from something like: oauth_token_secret=xxx&oauth_token=xxx
"""
params = cgi.parse_qs(s, keep_blank_values=False)
key = params['oauth_token'][0]
secret = params['oauth_token_secret'][0]
expires_in = params['oauth_expires_in'][0]
request_auth_url = params['xoauth_request_auth_url'][0]
return RequestToken(key, secret, expires_in, request_auth_url)
from_string = staticmethod(from_string)
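# --- Illustrative round trip, not part of the original file; the key and
# secret values below are placeholders:
_demo = RequestToken('k', 's', expires_in='3600', request_auth_url=AUTHORIZATION_API_URL)
assert RequestToken.from_string(_demo.to_string()).key == 'k'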
class AccessToken(oauthlib.oauth.OAuthToken):
"""
AccessToken is a data type that represents an end user via an access token.
key -- the token
secret -- the token secret
expires_in -- authorization expiration from issue
session_handle -- scalable oauth session handle
authorization_expires_in -- authorization expiration timestamp
yahoo_guid -- yahoo guid
"""
key = None
secret = None
expires_in = None
session_handle = None
authorization_expires_in = None
yahoo_guid = None
def __init__(self, key, secret, expires_in=None, session_handle=None, authorization_expires_in=None, yahoo_guid=None):
self.key = key
self.secret = secret
self.expires_in = expires_in
self.session_handle = session_handle
self.authorization_expires_in = authorization_expires_in
self.yahoo_guid = yahoo_guid
def to_string(self):
return urllib.urlencode({'oauth_token': self.key,
'oauth_token_secret': self.secret,
'oauth_expires_in': self.expires_in,
'oauth_session_handle': self.session_handle,
'oauth_authorization_expires_in': self.authorization_expires_in,
'xoauth_yahoo_guid': self.yahoo_guid
})
def from_string(s):
"""
Returns a token from something like: oauth_token_secret=xxx&oauth_token=xxx
"""
params = cgi.parse_qs(s, keep_blank_values=False)
key = params['oauth_token'][0]
secret = params['oauth_token_secret'][0]
expires_in = params['oauth_expires_in'][0]
session_handle = params['oauth_session_handle'][0]
authorization_expires_in = params['oauth_authorization_expires_in'][0]
|
yahoo_guid = params['xoauth_yahoo_guid'][0]
return AccessToken(key, secret, expires_in, session_handle, authorization_expires_in, yahoo_guid)
from_string = staticmethod(from_string)
class Client(oauthlib.oauth.OAuthClient):
def __init__(self, server='https://api.login.yahoo.com/', port=httplib.HTTPS_PORT, request_token_url=REQUEST_TOKEN_API_URL, access_token_url=ACCESS_TOKEN_API_URL, authorization_url=AUTHORIZATION_API_URL):
urlData = urlparse.urlparse(server)
self.server = urlData.netloc
self.port = port
self.request_token_url = request_token_url
self.access_token_url = access_token_url
self.authorization_url = authorization_url
if urlData.scheme == 'https':
self.connection = httplib.HTTPSConnection("%s:%d" % (urlData.netloc, self.port))
else:
self.connection = httplib.HTTPConnection("%s:%d" % (urlData.netloc, self.port))
# self.connection.set_debuglevel(3)
def fetch_request_token(self, oauth_request):
self.connection.request(oauth_request.http_method, self.request_token_url, headers=oauth_request.to_header('yahooapis.com'))
return RequestToken.from_string(self.connection.getresponse().read().strip())
def fetch_access_token(self, oauth_request):
self.connection.request(oauth_request.http_method, self.access_token_url, headers=oauth_request.to_header('yahooapis.com'))
s = self.connection.getresponse().read()
print s
return AccessToken.from_string(s.strip())
def authorize_token(self, oauth_request):
self.connection.request(oauth_request.http_method, self.authorization_url, headers=oauth_request.to_header('yahooapis.com'))
return self.connection.getresponse().read().strip()
def access_resource(self, oauth_request, body = None):
urlData = urlparse.urlparse(oauth_request.get_normalized_http_url())
if urlData.scheme == 'https':
connection = httplib.HTTPSConnection("%s:443" % urlData.netloc)
else:
connection = httplib.HTTPConnection("%s:80" % urlData.netloc)
if oauth_request.http_method == 'GET':
connection.request(oauth_request.http_method, oauth_request.to_url())
elif oauth_request.http_method in ('PUT', 'POST', 'DELETE'):
connection.request(oauth_request.http_method, oauth_request.to_url(), body=body)
else:
connection.request(oauth_request.http_method, oauth_request.to_url())
return connection.getresponse().read().strip()
|
000paradox000/django-dead-base
|
dead_base/forms/__init__.py
|
Python
|
gpl-3.0
| 62
| 0.016129
|
from baseform import *
from export_json_data_to_excel import *
|
|
bthirion/nistats
|
nistats/contrasts.py
|
Python
|
bsd-3-clause
| 9,609
| 0.000208
|
"""
This module is for contrast computation and operation on contrast to
obtain fixed effect results.
Author: Bertrand Thirion, Martin Perez-Guevara, 2016
"""
from warnings import warn
import numpy as np
import scipy.stats as sps
from .utils import z_score
DEF_TINY = 1e-50
DEF_DOFMAX = 1e10
def compute_contrast(labels, regression_result, con_val, contrast_type=None):
""" Compute the specified contrast given an estimated glm
Parameters
----------
labels : array of shape (n_voxels,),
A map of values on voxels used to identify the corresponding model
results : dict,
With keys corresponding to the different labels
values are RegressionResults instances corresponding to the voxels.
con_val : numpy.ndarray of shape (p) or (q, p)
Where q = number of contrast vectors and p = number of regressors.
contrast_type : {None, 't', 'F'}, optional
Type of the contrast. If None, then defaults to 't' for 1D
`con_val` and 'F' for 2D `con_val`
Returns
-------
con : Contrast instance,
Yields the statistics of the contrast (effects, variance, p-values)
|
"""
con_val = np.asarray(con_val)
dim = 1
if con_val.ndim > 1:
dim = con_val.shape[0]
if contrast_type is None:
contrast_type = 't' if dim == 1 else 'F'
acceptable_contrast_types = ['t', 'F']
if contrast_type not in acceptable_contrast_types:
raise ValueError(
'"{0}" is not a known contrast type. Allowed types are {1}'.
format(contrast_type, acceptable_contrast_types))
if contrast_type == 't':
effect_ = np.zeros((1, labels.size))
var_ = np.zeros(labels.size)
for label_ in regression_result:
label_mask = labels == label_
resl = regression_result[label_].Tcontrast(con_val)
effect_[:, label_mask] = resl.effect.T
var_[label_mask] = (resl.sd ** 2).T
elif contrast_type == 'F':
from scipy.linalg import sqrtm
effect_ = np.zeros((dim, labels.size))
var_ = np.zeros(labels.size)
for label_ in regression_result:
label_mask = labels == label_
reg = regression_result[label_]
cbeta = np.atleast_2d(np.dot(con_val, reg.theta))
invcov = np.linalg.inv(np.atleast_2d(
reg.vcov(matrix=con_val, dispersion=1.0)))
wcbeta = np.dot(sqrtm(invcov), cbeta)
rss = reg.dispersion
effect_[:, label_mask] = wcbeta
var_[label_mask] = rss
dof_ = regression_result[label_].df_resid
return Contrast(effect=effect_, variance=var_, dim=dim, dof=dof_,
contrast_type=contrast_type)
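# --- Illustrative check, not part of nistats: for a 't' contrast the
# per-voxel statistic reduces to effect / sqrt(variance), with a tiny
# floor on the variance to avoid division by zero (values are made up):
_eff = np.array([2.0, 4.0])
_var = np.array([1.0, 4.0])
_t_by_hand = _eff / np.sqrt(np.maximum(_var, DEF_TINY))  # -> array([2., 2.])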
def _fixed_effect_contrast(labels, results, con_vals, contrast_type=None):
"""Computes the summary contrast assuming fixed effects.
Adds the same contrast applied to all labels and results lists.
"""
contrast = None
for i, (lab, res, con_val) in enumerate(zip(labels, results, con_vals)):
if np.all(con_val == 0):
warn('Contrast for session %d is null' % i)
continue
contrast_ = compute_contrast(lab, res, con_val, contrast_type)
if contrast is None:
contrast = contrast_
else:
contrast = contrast + contrast_
if contrast is None:
raise ValueError('all contrasts provided were null contrasts')
return contrast
class Contrast(object):
""" The contrast class handles the estimation of statistical contrasts
on a given model: student (t) or Fisher (F).
The important feature is that it supports addition,
thus opening the possibility of fixed-effects models.
The current implementation is meant to be simple,
and could be enhanced in the future on the computational side
(high-dimensional F contrasts may lead to memory breakage).
"""
def __init__(self, effect, variance, dim=None, dof=DEF_DOFMAX, contrast_type='t',
tiny=DEF_TINY, dofmax=DEF_DOFMAX):
"""
Parameters
----------
effect : array of shape (contrast_dim, n_voxels)
the effects related to the contrast
variance : array of shape (n_voxels)
the associated variance estimate
dim: int or None,
the dimension of the contrast
dof : scalar
the degrees of freedom of the residuals
contrast_type: {'t', 'F'}
specification of the contrast type
"""
if variance.ndim != 1:
raise ValueError('Variance array should have 1 dimension')
if effect.ndim != 2:
raise ValueError('Effect array should have 2 dimensions')
self.effect = effect
self.variance = variance
self.dof = float(dof)
if dim is None:
self.dim = effect.shape[0]
else:
self.dim = dim
if self.dim > 1 and contrast_type == 't':
print('Automatically converted multi-dimensional t to F contrast')
contrast_type = 'F'
self.contrast_type = contrast_type
self.stat_ = None
self.p_value_ = None
self.baseline = 0
self.tiny = tiny
self.dofmax = dofmax
def effect_size(self):
"""Make access to summary statistics more straightforward when
computing contrasts"""
return self.effect[0, :]
def effect_variance(self):
"""Make access to summary statistics more straightforward when
computing contrasts"""
return self.variance
def stat(self, baseline=0.0):
""" Return the decision statistic associated with the test of the
null hypothesis: (H0) 'contrast equals baseline'
Parameters
----------
baseline : float, optional
Baseline value for the test statistic
Returns
-------
stat: 1-d array, shape=(n_voxels,)
statistical values, one per voxel
"""
self.baseline = baseline
# Case: one-dimensional contrast ==> t or t**2
if self.contrast_type == 'F':
stat = np.sum((self.effect - baseline) ** 2, 0) / self.dim /\
np.maximum(self.variance, self.tiny)
elif self.contrast_type == 't':
# avoids division by zero
stat = (self.effect - baseline) / np.sqrt(
np.maximum(self.variance, self.tiny))
else:
raise ValueError('Unknown statistic type')
self.stat_ = stat
return stat.ravel()
def p_value(self, baseline=0.0):
"""Return a parametric estimate of the p-value associated
with the null hypothesis: (H0) 'contrast equals baseline'
Parameters
----------
baseline : float, optional
baseline value for the test statistic
Returns
-------
p_values : 1-d array, shape=(n_voxels,)
p-values, one per voxel
"""
if self.stat_ is None or not self.baseline == baseline:
self.stat_ = self.stat(baseline)
# Valid conjunction as in Nichols et al, Neuroimage 25, 2005.
if self.contrast_type == 't':
p_values = sps.t.sf(self.stat_, np.minimum(self.dof, self.dofmax))
elif self.contrast_type == 'F':
p_values = sps.f.sf(self.stat_, self.dim, np.minimum(
self.dof, self.dofmax))
else:
raise ValueError('Unknown statistic type')
self.p_value_ = p_values
return p_values
def z_score(self, baseline=0.0):
"""Return a parametric estimation of the z-score associated
with the null hypothesis: (H0) 'contrast equals baseline'
Parameters
----------
baseline: float, optional,
Baseline value for the test statistic
Returns
-------
z_score: 1-d array, shape=(n_voxels,)
statistical values, one per voxel
"""
if self.p_value_ is None or not self.baseline == baseline:
self.p_value_ = self.p_value(baseline)
|
DK-Git/script.mdm166a
|
resources/lib/charset_map_hd44780_a00.py
|
Python
|
gpl-2.0
| 13,625
| 0.016
|
'''
XBMC LCDproc addon
Copyright (C) 2012 Team XBMC
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License along
with this program; if not, write to the Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
encmap_hd44780_a00 = {
0x0000: 0x0000, # NULL
# 0x0001: 0x0001, # START OF HEADING
# 0x0002: 0x0002, # START OF TEXT
# 0x0003: 0x0003, # END OF TEXT
# 0x0004: 0x0004, # END OF TRANSMISSION
# 0x0005: 0x0005, # ENQUIRY
# 0x0006: 0x0006, # ACKNOWLEDGE
# 0x0007: 0x0007, # BELL
# 0x0008: 0x0008, # BACKSPACE
# 0x0009: 0x0009, # HORIZONTAL TABULATION
# 0x000a: 0x000a, # LINE FEED
# 0x000b: 0x000b, # VERTICAL TABULATION
# 0x000c: 0x000c, # FORM FEED
# 0x000d: 0x000d, # CARRIAGE RETURN
# 0x000e: 0x000e, # SHIFT OUT
# 0x000f: 0x000f, # SHIFT IN
# 0x0010: 0x0010, # DATA LINK ESCAPE
# 0x0011: 0x0011, # DEVICE CONTROL ONE
# 0x0012: 0x0012, # DEVICE CONTROL TWO
# 0x0013: 0x0013, # DEVICE CONTROL THREE
# 0x0014: 0x0014, # DEVICE CONTROL FOUR
# 0x0015: 0x0015, # NEGATIVE ACKNOWLEDGE
# 0x0016: 0x0016, # SYNCHRONOUS IDLE
# 0x0017: 0x0017, # END OF TRANSMISSION BLOCK
# 0x0018: 0x0018, # CANCEL
# 0x0019: 0x0019, # END OF MEDIUM
# 0x001a: 0x001a, # SUBSTITUTE
# 0x001b: 0x001b, # ESCAPE
# 0x001c: 0x001c, # INFORMATION SEPARATOR FOUR
# 0x001d: 0x001d, # INFORMATION SEPARATOR THREE
# 0x001e: 0x001e, # INFORMATION SEPARATOR TWO
# 0x001f: 0x001f, # INFORMATION SEPARATOR ONE
0x0020: 0x0020, # SPACE
0x0021: 0x0021, # EXCLAMATION MARK
0x0022: 0x0022, # QUOTATION MARK
0x0023: 0x0023, # NUMBER SIGN
0x0024: 0x0024, # DOLLAR SIGN
0x0025: 0x0025, # PERCENT SIGN
0x0026: 0x0026, # AMPERSAND
0x0027: 0x0027, # APOSTROPHE
0x0028: 0x0028, # LEFT PARENTHESIS
0x0029: 0x0029, # RIGHT PARENTHESIS
0x002a: 0x002a, # ASTERISK
0x002b: 0x002b, # PLUS SIGN
0x002c: 0x002c, # COMMA
0x002d: 0x002d, # HYPHEN-MINUS
0x002e: 0x002e, # FULL STOP
0x002f: 0x002f, # SOLIDUS
0x0030: 0x0030, # DIGIT ZERO
0x0031: 0x0031, # DIGIT ONE
0x0032: 0x0032, # DIGIT TWO
0x0033: 0x0033, # DIGIT THREE
0x0034: 0x0034, # DIGIT FOUR
0x0035: 0x0035, # DIGIT FIVE
0x0036: 0x0036, # DIGIT SIX
0x0037: 0x0037, # DIGIT SEVEN
0x0038: 0x0038, # DIGIT EIGHT
0x0039: 0x0039, # DIGIT NINE
0x003a: 0x003a, # COLON
0x003b: 0x003b, # SEMICOLON
0x003c: 0x003c, # LESS-THAN SIGN
0x003d: 0x003d, # EQUALS SIGN
0x003e: 0x003e, # GREATER-THAN SIGN
0x003f: 0x003f, # QUESTION MARK
0x0040: 0x0040, # COMMERCIAL AT
0x0041: 0x0041, # LATIN CAPITAL LETTER A
0x0042: 0x0042, # LATIN CAPITAL LETTER B
0x0043: 0x0043, # LATIN CAPITAL LETTER C
0x0044: 0x0044, # LATIN CAPITAL LETTER D
0x0045: 0x0045, # LATIN CAPITAL LETTER E
0x0046: 0x0046, # LATIN CAPITAL LETTER F
0x0047: 0x0047, # LATIN CAPITAL LETTER G
0x0048: 0x0048, # LATIN CAPITAL LETTER H
0x0049: 0x0049, # LATIN CAPITAL LETTER I
0x004a: 0x004a, # LATIN CAPITAL LETTER J
0x004b: 0x004b, # LATIN CAPITAL LETTER K
0x004c: 0x004c, # LATIN CAPITAL LETTER L
0x004d: 0x004d, # LATIN CAPITAL LETTER M
0x004e: 0x004e, # LATIN CAPITAL LETTER N
0x004f: 0x004f, # LATIN CAPITAL LETTER O
0x0050: 0x0050, # LATIN CAPITAL LETTER P
0x0051: 0x0051, # LATIN CAPITAL LETTER Q
0x0052: 0x0052, # LATIN CAPITAL LETTER R
0x0053: 0x0053, # LATIN CAPITAL LETTER S
0x0054: 0x0054, # LATIN CAPITAL LETTER T
0x0055: 0x0055, # LATIN CAPITAL LETTER U
0x0056: 0x0056, # LATIN CAPITAL LETTER V
0x0057: 0x0057, # LATIN CAPITAL LETTER W
0x0058: 0x0058, # LATIN CAPITAL LETTER X
0x0059: 0x0059, # LATIN CAPITAL LETTER Y
0x005a: 0x005a, # LATIN CAPITAL LETTER Z
0x005b: 0x005b, # LEFT SQUARE BRACKET
0x005c: 0x00a4, # REVERSE SOLIDUS
0x005d: 0x005d, # RIGHT SQUARE BRACKET
0x005e: 0x005e, # CIRCUMFLEX ACCENT
0x005f: 0x005f, # LOW LINE
0x0060: 0x0060, # GRAVE ACCENT
0x0061: 0x0061, # LATIN SMALL LETTER A
0x0062: 0x0062, # LATIN SMALL LETTER B
0x0063: 0x0063, # LATIN SMALL LETTER C
0x0064: 0x0064, # LATIN SMALL LETTER D
0x0065: 0x0065, # LATIN SMALL LETTER E
0x0066: 0x0066, # LATIN SMALL LETTER F
0x0067: 0x0067, # LATIN SMALL LETTER G
0x0068: 0x0068, # LATIN SMALL LETTER H
0x0069: 0x0069, # LATIN SMALL LETTER I
0x006a: 0x006a, # LATIN SMALL LETTER J
0x006b: 0x006b, # LATIN SMALL LETTER K
0x006c: 0x006c, # LATIN SMALL LETTER L
0x006d: 0x006d, # LATIN SMALL LETTER M
0x006e: 0x006e, # LATIN SMALL LETTER N
0x006f: 0x006f, # LATIN SMALL LETTER O
0x0070: 0x0070, # LATIN SMALL LETTER P
0x0071: 0x0071, # LATIN SMALL LETTER Q
0x0072: 0x0072, # LATIN SMALL LETTER R
0x0073: 0x0073, # LATIN SMALL LETTER S
0x0074: 0x0074, # LATIN SMALL LETTER T
0x0075: 0x0075, # LATIN SMALL LETTER U
0x0076: 0x0076, # LATIN SMALL LETTER V
0x0077: 0x0077, # LATIN SMALL LETTER W
0x0078: 0x0078, # LATIN SMALL LETTER X
0x0079: 0x0079, # LATIN SMALL LETTER Y
0x007a: 0x007a, # LATIN SMALL LETTER Z
0x007b: 0x007b, # LEFT CURLY BRACKET
0x007c: 0x007c, # VERTICAL LINE
0x007d: 0x007d, # RIGHT CURLY BRACKET
0x007e: 0x00b0, # TILDE
0x007f: 0x0020, # DELETE
0x0080: 0x0020,
0x0081: 0x0020,
0x0082: 0x002c,
0x0083: 0x0020,
0x0084: 0x0022,
0x0085: 0x0020,
0x0086: 0x0020,
0x0087: 0x0020,
0x0088: 0x005e,
0x0089: 0x0020,
0x008a: 0x0053,
0x008b: 0x003c,
0x008c: 0x0020,
0x008d: 0x0020,
0x008e: 0x005a,
0x008f: 0x0020,
0x0090: 0x0020,
0x0091: 0x0027,
0x0092: 0x0027,
0x0093: 0x0022,
0x0094: 0x0022,
0x0095: 0x00a5,
0x0096: 0x00b0,
0x0097: 0x00b0,
0x0098: 0x00b0,
0x0099: 0x0020,
0x009a: 0x0073,
0x009b: 0x003e,
0x009c: 0x0020,
0x009d: 0x0020,
0x009e: 0x007a,
0x009f: 0x0059,
|
0x00a0: 0x00ff, # NO-BREAK SPACE
0x00a1: 0x0021, # INVERTED EXCLAMATION MARK
0x00a2: 0x0020, # CENT SIGN
0x00a3: 0x0020, # POUND SIGN
0x00a4: 0x0020, # CURRENCY SIGN
0x00a5: 0x005c, # YEN SIGN
0x00a6: 0x007c, # BROKEN BAR
0x00a7: 0x0020, # SECTION SIGN
0x00a8: 0x0022, # DIAERESIS
0x00a9: 0x0020, # COPYRIGHT SIGN
0x00aa: 0x0020, # FEMININE ORDINAL INDICATOR
0x00ab: 0x00ff, # LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
0x00ac: 0x000e, # NOT SIGN -- FIXME?
0x00ad: 0x000a, # SOFT HYPHEN -- FIXME?
0x00ae: 0x0009, # REGISTERED SIGN -- FIXME?
0x00af: 0x0008, # MACRON -- FIXME?
0x00b0: 0x00df, # DEGREE SIGN
0x00b1: 0x0020, # PLUS-MINUS SIGN
0x00b2: 0x0020, # SUPERSCRIPT TWO
0x00b3: 0x0020, # SUPERSCRIPT THREE
0x00b4: 0x0027, # ACUTE ACCENT
0x00b5: 0x00e4, # MICRO SIGN
0x00b6: 0x0020, # PILCROW SIGN
|
bioinform/somaticseq
|
somaticseq/utilities/dockered_pipelines/somatic_mutations/VarDict.py
|
Python
|
bsd-2-clause
| 11,186
| 0.024406
|
import sys, argparse, os, re
import subprocess
from datetime import datetime
import somaticseq.utilities.dockered_pipelines.container_option as container
from somaticseq._version import __version__ as VERSION
ts = re.sub(r'[:-]', '.', datetime.now().isoformat() )
DEFAULT_PARAMS = {'vardict_image' : 'lethalfang/vardictjava:1.7.0',
'MEM' : '8G',
'threads' : 1,
'normal_bam' : None,
'tumor_bam' : None,
'genome_reference' : None,
'inclusion_region' : None,
'output_directory' : os.curdir,
'outfile' : 'VarDict.vcf',
'action' : 'echo',
'vardict_arguments' : '',
'extra_docker_options' : '',
'script' : 'vardict.{}.cmd'.format(ts),
'min_MQ' : 1,
'minimum_VAF' : 0.05,
'process_bed' : True,
}
def tumor_normal(input_parameters, tech='docker' ):
for param_i in DEFAULT_PARAMS:
if param_i not in input_parameters:
input_parameters[param_i] = DEFAULT_PARAMS[param_i]
# The following are required:
assert os.path.exists( input_parameters['normal_bam'] )
assert os.path.exists( input_parameters['tumor_bam'] )
assert os.path.exists( input_parameters['genome_reference'] )
logdir = os.path.join( input_parameters['output_directory'], 'logs' )
outfile = os.path.join( logdir, input_parameters['script'] )
all_paths = []
for path_i in input_parameters['normal_bam'], input_parameters['tumor_bam'], input_parameters['genome_reference'], input_parameters['output_directory'], input_parameters['inclusion_region']:
if path_i:
all_paths.append( path_i )
container_line, fileDict = container.container_params( input_parameters['vardict_image'], tech=tech, files=all_paths, extra_args=input_parameters['extra_docker_options'] )
minVAF = input_parameters['minimum_VAF']
total_bases = 0
num_lines = 0
if input_parameters['inclusion_region']:
bed_file = input_parameters['inclusion_region']
with open(bed_file) as bed:
line_i = bed.readline().rstrip()
while line_i.startswith('track'):
line_i = bed.readline().rstrip()
while line_i:
item = line_i.rstrip().split('\t')
total_bases = total_bases + int(item[2]) - int(item[1])
num_lines += 1
line_i = bed.readline().rstrip()
else:
fai_file = input_parameters['genome_reference'] + '.fai'
bed_file = os.path.join(input_parameters['output_directory'], 'genome.bed')
with open(fai_file) as fai, open(bed_file, 'w') as wgs_bed:
for line_i in fai:
item = line_i.split('\t')
total_bases += int( item[1] )
num_lines += 1
wgs_bed.write( '{}\t{}\t{}\n'.format(item[0], '0', item[1]) )
# However the "bed_file" is defined here, create a dockered line and mount dictionary for it:
bed_split_line, bedDict = container.container_params( 'lethalfang/somaticseq:{}'.format(VERSION), tech, (bed_file, input_parameters['output_directory']) )
# Mounted paths for all the input files and output directory:
mounted_genome_reference = fileDict[ input_parameters['genome_reference'] ]['mount_path']
mounted_tumor_bam = fileDict[ input_parameters['tumor_bam'] ]['mount_path']
mounted_normal_bam = fileDict[ input_parameters['normal_bam'] ]['mount_path']
mounted_outdir = fileDict[ input_parameters['output_directory'] ]['mount_path']
mounted_bed = bedDict[ bed_file ]['mount_path']
with open(outfile, 'w') as out:
out.write( "#!/bin/bash\n\n" )
out.write(f'#$ -o {logdir}\n' )
out.write(f'#$ -e {logdir}\n' )
out.write( '#$ -S /bin/bash\n' )
out.write( '#$ -l h_vmem={}\n'.format( input_parameters['MEM'] ) )
out.write( 'set -e\n\n' )
out.write( 'echo -e "Start at `date +"%Y/%m/%d %H:%M:%S"`" 1>&2\n\n' )
# Decide if Bed file needs to be "split" such that each line has a small enough region
if input_parameters['process_bed'] or total_bases/num_lines > 50000:
out.write(f'{bed_split_line} \\\n' )
out.write( '/opt/somaticseq/somaticseq/utilities/split_mergedBed.py \\\n' )
out.write( '-infile {} -outfile {}/split_regions.bed\n\n'.format(mounted_bed, bedDict[input_parameters['output_directory']]['mount_path']) )
bed_file = '{}/split_regions.bed'.format( mounted_outdir )
out.write(f'{container_line} bash -c \\\n' )
out.write( '"/opt/VarDict-1.7.0/bin/VarDict \\\n' )
if input_parameters['vardict_arguments']:
out.write( '{} \\\n'.format(input_parameters['vardict_arguments']) )
out.write( '-G {} \\\n'.format( mounted_genome_reference ) )
out.write( '-f {} -h \\\n'.format( minVAF ) )
out.write( '-b \'{}|{}\' \\\n'.format(mounted_tumor_bam, mounted_normal_bam) )
out.write( '-Q 1 -c 1 -S 2 -E 3 -g 4 {} \\\n'.format(bed_file) )
out.write( '> {}/vardict.var"\n\n'.format(mounted_outdir) )
out.write( '\n' )
out.write(f'{container_line} \\\n' )
out.write( 'bash -c "cat {}/vardict.var | awk \'NR!=1\' | /opt/VarDict/testsomatic.R | /opt/VarDict/var2vcf_paired.pl -N \'TUMOR|NORMAL\' -f {} \\\n'.format(mounted_outdir, minVAF ) )
out.write( '> {}/{}"\n\n'.format(mounted_outdir, input_parameters['outfile']) )
out.write( '\necho -e "Done at `date +"%Y/%m/%d %H:%M:%S"`" 1>&2\n' )
# "Run" the script that was generated
command_line = '{} {}'.format( input_parameters['action'], outfile )
returnCode = subprocess.call( command_line, shell=True )
return outfile
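# --- Illustrative helper, not part of somaticseq: the BED arithmetic used
# above (sum of end - start over region lines, skipping 'track' headers)
# as a standalone function:
def _bed_total_bases_sketch(bed_path):
    total_bases, num_lines = 0, 0
    with open(bed_path) as bed:
        for line_i in bed:
            if line_i.startswith('track') or not line_i.strip():
                continue
            item = line_i.rstrip().split('\t')
            total_bases += int(item[2]) - int(item[1])
            num_lines += 1
    return total_bases, num_lines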
def tumor_only(input_parameters, tech='docker' ):
for param_i in DEFAULT_PARAMS:
if param_i not in input_parameters:
input_parameters[param_i] = DEFAULT_PARAMS[param_i]
# The following are required:
assert os.path.exists( input_parameters['bam'] )
assert os.path.exists( input_parameters['genome_reference'] )
logdir = os.path.join( input_parameters['output_directory'], 'logs' )
outfile = os.path.join( logdir, input_parameters['script'] )
all_paths = []
for path_i in input_parameters['bam'], input_parameters['genome_reference'], input_parameters['output_directory'], input_parameters['inclusion_region']:
if path_i:
all_paths.append( path_i )
container_line, fileDict = container.container_params( input_parameters['vardict_image'], tech=tech, files=all_paths, extra_args=input_parameters['extra_docker_options'] )
minVAF = input_parameters['minimum_VAF']
total_bases = 0
num_lines = 0
if input_parameters['inclusion_region']:
bed_file = input_parameters['inclusion_region']
with open(bed_file) as bed:
line_i = bed.readline().rstrip()
while line_i.startswith('track'):
line_i = bed.readline().rstrip()
while line_i:
item = line_i.rstrip().split('\t')
total_bases = total_bases + int(item[2]) - int(item[1])
num_lines += 1
line_i = bed.readline().rstrip()
else:
fai_file = input_parameters['genome_reference'] + '.fai'
bed_file = os.path.join(input_parameters['output_directory'], 'genome.bed')
with open(fai_file) as fai, open(bed_file, 'w') as wgs_bed:
for line_i in fai:
item = line_i.split('\t')
|
DarthMaulware/EquationGroupLeaks
|
Leak #5 - Lost In Translation/windows/Resources/Ops/PyScripts/lib/ops/cmd/audit.py
|
Python
|
unlicense
| 3,639
| 0.001924
|
import ops
import ops.cmd
import ops.env
import ops.cmd.safetychecks
OpsCommandException = ops.cmd.OpsCommandException
VALID_OPTIONS = ['status', 'on', 'off', 'disable', 'force']
class AuditCommand(ops.cmd.DszCommand, ):
optgroups = {'main': ['status', 'on', 'off', 'disable']}
reqgroups = ['main']
reqopts = []
defopts = {}
def __init__(self, plugin='audit', **optdict):
ops.cmd.DszCommand.__init__(self, plugin, **optdict)
def validateInput(self):
for opt in self.optdict:
if (opt not in VALID_OPTIONS):
return False
optcounts = {}
for req in self.reqgroups:
optcounts[req] = 0
for opt in self.optgroups[req]:
if (opt in self.optdict):
optcounts[req] += 1
if (optcounts['main'] != 1):
return False
return True
def __getDisable(self):
if ('disable' in self.optdict):
return self.optdict['disable']
else:
return None
def __setDisable(self, val):
if ((val is None) and ('disable' in self.optdict)):
del self.optdict['disable']
elif (val in ['all', 'security']):
self.optdict['disable'] = val
else:
raise OpsCommandException(('Invalid value for -disable: %s' % val))
disable = property(__getDisable, __setDisable)
def __getForce(self):
return (('force' in self.optdict) and self.optdict['force'])
def __setForce(self, val):
if (((val is None) or (val is False)) and ('force' in self.optdict)):
del self.optdict['force']
elif val:
self.optdict['force'] = True
force = property(__getForce, __setForce)
def __getStatus(self):
return (('status' in self.optdict) and self.optdict['status'])
def __setStatus(self, val):
if (((val is None) or (val is False)) and ('status' in self.optdict)):
del self.optdict['status']
elif val:
self.optdict['status'] = True
audit_status = property(__getStatus, __setStatus)
def __getOn(self):
return (('on' in self.optdict) and self.optdict['on'])
def __setOn(self, val):
if (((val is None) or (val is False)) and ('on' in self.optdict)):
del self.optdict['on']
elif val:
self.optdict['on'] = True
self.optdict['off'] = False
audit_on = property(__getOn, __setOn)
def __getOff(self):
return (('off' in self.optdict) and self.optdict['off'])
def __setOff(self, val):
if (((val is None) or (val is False)) and ('off' in self.optdict)):
del self.optdict['off']
elif val:
self.optdict['off'] = True
self.optdict['on'] = False
audit_off = property(__getOff, __setOff)
def mySafetyCheck(self):
good = True
msgparts = []
if ((ops.env.get('OPS_NOINJECT').upper() == 'TRUE') and (self.disable is not None)):
good = False
msgparts.append('OPS_NOINJECT is set to TRUE, you should probably not disable auditing')
if (self.force or self.audit_off or self.audit_on):
good = False
msgparts.append('Altering audit policy in a script is not safe, verify you really want to do that')
msg = ''
if (len(msgparts) > 0):
msg = msgparts[0]
for msgpart in msgparts[1:]:
msg += ('\n\t' + msgpart)
return (good, msg)
ops.cmd.command_classes['audit'] = AuditCommand
ops.cmd.aliasoptions['audit'] = VALID_OPTIONS
ops.cmd.safetychecks.addSafetyHandler('audit', 'ops.cmd.audit.mySafetyCheck')
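# --- Illustrative sketch, not part of the original file: the validateInput
# rule above, reduced to plain Python -- exactly one option from each
# required group must be present in the option dict.
def _validate_groups_sketch(optdict, optgroups, reqgroups):
    return all(sum(opt in optdict for opt in optgroups[group]) == 1 for group in reqgroups)
# _validate_groups_sketch({'status': True}, AuditCommand.optgroups, AuditCommand.reqgroups) -> True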
|
tuaminx/ping_server
|
send_alert.py
|
Python
|
apache-2.0
| 732
| 0
|
import subprocess
from threading import Timer
def send_alert(message, log=None):
""" This function is used by ping_server.py to send alert
Do NOT change this function name.
:param message: Message to be sent out
:param log: logger object passed from ping_server.py
:return: None
"""
try:
log.error('ALERT ALERT ALERT: %s' % message)
except Exception:
print('ALERT ALERT ALERT: %s' % message)
pass
palert = subprocess.Popen(["echo %s" % message],
stdout=subprocess.PIPE,
shell=True)
time = Timer(30, palert.kill)
time.start()
_, _ = palert.communicate()
if time.is_alive():
time.cancel()
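# --- Illustrative standalone version, not part of the original file, of the
# Timer-based timeout pattern above: kill the child process if communicate()
# has not returned within the deadline.
def _run_with_timeout_sketch(cmd, seconds=30):
    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, shell=True)
    killer = Timer(seconds, proc.kill)
    killer.start()
    out, _ = proc.communicate()
    if killer.is_alive():
        killer.cancel()  # finished in time; cancel the pending kill
    return out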
|
huiyiqun/check_mk
|
agents/windows/it/remote.py
|
Python
|
gpl-2.0
| 6,472
| 0.000464
|
#!/usr/bin/env python
# -*- coding: utf-8; py-indent-offset: 4 -*-
import ConfigParser
import contextlib
import os
import platform
import pytest
import re
import subprocess
import sys
import telnetlib # nosec
# To use another host for running the tests, replace this IP address.
remote_ip = '10.1.2.30'
# To use another user account for running the tests, replace this username.
remoteuser = 'NetworkAdministrator'
remotedir = os.path.join(os.sep, 'Users', remoteuser, 'Tests')
sshopts = '-o StrictHostKeyChecking=no'
host = 'localhost'
port = 9999
agent_exe = os.path.join(remotedir, 'check_mk_agent-64.exe')
ini_filename = os.path.join(remotedir, 'check_mk.ini')
def run_subprocess(cmd):
sys.stderr.write(' '.join(cmd) + '\n')
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = p.communicate()
return (p.returncode, stdout, stderr)
def assert_subprocess(cmd):
exit_code, stdout, stderr = run_subprocess(cmd)
if stdout:
sys.stdout.write(stdout)
if stderr:
sys.stderr.write(stderr)
assert exit_code == 0, "'%s' failed" % ' '.join(cmd)
@pytest.fixture
def config():
ini = IniWriter()
ini.add_section('global')
ini.set('global', 'port', port)
return ini
@pytest.fixture
def write_config(testconfig):
if platform.system() == 'Windows':
with open(ini_filename, 'wb') as inifile:
testconfig.write(inifile)
yield
# Override this in test file(s) to insert a wait before contacting the agent.
@pytest.fixture
def wait_agent():
def inner():
pass
return inner
@pytest.fixture
def actual_output(write_config, wait_agent):
if platform.system() == 'Windows':
# Run agent and yield telnet output.
try:
save_cwd = os.getcwd()
os.chdir(remotedir)
p = subprocess.Popen([agent_exe, 'adhoc'])
# Override wait_agent in tests to wait for async processes to start.
wait_agent()
telnet = telnetlib.Telnet(host, port) # nosec
yield telnet.read_all().splitlines()
finally:
try:
telnet.close()
p.terminate()
# Possibly wait for async processes to stop.
wait_agent()
# Thrown if something goes wrong before variable assignment
except UnboundLocalError as e:
sys.stderr.write('%s\n' % str(e))
assert 0, '%s' % str(e)
os.chdir(save_cwd)
else:
# Not on Windows: tests are run remotely, nothing to be done here.
yield
class DuplicateSectionError(Exception):
"""Raised when a section is multiply-created."""
def __init__(self, section):
super(DuplicateSectionError,
self).__init__(self, 'Section %r already exists' % section)
class NoSectionError(Exception):
"""Raised when no section matches a requested option."""
def __init__(self, section):
super(NoSectionError, self).__init__(self,
'No section: %r' % (section))
class IniWriter(ConfigParser.RawConfigParser):
"""Writer for Windows ini files. Simplified version of RawConfigParser but
supports multiple values for a single key."""
def add_section(self, section):
"""Create a new section in the configuration.
Raise DuplicateSectionError if a section by the specified name
already exists. Raise ValueError if name is DEFAULT or any of its
case-insensitive variants.
"""
if section.lower() == 'default':
raise ValueError, 'Invalid section name: %s' % section
if section in self._sections:
raise DuplicateSectionError(section)
self._sections[section] = self._dict()
def set(self, section, option, value=None):
"""Set an option."""
try:
sectdict = self._sections[section]
except KeyError:
raise NoSectionError(section)
if option in sectdict:
sectdict[option].append(value)
else:
sectdict[option] = [value]
def write(self, filehandle):
for section, options in self._sections.iteritems():
filehandle.write('[%s]\r\n' % section)
for key, values in options.iteritems():
for value in values:
filehandle.write(' %s = %s\r\n' % (key, value))
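# --- Illustrative usage, not part of the original file: IniWriter keeps a
# list per key, so setting the same option twice writes two lines (the
# section/option names below are made up).
def _iniwriter_demo(path):
    demo = IniWriter()
    demo.add_section('global')
    demo.set('global', 'only_from', '127.0.0.1')
    demo.set('global', 'only_from', '10.0.0.0/8')
    with open(path, 'wb') as handle:
        demo.write(handle)  # -> [global] with two 'only_from' lines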
def remotetest(expected_output,
actual_output,
testfile,
testname=None,
testclass=None):
# Not on Windows: call given test remotely over ssh
if platform.system() != 'Windows':
cmd = [
'ssh', sshopts,
'%s@%s' % (remoteuser, remote_ip),
'py.test %s%s%s' % (os.path.join(remotedir, testfile),
('::%s' % testclass)
if testclass else '', ('::%s' % testname)
if testname else '')
]
assert_subprocess(cmd)
# On Windows: verify output against expected
else:
comparison_data = zip(expected_output, actual_output)
for expected, actual in comparison_data:
if actual == 'WMItimeout':
pytest.skip('WMI timeout, better luck next time')
# Uncomment for debug prints:
# if re.match(expected, actual) is None:
# print 'DEBUG: actual output\r\n', '\r\n'.join(actual_output)
# print 'DEBUG: expected output\r\n', '\r\n'.join(expected_output)
assert re.match(
expected, actual) is not None, ("expected '%s', actual '%s'" %
(expected, actual))
try:
assert len(actual_output) >= len(expected_output), (
'actual output is shorter than expected:\n'
'expected output:\n%s\nactual output:\n%s' %
('\n'.join(expected_output), '\n'.join(actual_output)))
assert len(actual_output) <= len(expected_output), (
'actual output is longer than expected:\n'
'expected output:\n%s\nactual output:\n%s' %
('\n'.join(expected_output), '\n'.join(actual_output)))
except TypeError:
# expected_output may be an iterator without len
assert len(actual_output) > 0, 'Actual output was empty'
|
alobbs/webest
|
webest/obj.py
|
Python
|
mit
| 2,552
| 0
|
import retrying
import selenium
import selenium.webdriver.support.ui as ui
from . import exceptions as ex
@retrying.retry(wait_fixed=1000, retry_on_exception=ex.is_retry_exception)
def get(b, selector, not_found=None):
try:
obj = b.find_element_by_css_selector(selector)
except selenium.common.exceptions.NoSuchElementException:
return not_found
return obj
@retrying.retry(wait_fixed=1000, retry_on_exception=ex.is_retry_exception)
def get_objs(b, selector, not_found=None):
try:
objs = b.find_elements_by_css_selector(selector)
except selenium.common.exceptions.NoSuchElementException:
return not_found
return objs
@retrying.retry(wait_fixed=1000, retry_on_exception=ex.is_retry_exception)
def is_visible(b, selector):
try:
obj = b.find_element_by_css_selector(selector)
except selenium.common.exceptions.NoSuchElementException:
return False
return obj.is_displayed()
@retrying.retry(wait_fixed=1000, retry_on_exception=ex.is_retry_exception)
def is_enabled(b, selector):
try:
obj = b.find_element_by_css_selector(selector)
except selenium.common.exceptions.NoSuchElementException:
return False
return obj.is_enabled()
def get_text(b, selector, not_found=None):
obj = get(b, selector)
if obj:
return obj.text
|
return not_found
def obj_attr(b, selector, attr, not_found=None):
obj = get(b, selector)
if obj:
re = obj.get_attribute(attr)
if re is None:
return not_found
return re
return not_found
def wait_for_obj(b, selector, timeout=30):
wait = ui.WebDriverWait(b, timeout)
wait.until(lambda driver, s=selector: get(b, s))
return get(b, selector)
def wait_for_any_obj(b, selectors, timeout=30):
def check_func(b):
return any([get(b, s) for s in selectors])
wait = ui.WebDriverWait(b, timeout)
wait.until(check_func)
for s in selectors:
obj = get(b, s)
if obj:
return obj
def wait_while_obj(b, selector, timeout=30):
wait = ui.WebDriverWait(b, timeout)
wait.until(lambda driver, s=selector: not get(b, s))
return get(b, selector)
def wait_while_visible(b, selector, timeout=30):
wait = ui.WebDriverWait(b, timeout)
wait.until(lambda driver, s=selector: not get(b, s))
return get(b, selector)
def wait_while_hiden(b, selector, timeout=30):
wait = ui.WebDriverWait(b, timeout)
wait.until(lambda driver, s=selector: get(b, s))
return get(b, selector)
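# --- Illustrative usage, not part of the original file; 'driver' and the
# CSS selectors are hypothetical:
def _submit_flow_sketch(driver):
    btn = wait_for_obj(driver, 'button#submit', timeout=10)
    if btn and is_enabled(driver, 'button#submit'):
        btn.click()
    wait_while_obj(driver, 'div.spinner', timeout=30)  # block until the spinner is gone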
|
yoseforb/lollypop
|
src/settings.py
|
Python
|
gpl-3.0
| 14,299
| 0.00028
|
#!/usr/bin/python
# Copyright (c) 2014-2015 Cedric Bellegarde <cedric.bellegarde@adishatz.org>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from gi.repository import Gtk, GLib, Gio, Pango
try:
from gi.repository import Secret
except:
Secret = None
from gettext import gettext as _
from _thread import start_new_thread
from lollypop.define import Lp, Type, SecretSchema, SecretAttributes
from lollypop.utils import use_csd
# Lollypop settings
class Settings(Gio.Settings):
"""
Init settings
"""
def __init__(self):
Gio.Settings.__init__(self)
"""
Return a new Settings object
"""
def new():
settings = Gio.Settings.new('org.gnome.Lollypop')
settings.__class__ = Settings
return settings
"""
Return music paths
@return [str]
"""
def get_music_paths(self):
paths = self.get_value('music-path')
if not paths:
if GLib.get_user_special_dir(GLib.UserDirectory.DIRECTORY_MUSIC):
paths = [GLib.get_user_special_dir(
GLib.UserDirectory.DIRECTORY_MUSIC)]
else:
print("You need to add a music path"
" to org.gnome.Lollypop in dconf")
return paths
# Dialog showing lollypop options
class SettingsDialog:
def __init__(self):
self._choosers = []
builder = Gtk.Builder()
builder.add_from_resource('/org/gnome/Lollypop/SettingsDialog.ui')
self._settings_dialog = builder.get_object('settings_dialog')
self._settings_dialog.set_transient_for(Lp.window)
if use_csd():
self._settings_dialog.set_titlebar(
builder.get_object('header_bar'))
switch_scan = builder.get_object('switch_scan')
switch_scan.set_state(Lp.settings.get_value('auto-update'))
switch_view = builder.get_object('switch_dark')
switch_view.set_state(Lp.settings.get_value('dark-ui'))
switch_background = builder.get_object('switch_background')
switch_background.set_state(Lp.settings.get_value('background-mode'))
switch_state = builder.get_object('switch_state')
switch_state.set_state(Lp.settings.get_value('save-state'))
switch_autoplay = builder.get_object('switch_autoplay')
switch_autoplay.set_state(Lp.settings.get_value('auto-play'))
switch_genres = builder.get_object('switch_genres')
switch_genres.set_state(Lp.settings.get_value('show-genres'))
self._settings_dialog.connect('destroy', self._edit_settings_close)
builder.connect_signals(self)
main_chooser_box = builder.get_object('main_chooser_box')
self._chooser_box = builder.get_object('chooser_box')
party_grid = builder.get_object('party_grid')
#
# Music tab
#
dirs = []
for directory in Lp.settings.get_value('music-path'):
dirs.append(directory)
# Main chooser
self._main_chooser = ChooserWidget()
image = Gtk.Image.new_from_icon_name("list-add-symbolic",
Gtk.IconSize.MENU)
self._main_chooser.set_icon(image)
self._main_chooser.set_action(self._add_chooser)
main_chooser_box.pack_start(self._main_chooser, False, True, 0)
if len(dirs) > 0:
path = dirs.pop(0)
else:
path = GLib.get_user_special_dir(
GLib.UserDirectory.DIRECTORY_MUSIC)
self._main_chooser.set_dir(path)
# Others choosers
for directory in dirs:
self._add_chooser(directory)
#
# Party mode tab
#
genres = Lp.genres.get()
genres.insert(0, (Type.POPULARS, _("Populars")))
genres.insert(1, (Type.RECENTS, _("Recents")))
ids = Lp.player.get_party_ids()
i = 0
x = 0
for genre_id, genre in genres:
label = Gtk.Label()
label.set_property('margin-start', 10)
label.set_property('halign', Gtk.Align.START)
label.set_property('hexpand', True)
label.set_ellipsize(Pango.EllipsizeMode.END)
label.set_text(genre)
label.show()
switch = Gtk.Switch()
if genre_id in ids:
switch.set_state(True)
switch.connect("state-set", self._party_switch_state, genre_id)
switch.set_property('margin-end', 50)
switch.show()
party_grid.attach(label, x, i, 1, 1)
party_grid.attach(switch, x+1, i, 1, 1)
if x == 0:
x += 2
else:
i += 1
x = 0
#
# Last.fm tab
#
if Lp.lastfm is not None and Secret is not None:
self._test_img = builder.get_object('test_img')
self._login = builder.get_object('login')
self._password = builder.get_object('password')
schema = Secret.Schema.new("org.gnome.Lollypop",
Secret.SchemaFlags.NONE,
SecretSchema)
Secret.password_lookup(schema, SecretAttributes, None,
self._on_password_lookup)
builder.get_object('lastfm_grid').set_sensitive(True)
builder.get_object('lastfm_error').hide()
self._login.set_text(
Lp.settings.get_value('lastfm-login').get_string())
"""
Show dialog
"""
def show(self):
self._settings_dialog.show()
#######################
# PRIVATE #
#######################
"""
Add a new chooser widget
@param directory path as string
"""
def _add_chooser(self, directory=None):
chooser = ChooserWidget()
image = Gtk.Image.new_from_icon_name("list-remove-symbolic",
Gtk.IconSize.MENU)
chooser.set_icon(image)
if directory:
chooser.set_dir(directory)
self._chooser_box.add(chooser)
"""
Update view setting
@param widget as unused, state as widget state
"""
def _update_ui_setting(self, widget, state):
Lp.settings.set_value('dark-ui', GLib.Variant('b', state))
if not Lp.player.is_party():
settings = Gtk.Settings.get_default()
settings.set_property("gtk-application-prefer-dark-theme", state)
Lp.window.update_view()
"""
Update scan setting
@param widget as unused, state as widget state
"""
def _update_scan_setting(self, widget, state):
Lp.settings.set_value('auto-update',
GLib.Variant('b', state))
"""
Update background mode setting
@param widget as unused, state as widget state
"""
def _update_background_setting(self, widget, state):
Lp.settings.set_value('background-mode',
GLib.Variant('b', state))
"""
Update save state setting
@param widget as unused, state as widget state
"""
def _update_state_setting(self, widget, state):
Lp.settings.set_value('save-state',
GLib.Variant('b', state))
"""
Update show genre setting
@param widget as unused, state as widget state
"""
def _update_genres_setting(self, widget, state):
Lp.window.show_genres(state)
Lp.settings.set_value('show-genres',
|
Migdalo/para
|
setup.py
|
Python
|
mit
| 494
| 0.006073
|
from setuptools import setup
import unittest
def para_test_suite():
test_loader = unittest.TestLoader()
test_suite = test_loader.discover('tests', pattern='test_*.py')
return test_suite
setup(name='para',
version='2.0.1',
author='Migdalo',
license='MIT',
packages=['para'],
test_suite='setup.para_test_suite',
entry_points={
'console_scripts': [
'para = para.para:process_arguments'
|
]
},
zip_safe=True)
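# --- Illustrative sketch, not part of the original file: the same discovery
# that 'python setup.py test' triggers via the test_suite hook, runnable
# directly:
def _run_para_tests_sketch():
    return unittest.TextTestRunner().run(para_test_suite())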
|
|
thought-machine/please
|
test/python_rules/data_dep_test.py
|
Python
|
apache-2.0
| 577
| 0.001733
|
"""Test on deps, data, re
|
quires and provides."""
import os
import subprocess
import unittest
class DataDepTest(unittest.TestCase):
def test_direct_dep(self):
"""Test that we can import the module directly."""
from test.python_rules import data_dep
self.assertEqual(42, data_dep.the_answer())
def test_data_dep(self):
"""Test that we can also invoke the .pex directly as a data dependency."""
output = subprocess.check_output(['test/python_rules/data_dep.pex'])
self.assertEqual('42', output.strip().decode('utf-8'))
|
openlabs/raven
|
tests/contrib/tornado/tests.py
|
Python
|
bsd-3-clause
| 5,739
| 0.000174
|
# -*- coding: utf-8 -*-
"""
tests
Test the tornado Async Client
"""
import unittest
from mock import patch
from tornado import web, gen, testing
from raven.contrib.tornado import SentryMixin, AsyncSentryClient
class AnErrorProneHandler(SentryMixin, web.RequestHandler):
def get(self):
try:
raise Exception("Damn it!")
except Exception, e:
self.captureException(True)
class SendErrorTestHandler(SentryMixin, web.RequestHandler):
def get(self):
raise Exception("Oops")
class SendErrorAsyncHandler(SentryMixin, web.RequestHandler):
@web.asynchronous
@gen.engine
def get(self):
raise Exception("Oops")
class AsyncMessageHandler(SentryMixin, web.RequestHandler):
@web.asynchronous
@gen.engine
def get(self):
# Compute something crazy
response = yield gen.Task(
self.captureMessage, "Something totally crazy was just done"
)
self.set_header('X-Sentry-ID', 'The ID')
self.finish()
def get_current_user(self):
return {
'name': 'John Doe'
}
class TornadoAsyncClientTestCase(testing.AsyncHTTPTestCase):
def get_app(self):
app = web.Application([
web.url(r'/an-error', AnErrorProneHandler),
web.url(r'/an-async-message', AsyncMessageHandler),
web.url(r'/send-error', SendErrorTestHandler),
web.url(r'/send-error-async', SendErrorAsyncHandler),
])
app.sentry_client = AsyncSentryClient(
'http://public_key:secret_key@host:9000/project'
)
return app
@patch('raven.contrib.tornado.AsyncSentryClient.send')
def test_error_handler(self, send):
response = self.fetch('/an-error?qs=qs')
self.assertEqual(response.code, 200)
self.assertEqual(send.call_count, 1)
args, kwargs = send.call_args
self.assertEqual(kwargs['public_key'], 'public_key')
self.assertTrue(('sentry.interfaces.User' in kwargs))
self.assertTrue(('sentry.interfaces.Stacktrace' in kwargs))
self.assertTrue(('sentry.interfaces.Http' in kwargs))
self.assertTrue(('sentry.interfaces.Exception' in kwargs))
http_data = kwargs['sentry.interfaces.Http']
self.assertEqual(http_data['cookies'], None)
self.assertEqual(http_data['url'], response.effective_url)
self.assertEqual(http_data['query_string'], 'qs=qs')
self.assertEqual(http_data['method'], 'GET')
user_data = kwargs['sentry.interfaces.User']
self.assertEqual(user_data['is_authenticated'], False)
@patch(
'raven.contrib.tornado.AsyncSentryClient.send',
side_effect=lambda *args, **kwargs: kwargs['callback']("done"))
def test_message_handler(self, send):
response = self.fetch('/an-async-message?qs=qs')
self.assertEqual(response.code, 200)
self.assertEqual(send.call_count, 1)
args, kwargs = send.call_args
self.assertEqual(kwargs['public_key'], 'public_key')
self.assertTrue(('sentry.interfaces.User' in kwargs))
self.assertTrue(('sentry.interfaces.Http' in kwargs))
self.assertTrue(('sentry.interfaces.Message' in kwargs))
http_data = kwargs['sentry.interfaces.Http']
self.assertEqual(http_data['cookies'], None)
self.assertEqual(http_data['url'], response.effective_url)
self.assertEqual(http_data['query_string'], 'qs=qs')
self.assertEqual(http_data['method'], 'GET')
user_data = kwargs['sentry.interfaces.User']
self.assertEqual(user_data['is_authenticated'], True)
@patch('raven.contrib.tornado.AsyncSentryClient.send')
def test_send_error_handler(self, send):
response = self.fetch('/send-error?qs=qs')
self.assertEqual(response.code, 500)
self.assertEqual(send.call_count, 1)
args, kwargs = send.call_args
self.assertEqual(kwargs['public_key'], 'public_key')
self.assertTrue(('sentry.interfaces.User' in kwargs))
self.assertTrue(('sentry.interfaces.Stacktrace' in kwargs))
self.assertTrue(('sentry.interfaces.Http' in kwargs))
self.assertTrue(('sentry.interfaces.Exception' in kwargs))
http_data = kwargs['sentry.interfaces.Http']
self.assertEqual(http_data['cookies'], None)
self.assertEqual(http_data['url'], response.effective_url)
self.assertEqual(http_data['query_string'], 'qs=qs')
self.assertEqual(http_data['method'], 'GET')
user_data = kwargs['sentry.interfaces.User']
self.assertEqual(user_data['is_authenticated'], False)
@patch('raven.contrib.tornado.AsyncSentryClient.send')
def test_send_error_handler_async(self, send):
response = self.fetch('/send-error-async?qs=qs')
self.assertEqual(response.code, 500)
self.assertEqual(send.call_count, 1)
args, kwargs = send.call_args
self.assertEqual(kwargs['public_key'], 'public_key')
self.assertTrue(('sentry.interfaces.User' in kwargs))
self.assertTrue(('sentry.interfaces.Stacktrace' in kwargs))
self.assertTrue(('sentry.interfaces.Http' in kwargs))
self.assertTrue(('sentry.interfaces.Exception' in kwargs))
http_data = kwargs['sentry.interfaces.Http']
self.assertEqual(http_data['cookies'], None)
self.assertEqual(http_data['url'], response.effective_url)
self.assertEqual(http_data['query_string'], 'qs=qs')
self.assertEqual(http_data['method'], 'GET')
user_data = kwargs['sentry.interfaces.User']
self.assertEqual(user_data['is_authenticated'], False)
if __name__ == '__main__':
unittest.main()
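# [Editor's sketch] The gen.Task tests above rely on AsyncSentryClient.send
# accepting a `callback` keyword and invoking it when the capture completes;
# the side_effect lambda mimics that contract. A minimal standalone sketch:
#
#   def fake_send(*args, **kwargs):
#       kwargs['callback']('done')   # fire the continuation immediately
#
#   with patch('raven.contrib.tornado.AsyncSentryClient.send',
#              side_effect=fake_send):
#       pass  # yield gen.Task(self.captureMessage, '...') now resolves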
|
sebest/katana
|
katana/utils.py
|
Python
|
mpl-2.0
| 2,188
| 0
|
import os
import errno
import fcntl
from contextlib import contextmanager
from time import time, sleep
@contextmanager
def wlock(filename, retry_interval=0.05):
# returns: write, exists, fd
try:
with open(filename, 'rb+') as lock:
try:
fcntl.flock(lock, fcntl.LOCK_EX | fcntl.LOCK_NB)
except IOError as exc:
if exc.errno == errno.EAGAIN:
while True:
try:
fcntl.flock(lock, fcntl.LOCK_SH | fcntl.LOCK_NB)
except IOError as exc:
if exc.errno == errno.EAGAIN:
sleep(retry_interval)
continue
else:
raise
else:
yield False, True, lock
break
else:
raise
            else:
yield True, True, lock
except IOError as exc:
if exc.errno == errno.ENOENT:
with open(filename, 'wb') as lock:
while True:
try:
fcntl.flock(lock, fcntl.LOCK_EX | fcntl.LOCK_NB)
except IOError as exc:
if exc.errno == errno.EAGAIN:
sleep(retry_interval)
continue
else:
raise
else:
yield True, False, lock
if os.path.exists(filename):
if not lock.closed:
lock.seek(0, 2)
if not lock.tell():
os.unlink(filename)
elif not os.path.getsize(filename):
os.unlink(filename)
break
else:
raise
class Timer:
def __init__(self):
self.start = time()
def __str__(self):
return '%.3f ms' % ((time() - self.start) * 1000)
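# [Editor's sketch] Intended use of wlock(), assuming single-writer /
# many-reader access to a cache file; generate_bytes() is hypothetical:
#
#   t = Timer()
#   with wlock('/tmp/cache.bin') as (write, exists, fd):
#       if write:                    # LOCK_EX held: (re)generate the file
#           fd.write(generate_bytes())
#       elif exists:                 # LOCK_SH held: a writer already finished
#           data = fd.read()
#   print('done in %s' % t)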
|
LLNL/spack
|
var/spack/repos/builtin/packages/r-dss/package.py
|
Python
|
lgpl-2.1
| 1,309
| 0.000764
|
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class RDss(RPackage):
"""Dispersion shrinkage for sequencing data
DSS is an R library performing differntial analysis for count-based
sequencing data. It detectes differentially expressed genes (DEGs) from
RNA-seq, and differentially methylated loci or regions (DML/DMRs) from
bisulfite sequencing (BS-seq). The core of DSS is a new dispersion
shrinkage method for estimating the dispersion parameter from Gamma-Poisson
or Beta-Binomial distributions."""
homepage = "https://bioconductor.org/packages/DSS/"
git = "https://git.bioconductor.org/packages/DSS"
version('2.38.0', commit='82e65b92e6e227f1f99620362db8b03059e07e98')
    version('2.36.0', commit='841c7ed')
version('2.34.0', commit='f9819c7')
version('2.32.0', commit='ffb502d')
depends_on('r@3.3:', type=('build', 'run'))
depends_on('r-biobase', type=('build', 'run'))
depends_on('r-biocparallel', when='@2.36.0:', type=('build', 'run'))
depends_on('r-bsseq', type=('build', 'run'))
    depends_on('r-delayedarray', when='@2.36.0:', type=('build', 'run'))
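# [Editor's sketch] Typical Spack usage for a recipe like this one; the
# commands are illustrative:
#
#   spack install r-dss@2.38.0   # builds the pinned Bioconductor commit
#   spack load r-dss             # then library(DSS) inside R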
|
pygraz/old-flask-website
|
migrations/versions/002_notificationflags.py
|
Python
|
bsd-3-clause
| 896
| 0.001116
|
from sqlalchemy import *
from migrate import *
meta = MetaData()
user_tbl = Table('user', meta)
status_col = Column('email_status', String, default='not_verified')
activation_code_col = Column('email_activation_code', String, nullable=True)
notify_new_meetup = Column('email_notify_new_meetup', Boolean, default=False)
notify_new_sessionidea = Column('email_notify_new_sessionidea', Boolean, default=False)
def upgrade(migrate_engine):
meta.bind = migrate_engine
user_tbl.create_column(status_col)
user_tbl.create_column(activation_code_col)
user_tbl.create_column(notify_new_meetup)
user_tbl.create_column(notify_new_sessionidea)
def downgrade(migrate_engine):
meta.bind = migrate_engine
user_tbl.drop_column(status_col)
user_tbl.drop_column(activation_code_col)
user_tbl.drop_column(notify_new_meetup)
user_tbl.drop_column(notify_new_sessionidea)
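# [Editor's sketch] With sqlalchemy-migrate, a versioned script like this is
# usually applied through the repository's manage script; invocation is
# illustrative:
#
#   python manage.py upgrade       # runs upgrade(), adding the four columns
#   python manage.py downgrade 1   # runs downgrade(), dropping them again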
|
ashleysommer/sanic-cors
|
tests/decorator/test_exception_interception.py
|
Python
|
mit
| 9,339
| 0.00182
|
# -*- coding: utf-8 -*-
"""
test
~~~~
Sanic-CORS is a simple extension to Sanic allowing you to support cross
origin resource sharing (CORS) using a simple decorator.
:copyright: (c) 2020 by Ashley Sommer (based on flask-cors by Cory Dolphin).
:license: MIT, see LICENSE for more details.
"""
from ..base_test import SanicCorsTestCase
from sanic import Sanic
from sanic_cors import *
from sanic_cors.core import *
from sanic.exceptions import NotFound, ServerError
from sanic.response import text
def add_routes(app):
#@app.route('/test_no_acl_abort_404')
#@app.route('/test_acl_abort_404')
def test_acl_abort_404(request):
raise NotFound("")
app.route('/test_no_acl_abort_404')(test_acl_abort_404)
app.route('/test_acl_abort_404')(test_acl_abort_404)
#@app.route('/test_no_acl_async_abort_404')
#@app.route('/test_acl_async_abort_404')
async def test_acl_async_abort_404(request):
raise NotFound("")
app.route('/test_no_acl_async_abort_404')(test_acl_async_abort_404)
app.route('/test_acl_async_abort_404')(test_acl_async_abort_404)
#@app.route('/test_no_acl_abort_500')
#@app.route('/test_acl_abort_500')
def test_acl_abort_500(request):
raise ServerError("")
app.route('/test_no_acl_abort_500')(test_acl_abort_500)
app.route('/test_acl_abort_500')(test_acl_abort_500)
@app.route('/test_acl_uncaught_exception_500')
def test_acl_uncaught_exception_500(request):
raise Exception("This could've been any exception")
@app.route('/test_no_acl_uncaught_exception_500')
def test_no_acl_uncaught_exception_500(request):
raise Exception("This could've been any exception")
class ExceptionInterceptionDefaultTestCase(SanicCorsTestCase):
def setUp(self):
self.app = Sanic(__name__.replace(".","-"))
CORS(self.app, resources={
r'/test_acl*': {},
})
add_routes(self.app)
def test_acl_abort_404(self):
'''
HTTP Responses generated by calling abort are handled identically
to normal responses, and should be wrapped by CORS headers if the
path matches. This path matches.
'''
resp = self.get('/test_acl_abort_404', origin='www.example.com')
self.assertEqual(resp.status, 404)
self.assertTrue(ACL_ORIGIN in resp.headers)
def test_acl_async_abort_404(self):
'''
HTTP Responses generated by calling abort are handled identically
to normal responses, and should be wrapped by CORS headers if the
path matches. This path matches.
'''
        resp = self.get('/test_acl_async_abort_404', origin='www.example.com')
self.assertEqual(resp.status, 404)
self.assertTrue(ACL_ORIGIN in resp.headers)
    def test_no_acl_abort_404(self):
'''
HTTP Responses generated by calling abort are handled identically
to normal responses, and should be wrapped by CORS headers if the
path matches. This path does not match.
'''
resp = self.get('/test_no_acl_abort_404', origin='www.example.com')
self.assertEqual(resp.status, 404)
self.assertFalse(ACL_ORIGIN in resp.headers)
def test_no_acl_async_abort_404(self):
'''
HTTP Responses generated by calling abort are handled identically
to normal responses, and should be wrapped by CORS headers if the
path matches. This path does not match.
'''
resp = self.get('/test_no_acl_async_abort_404', origin='www.example.com')
self.assertEqual(resp.status, 404)
self.assertFalse(ACL_ORIGIN in resp.headers)
def test_acl_abort_500(self):
'''
HTTP Responses generated by calling abort are handled identically
to normal responses, and should be wrapped by CORS headers if the
path matches. This path matches
'''
resp = self.get('/test_acl_abort_500', origin='www.example.com')
self.assertEqual(resp.status, 500)
self.assertTrue(ACL_ORIGIN in resp.headers)
def test_no_acl_abort_500(self):
'''
HTTP Responses generated by calling abort are handled identically
to normal responses, and should be wrapped by CORS headers if the
path matches. This path matches
'''
resp = self.get('/test_no_acl_abort_500', origin='www.example.com')
self.assertEqual(resp.status, 500)
self.assertFalse(ACL_ORIGIN in resp.headers)
def test_acl_uncaught_exception_500(self):
'''
Uncaught exceptions will trigger Sanic's internal exception
handler, and should have ACL headers only if intercept_exceptions
is set to True and if the request URL matches the resources pattern
This url matches.
'''
resp = self.get('/test_acl_uncaught_exception_500', origin='www.example.com')
self.assertEqual(resp.status, 500)
self.assertTrue(ACL_ORIGIN in resp.headers)
def test_no_acl_uncaught_exception_500(self):
'''
Uncaught exceptions will trigger Sanic's internal exception
handler, and should have ACL headers only if intercept_exceptions
is set to True and if the request URL matches the resources pattern.
This url does not match.
'''
resp = self.get('/test_no_acl_uncaught_exception_500', origin='www.example.com')
self.assertEqual(resp.status, 500)
self.assertFalse(ACL_ORIGIN in resp.headers)
def test_acl_exception_with_error_handler(self):
'''
If a 500 handler is setup by the user, responses should have
CORS matching rules applied, regardless of whether or not
intercept_exceptions is enabled.
'''
return_string = "Simple error handler"
@self.app.exception(NotFound, ServerError, Exception)
def catch_all_handler(request, exception):
'''
This error handler catches 404s and 500s and returns
status 200 no matter what. It is not a good handler.
'''
return text(return_string)
acl_paths = [
'/test_acl_abort_404',
'/test_acl_abort_500',
'/test_acl_uncaught_exception_500'
]
no_acl_paths = [
'/test_no_acl_abort_404',
'/test_no_acl_abort_500',
'/test_no_acl_uncaught_exception_500'
]
def get_with_origins(path):
response = self.get(path, origin='www.example.com')
return response
for resp in map(get_with_origins, acl_paths):
self.assertEqual(resp.status, 200)
self.assertTrue(ACL_ORIGIN in resp.headers)
for resp in map(get_with_origins, no_acl_paths):
self.assertEqual(resp.status, 200)
self.assertFalse(ACL_ORIGIN in resp.headers)
class NoExceptionInterceptionTestCase(ExceptionInterceptionDefaultTestCase):
def setUp(self):
self.app = Sanic(__name__.replace(".","-"))
CORS(self.app,
intercept_exceptions=False,
resources={
r'/test_acl*': {},
})
add_routes(self.app)
def test_acl_exception_with_error_handler(self):
'''
If a 500 handler is setup by the user, responses should have
CORS matching rules applied, regardless of whether or not
        intercept_exceptions is enabled.
'''
return_string = "Simple error handler"
@self.app.exception(ServerError, NotFound, Exception)
# Note, async error handlers don't work in Sanic yet.
# async def catch_all_handler(request, exception):
def catch_all_handler(request, exception):
'''
This error handler catches 404s and 500s and returns
status 200 no matter what. It is not a good handler.
'''
return text(return_string, 200)
acl_paths = [
'/test_acl_abort_404',
'/
|
marcelogomess/glpi_api
|
setup.py
|
Python
|
bsd-2-clause
| 344
| 0.002907
|
from distutils.core import setup
setup(
name='glpi_api',
version='0.0.1',
packages=['requests'],
    url='https://github.com/marcelogomess/glpi_api.git',
license='BSD 2',
author='marcelogomess',
author_email='celo.gomess@gmail.com',
    description='Just an app to start with glpi api communication. glpi-project.org'
)
|
statsmodels/statsmodels
|
statsmodels/graphics/utils.py
|
Python
|
bsd-3-clause
| 4,032
| 0.000496
|
"""Helper functions for graphics with Matplotlib."""
from statsmodels.compat.python import lrange
__all__ = ['create_mpl_ax', 'create_mpl_fig']
def _import_mpl():
"""This function is not needed outside this utils module."""
try:
import matplotlib.pyplot as plt
    except ImportError:
raise ImportError("Matplotlib is not found.")
return plt
def create_mpl_ax(ax=None):
"""Helper function for when a single plot axis is needed.
Parameters
----------
ax : AxesSubplot, optional
If given, this subplot is used to plot in instead of a new figure being
created.
Returns
-------
fig : Figure
If `ax` is None, the created figure. Otherwise the figure to which
`ax` is connected.
ax : AxesSubplot
The created axis if `ax` is None, otherwise the axis that was passed
in.
Notes
-----
This function imports `matplotlib.pyplot`, which should only be done to
create (a) figure(s) with ``plt.figure``. All other functionality exposed
by the pyplot module can and should be imported directly from its
Matplotlib module.
See Also
--------
create_mpl_fig
Examples
--------
A plotting function has a keyword ``ax=None``. Then calls:
>>> from statsmodels.graphics import utils
>>> fig, ax = utils.create_mpl_ax(ax)
"""
if ax is None:
plt = _import_mpl()
fig = plt.figure()
ax = fig.add_subplot(111)
else:
fig = ax.figure
return fig, ax
def create_mpl_fig(fig=None, figsize=None):
"""Helper function for when multiple plot axes are needed.
Those axes should be created in the functions they are used in, with
``fig.add_subplot()``.
Parameters
----------
fig : Figure, optional
If given, this figure is simply returned. Otherwise a new figure is
created.
Returns
-------
Figure
        If `fig` is None, the created figure. Otherwise the input `fig` is
returned.
See Also
--------
create_mpl_ax
"""
if fig is None:
plt = _import_mpl()
        fig = plt.figure(figsize=figsize)
return fig
def maybe_name_or_idx(idx, model):
"""
Give a name or an integer and return the name and integer location of the
column in a design matrix.
"""
if idx is None:
idx = lrange(model.exog.shape[1])
if isinstance(idx, int):
exog_name = model.exog_names[idx]
exog_idx = idx
# anticipate index as list and recurse
elif isinstance(idx, (tuple, list)):
exog_name = []
exog_idx = []
for item in idx:
exog_name_item, exog_idx_item = maybe_name_or_idx(item, model)
exog_name.append(exog_name_item)
exog_idx.append(exog_idx_item)
else: # assume we've got a string variable
exog_name = idx
exog_idx = model.exog_names.index(idx)
return exog_name, exog_idx
def get_data_names(series_or_dataframe):
"""
Input can be an array or pandas-like. Will handle 1d array-like but not
2d. Returns a str for 1d data or a list of strings for 2d data.
"""
names = getattr(series_or_dataframe, 'name', None)
if not names:
names = getattr(series_or_dataframe, 'columns', None)
if not names:
shape = getattr(series_or_dataframe, 'shape', [1])
nvars = 1 if len(shape) == 1 else series_or_dataframe.shape[1]
            names = ["X%d" % i for i in range(nvars)]  # was "X%d" with no substitution; label columns X0..X{n-1}
if nvars == 1:
names = names[0]
else:
names = names.tolist()
return names
def annotate_axes(index, labels, points, offset_points, size, ax, **kwargs):
"""
Annotate Axes with labels, points, offset_points according to the
given index.
"""
for i in index:
label = labels[i]
point = points[i]
offset = offset_points[i]
ax.annotate(label, point, xytext=offset, textcoords="offset points",
size=size, **kwargs)
return ax
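# [Editor's sketch] Resolution behaviour of maybe_name_or_idx(), using a
# hypothetical stand-in model object:
#
#   import numpy as np
#   class FakeModel:
#       exog = np.zeros((10, 3))
#       exog_names = ['const', 'x1', 'x2']
#   maybe_name_or_idx(1, FakeModel)          # -> ('x1', 1)
#   maybe_name_or_idx('x2', FakeModel)       # -> ('x2', 2)
#   maybe_name_or_idx([0, 'x2'], FakeModel)  # -> (['const', 'x2'], [0, 2])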
|
petr-devaikin/dancee
|
helpers/extractor.py
|
Python
|
gpl-3.0
| 5,790
| 0.03057
|
# Cut the experiment session in small fragments
# Input: ../bin/data/records/{session}/body.csv and skeletok.csv
# Output: fragments/{fragment_number}.json and fragments/log.csv
import os
import numpy
import json
DELAY = 15
LENGTH = 30
OVERLAP = 0.719999
FREQUENCY = 60
MARGIN = 5
CUTOFF_FREQUENCY = 10
buf_length = FREQUENCY / CUTOFF_FREQUENCY
kernel = numpy.blackman(buf_length)
kernel_summ = numpy.sum(kernel)
to_filter = [
9, 10, 11, #r_shoulder
12, 13, 14, #r_elbow
15, 16, 17, #r_hand
18, 19, 20, #l_shoulder
21, 22, 23, #l_elbow
24, 25, 26, #l_hand
27, 28, 29, #r_hip
30, 31, 32, #r_knee
36, 37, 38, #r_foot
39, 40, 41, #l_hip
42, 43, 44, #l_knee
48, 49, 50 #l_foot
]
buffers = [[0] * buf_length for _ in range(len(to_filter))]  # independent per-joint buffers ([[0]*n]*m would alias one shared list)
values = [0] * len(to_filter)
values2 = [0] * len(to_filter)
# emg filtering
CUTOFF_EMG_FREQUENCY = 6
buf_emg_length = FREQUENCY / CUTOFF_EMG_FREQUENCY
kernel_emg = numpy.blackman(buf_emg_length)
kernel_emg_summ = numpy.sum(kernel_emg)
emg2_buffer = [0] * buf_emg_length
# acc filtering
CUTOFF_ACC_FREQUENCY = 10
buf_acc_length = FREQUENCY / CUTOFF_ACC_FREQUENCY
kernel_acc = numpy.blackman(buf_acc_length)
kernel_acc_summ = numpy.sum(kernel_acc)
acc_buffer = [[0] * buf_acc_length for _ in range(3)]  # three independent axis buffers, not aliases of one list
# clean the folder
for f in os.listdir("fragments"):
os.remove(os.path.join('fragments', f))
# cut fragments
record_counter = 0
def cut_fragment(participant, track_number):
global record_counter
global values
global values2
global buffers
global emg2_buffer
global acc_buffer
print "Cut participant " + participant + ", track " + str(track_number)
result_data = {
'acc1': [],
'acc2': [],
'acc2_nf': [],
'emg1': [],
'emg2': [],
'emg2_nf': [],
'skeleton': [],
'skeleton_nf': [],
}
path = "../bin/data/records/"
for rec in os.listdir(path):
if rec.split(' ')[0] == participant:
with open(path + rec + "/body.csv", 'r') as f_read_body:
with open(path + rec + "/skeleton.csv", 'r') as f_read_skeleton:
i = 0
while i < (DELAY + (OVERLAP + LENGTH) * (track_number + 1) - MARGIN) * FREQUENCY:
line_body = f_read_body.readline().strip().split('\t')
line_skeleton = f_read_skeleton.readline().strip().split('\t')
values3 = [0] * len(to_filter)
if i >= (DELAY + OVERLAP + (OVERLAP + LENGTH) * track_number) * FREQUENCY:
values = [float(line_skeleton[j]) for j in to_filter]
for j in range(2, len(values), 3):
if values[j] > 1.4:
values2[j - 2] = values[j - 2]
values2[j - 1] = values[j - 1]
values2[j] = values[j]
for j in range(len(values)):
buffers[j].append(values2[j])
buffers[j] = buffers[j][1:]
for k in range(buf_length):
values3[j] += buffers[j][k] * kernel[k]
values3[j] /= kernel_summ
#emg filtering
emg2 = float(line_body[7])
emg2_nf = emg2
emg2_buffer.append(emg2)
emg2_buffer = emg2_buffer[1:]
emg2 = 0
for k in range(buf_emg_length):
emg2 += emg2_buffer[k] * kernel_emg[k]
emg2 /= kernel_emg_summ
line_body[7] = str(emg2)
#acc filtering
acc_values = [float(v) for v in line_body[3:6]]
for j in range(3):
v = float(line_body[3 + j])
acc_buffer[j].append(v)
acc_buffer[j] = acc_buffer[j][1:]
v2 = 0
for k in range(buf_acc_length):
v2 += acc_buffer[j][k] * kernel_acc[k]
v2 /= kernel_acc_summ
line_body[j + 3] = str(v2)
if i >= (DELAY + OVERLAP + (OVERLAP + LENGTH) * track_number + MARGIN) * FREQUENCY:
result_data['acc1'].append([float(v) - 512 for v in line_body[0:3]])
result_data['acc2'].append([float(v) - 512 for v in line_body[3:6]])
result_data['acc2_nf'].append(acc_values)
result_data['emg1'].append(float(line_body[6]))
result_data['emg2'].append(float(line_body[7]))
result_data['emg2_nf'].append(emg2_nf)
result_data['skeleton'].append({
'r_shoulder': values3[0:3],
'r_elbow': values3[3:6],
'r_hand': values3[6:9],
'l_shoulder': values3[9:12],
'l_elbow': values3[12:15],
'l_hand': values3[15:18],
'r_hip': values3[18:21],
'r_knee': values3[21:24],
'r_foot': values3[24:27],
'l_hip': values3[27:30],
'l_knee': values3[30:33],
'l_foot': values3[33:36]
})
result_data['skeleton_nf'].append({
'r_shoulder': values[0:3],
'r_elbow': values[3:6],
'r_hand': values[6:9],
'l_shoulder': values[9:12],
'l_elbow': values[12:15],
'l_hand': values[15:18],
'r_hip': values[18:21],
'r_knee': values[21:24],
'r_foot': values[24:27],
'l_hip': values[27:30],
'l_knee': values[30:33],
'l_foot': values[33:36]
})
i += 1
with open('fragments/' + str(record_counter) + '.json', "w") as f_write:
json.dump(result_data, f_write)
break
else:
print "Cannot find data for participant ", participant, "\n"
return None
record_counter += 1
return record_counter - 1
with open('selftest/results.txt', 'r') as f:
with open('fragments/log.csv', 'w') as log:
log.write('Participant\tTrack number\tTrack order\tValence\tArousal\tFragment\n')
participant = -1
track_number = 0
for line in f:
            ar = line.strip().split(' ')
if ar[0] != participant:
track_number = 0
participant = ar[0]
track_real_number = ar[1]
valence = ar[2]
arousal = ar[3]
record = cut_fragment(participant, track_number)
log.write(participant + '\t' + track_real_number + '\t' + str(track_number) + '\t' + valence + '\t' + arousal + '\t' + str(record) + '\n')
track_number += 1
#break
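# [Editor's sketch] The per-sample loop above is a Blackman-window FIR
# low-pass: keep the last buf_length samples and take a normalized dot
# product with the window. A vectorized equivalent under the same
# frequency/cutoff assumptions:
#
#   import numpy
#   def blackman_lowpass(signal, fs=60, cutoff=10):
#       kernel = numpy.blackman(fs // cutoff)
#       return numpy.convolve(signal, kernel / kernel.sum(), mode='same')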
|
dtysky/Led_Array
|
LED/PCB/Script/script6.py
|
Python
|
gpl-2.0
| 538
| 0.033457
|
import string
import struct
out=open('Led.scr','w');
w=202
h=726.8
for j in range(120):
wtf='add'+' '+'connect'+';'+'\n'+'pick'+' '+str(w)+' '+str(h)+';'+'\n'
out.write(wtf)
w=w-1.3
    wtf='pick'+' '+str(w)+' '+str(h)+';'+'\n'
out.write(wtf)
wtf='add'+' '+'connect'+';'+'\n'+'pick'+' '+str(w)+' '+str(h)+';'+'\n'
out.write(wtf)
h=h-153
wtf='pick'+' '+str(w)+' '+str(h)+';'+'\n'
out.write(wtf)
w=w+1.3
h=h+153
wtf='done'+';'+'\n'
out.write(wtf)
w=w+2
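# [Editor's sketch] Each loop pass emits EAGLE script commands of roughly
# this shape (first iteration shown; coordinates shift per pick pair):
#
#   add connect;
#   pick 202 726.8;
#   pick 200.7 726.8;
#   add connect;
#   pick 200.7 726.8;
#   pick 200.7 573.8;
#   done;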
|
adamgreig/momobot
|
settings.py
|
Python
|
bsd-3-clause
| 508
| 0.001969
|
# -*- coding: utf-8 -*-
# The IRC nickname and password to connect and identify with
NICKNAME = 'momobot_test'
PASSWORD = ''
# The IRC server and port to connect to
SERVER = 'irc.rizon.net'
PORT = 6667
# The channel to join
CHANNEL = '#momotest'
# A list of command indicators
COMMAND_INDICATORS = ['!', '.', 'momo, ']
# How long to wait before replying to a command (secs)
COMMAND_DELAY = 0.3
# The IRC VERSION reply
CTCP_VERSION = 'Momobot v0.1a'
SELFDESTRUCT_USERS = ['Randomskk', 'redd', 'Xaiter']
|
team-hdnet/hdnet
|
tests/test_tmppath.py
|
Python
|
gpl-3.0
| 548
| 0
|
# -*- coding: utf-8 -*-
# This file is part of the hdnet package
# Copyright 2014 the authors, see file AUTHORS.
# Licensed under the GPLv3, see file LICENSE for details
import os
import unittest
import shutil
class TestTmpPath(unittest.TestCase):
TMP_PATH = '/tmp/hdnettest'
def setUp(self):
if os.path.exists(self.TMP_PATH):
shutil.rmtree(self.TMP_PATH)
os.mkdir(self.TMP_PATH)
def tearDown(self):
        if os.path.exists(self.TMP_PATH):
shutil.rmtree(self.TMP_PATH)
# end of source
|
simontakite/sysadmin
|
pythonscripts/learningPython/bothmethods.py
|
Python
|
gpl-2.0
| 486
| 0
|
# File bothmethods.py
class Methods:
def imeth(self, x): # Normal instance method: passed a self
print([self, x])
def smeth(x): # Static: no instance passed
print([x])
    def cmeth(cls, x): # Class: gets class, not instance
print([cls, x])
smeth = staticmethod(smeth) # Make smeth a static method (or @: ahead)
cmeth = classmethod(cmeth) # Make cmeth a class method (or @: ahead)
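# [Editor's sketch] Calling each flavor (Python 3 shown); results as comments:
#
#   obj = Methods()
#   obj.imeth(1)      # [<Methods object>, 1]  -- instance passed as self
#   Methods.smeth(2)  # [2]                    -- no instance involved
#   obj.cmeth(3)      # [<class 'Methods'>, 3] -- class object, not instance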
|
keflavich/spectral-cube
|
spectral_cube/tests/test_spectral_cube.py
|
Python
|
bsd-3-clause
| 96,662
| 0.005069
|
from __future__ import print_function, absolute_import, division
import re
import copy
import operator
import itertools
import warnings
import mmap
from distutils.version import LooseVersion
import sys
import pytest
import astropy
from astropy import stats
from astropy.io import fits
from astropy import units as u
from astropy.wcs import WCS
from astropy.wcs import _wcs
from astropy.tests.helper import assert_quantity_allclose
from astropy.convolution import Gaussian2DKernel, Tophat2DKernel
from astropy.utils.exceptions import AstropyWarning
import numpy as np
from .. import (BooleanArrayMask,
FunctionMask, LazyMask, CompositeMask)
from ..spectral_cube import (OneDSpectrum, Projection,
VaryingResolutionOneDSpectrum,
LowerDimensionalObject)
from ..np_compat import allbadtonan
from .. import spectral_axis
from .. import base_class
from .. import utils
from .. import SpectralCube, VaryingResolutionSpectralCube, DaskSpectralCube
from . import path
from .helpers import assert_allclose, assert_array_equal
try:
import casatools
ia = casatools.image()
casaOK = True
except ImportError:
try:
from taskinit import ia
casaOK = True
except ImportError:
casaOK = False
WINDOWS = sys.platform == "win32"
# needed to test for warnings later
warnings.simplefilter('always', UserWarning)
warnings.simplefilter('error', utils.UnsupportedIterationStrategyWarning)
warnings.simplefilter('error', utils.NotImplementedWarning)
warnings.simplefilter('error', utils.WCSMismatchWarning)
warnings.simplefilter('error', FutureWarning)
warnings.filterwarnings(action='ignore', category=FutureWarning,
module='reproject')
try:
import yt
YT_INSTALLED = True
YT_LT_301 = LooseVersion(yt.__version__) < LooseVersion('3.0.1')
except ImportError:
YT_INSTALLED = False
YT_LT_301 = False
try:
import scipy
scipyOK = True
except ImportError:
scipyOK = False
import os
# if ON_TRAVIS is set, we're on travis.
on_travis = bool(os.environ.get('ON_TRAVIS'))
from radio_beam import Beam, Beams
from radio_beam.utils import BeamError
NUMPY_LT_19 = LooseVersion(np.__version__) < LooseVersion('1.9.0')
def cube_and_raw(filename, use_dask=None):
if use_dask is None:
raise ValueError('use_dask should be explicitly set')
p = path(filename)
if os.path.splitext(p)[-1] == '.fits':
with fits.open(p) as hdulist:
d = hdulist[0].data
c = SpectralCube.read(p, format='fits', mode='readonly', use_dask=use_dask)
elif os.path.splitext(p)[-1] == '.image':
ia.open(p)
d = ia.getchunk()
ia.unlock()
ia.close()
ia.done()
c = SpectralCube.read(p, format='casa_image', use_dask=use_dask)
else:
raise ValueError("Unsupported filetype")
return c, d
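# [Editor's sketch] Typical use of the helper above inside a test; the
# filename stands in for one of the FITS fixtures:
#
#   c, d = cube_and_raw('adv.fits', use_dask=False)
#   # c is the SpectralCube; d is the raw array the FITS file was read from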
def test_arithmetic_warning(data_vda_jybeam_lower, recwarn, use_dask):
cube, data = cube_and_raw(data_vda_jybeam_lower, use_dask=use_dask)
assert not cube._is_huge
# make sure the small cube raises a warning about loading into memory
with pytest.warns(UserWarning, match='requires loading the entire'):
cube + 5*cube.unit
def test_huge_disallowed(data_vda_jybeam_lower, use_dask):
cube, data = cube_and_raw(data_vda_jybeam_lower, use_dask=use_dask)
assert not cube._is_huge
# We need to reduce the memory threshold rather than use a large cube to
# make sure we don't use too much memory during testing.
from .. import cube_utils
OLD_MEMORY_THRESHOLD = cube_utils.MEMORY_THRESHOLD
try:
cube_utils.MEMORY_THRESHOLD = 10
assert cube._is_huge
with pytest.raises(ValueError, match='entire cube into memory'):
cube + 5*cube.unit
if use_dask:
with pytest.raises(ValueError, match='entire cube into memory'):
cube.mad_std()
else:
with pytest.raises(ValueError, match='entire cube into memory'):
cube.max(how='cube')
cube.allow_huge_operations = True
# just make sure it doesn't fail
cube + 5*cube.unit
finally:
cube_utils.MEMORY_THRESHOLD = OLD_MEMORY_THRESHOLD
del cube
class BaseTest(object):
@pytest.fixture(autouse=True)
def setup_method_fixture(self, request, data_adv, use_dask):
c, d = cube_and_raw(data_adv, use_dask=use_dask)
mask = BooleanArrayMask(d > 0.5, c._wcs)
c._mask = mask
self.c = c
self.mask = mask
self.d = d
class BaseTestMultiBeams(object):
@pytest.fixture(autouse=True)
def setup_method_fixture(self, request, data_adv_beams, use_dask):
c, d = cube_and_raw(data_adv_beams, use_dask=use_dask)
mask = BooleanArrayMask(d > 0.5, c._wcs)
c._mask = mask
self.c = c
self.mask = mask
self.d = d
@pytest.fixture
def filename(request):
return request.getfixturevalue(request.param)
translist = [('data_advs', [0, 1, 2, 3]),
('data_dvsa', [2, 3, 0, 1]),
('data_sdav', [0, 2, 1, 3]),
('data_sadv', [0, 1, 2, 3]),
('data_vsad', [3, 0, 1, 2]),
('data_vad', [2, 0, 1]),
('data_vda', [0, 2, 1]),
('data_adv', [0, 1, 2]),
]
translist_vrsc = [('data_vda_beams', [0, 2, 1])]
class TestSpectralCube(object):
@pytest.mark.parametrize(('filename', 'trans'), translist + translist_vrsc,
indirect=['filename'])
def test_consistent_transposition(self, filename, trans, use_dask):
"""data() should return velocity axis first, then world 1, then world 0"""
c, d = cube_and_raw(filename, use_dask=use_dask)
expected = np.squeeze(d.transpose(trans))
assert_allclose(c._get_filled_data(), expected)
@pytest.mark.parametrize(('filename', 'view'), (
('data_adv', np.s_[:, :,:]),
('data_adv', np.s_[::2, :, :2]),
('data_adv', np.s_[0]),
), indirect=['filename'])
def test_world(self, filename, view, use_dask):
p = path(filename)
# d = fits.getdata(p)
# wcs = WCS(p)
# c = SpectralCube(d, wcs)
c = SpectralCube.read(p)
wcs = c.wcs
# shp = d.shape
# inds = np.indices(d.shape)
shp = c.shape
inds = np.indices(c.shape)
pix = np.column_stack([i.ravel() for i in inds[::-1]])
world = wcs.all_pix2world(pix, 0).T
world = [w.reshape(shp) for w in world]
world = [w[view] * u.Unit(wcs.wcs.cunit[i])
for i, w in enumerate(world)][::-1]
w2 = c.world[view]
for result, expected in zip(w2, world):
assert_allclose(result, expected)
# Test world_flattened here, too
w2_flat = c.flattened_world(view=view)
for result, expected in zip(w2_flat, world):
print(result.shape, expected.flatten().shape)
assert_allclose(result, expected.flatten())
@pytest.mark.parametrize('view', (np.s_[:, :,:],
np.s_[:2, :3, ::2]))
def test_world_transposes_3d(self, view, data_adv, data_vad, use_dask):
c1, d1 = cube_and_raw(data_adv, use_dask=use_dask)
c2, d2 = cube_and_raw(data_vad, use_dask=use_dask)
for w1, w2 in zip(c1.world[view], c2.world[view]):
assert_allclose(w1, w2)
@pytest.mark.parametrize('view',
(np.s_[:, :,:],
np.s_[:2, :3, ::2],
np.s_[::3, ::2, :1],
np.s_[:], ))
def test_world_transposes_4d(self, view, data_advs, data_sadv, use_dask):
        c1, d1 = cube_and_raw(data_advs, use_dask=use_dask)
c2, d2 = cube_and_raw(data_sadv, use_dask=use_dask)
for w1, w2 in zip(c1.world[view], c2.world[view]):
assert_allclose(w1, w2)
@pytest.mark.parametrize(('filename','masktype','unit','suffix'),
|
ess-dmsc/do-ess-data-simulator
|
DonkiDirector/HDFWriterThread.py
|
Python
|
bsd-2-clause
| 6,785
| 0.040678
|
import time
import threading
import PyTango
import numpy
import h5py
THREAD_DELAY_SEC = 0.1
class HDFwriterThread(threading.Thread):
#-----------------------------------------------------------------------------------
# __init__
#-----------------------------------------------------------------------------------
def __init__(self, parent_obj, filename_in, trg_start, trg_stop):
threading.Thread.__init__(self)
        self._alive = True
self.myState = PyTango.DevState.OFF
self.filename = filename_in
self.parent = parent_obj
self.trg_start = trg_start
self.trg_stop = trg_stop
self.data_queue = []
self.datasource_finished = {}
self._hdf_file = None
self.timeout_sec = 20
self.MetadataSources = {}
if "_errors" in dir(h5py):
h5py._errors.silence_errors()
#-----------------------------------------------------------------------------------
# set_Metadata_Sources
#-----------------------------------------------------------------------------------
def set_Metadata_Sources(self, MetadataSources):
self.MetadataSources = MetadataSources
#-----------------------------------------------------------------------------------
# notify_new_data
#-----------------------------------------------------------------------------------
def notify_new_data(self, daq_thread, trg):
self.data_queue.append([daq_thread, trg])
#-----------------------------------------------------------------------------------
# store_metadata
#-----------------------------------------------------------------------------------
def store_metadata(self):
for metakey in self.MetadataSources.keys():
if not self.MetadataSources[metakey]['enabled']:
continue
try:
attprx = PyTango.AttributeProxy(self.MetadataSources[metakey]['tango_attr'])
attrinfo = attprx.get_config()
attprx.get_device_proxy().set_timeout_millis(500)
data_in = attprx.read().value
del attprx
except Exception, ex:
self.MetadataSources[metakey]['status'] = 'ALARM'
print "store_metadata, attribute proxy",metakey,ex
continue
#
retries = 0
while retries < 3:
if metakey in self._hdf_file:
break
try:
# Create HDF dataset
dset = self._hdf_file.create_dataset(metakey, data=data_in)
#dset = self._hdf_file[metakey]
dset.attrs["unit"] = attrinfo.unit
break
except Exception, ex:
print "store_metadata",metakey,self.trg_start,ex
retries += 1
#-----------------------------------------------------------------------------------
# store_sync_player_metadata
#-----------------------------------------------------------------------------------
def store_sync_player_metadata(self,daq_thread):
player_metadata = daq_thread.player_metadata
dset = self._hdf_file[daq_thread.player_nickname]
for key in player_metadata.keys():
try:
attprx = PyTango.AttributeProxy(player_metadata[key].tango_attr)
attprx.get_device_proxy().set_timeout_millis(500)
data_in = attprx.read().value
del attprx
#
dset.attrs[key] = data_in
except Exception, ex:
print "store_sync_player_metadata",key,ex
#
# Unit is default
try:
attprx = PyTango.AttributeProxy(daq_thread.player_attrname)
attrinfo = attprx.get_config()
del attprx
#
dset.attrs["unit"] = attrinfo.unit
except Exception, ex:
print "store_sync_player_metadata, deafult unit",daq_thread.player_attrname,ex
#-----------------------------------------------------------------------------------
# store_data
#-----------------------------------------------------------------------------------
def store_data(self, daq_queue_item):
daq_thread = daq_queue_item[0]
trg = daq_queue_item[1]
data_in = daq_thread._data_buffer[trg]
        if data_in is None:  # identity check; == can misbehave for array payloads
return
if daq_thread.player_nickname not in self.datasource_finished.keys():
self.datasource_finished[daq_thread.player_nickname] = False
#
# Create HDF dataset
tokens = daq_thread.player_nickname.split("/")
groupname=""
dsetname = daq_thread.player_nickname
dataset_len = 1+self.trg_stop-self.trg_start
retries = 0
while (retries < 3):
try:
if dsetname in self._hdf_file:
break
if len(numpy.shape(data_in)) == 0: #scalar
self._hdf_file.create_dataset(dsetname,shape=(dataset_len,),dtype=numpy.dtype(type(data_in)))
elif len(numpy.shape(data_in)) == 1: #spectrum
self._hdf_file.create_dataset(dsetname, shape=(dataset_len,data_in.shape[0]), dtype=data_in.dtype)
elif len(numpy.shape(data_in)) == 2: #image
self._hdf_file.create_dataset(dsetname, shape=(dataset_len,data_in.shape[0],data_in.shape[1]), dtype=data_in.dtype)
break
except Exception, ex:
print "Create Dataset",dsetname,data_in,len(numpy.shape(data_in)),dataset_len,"\n",ex
retries += 1
#
self.store_sync_player_metadata(daq_thread)
#
retries = 0
while (retries < 3):
#update the dataset
try:
dset = self._hdf_file.get(daq_thread.player_nickname, None)
dset[slice(trg - self.trg_start,trg - self.trg_start+1)] = data_in
break
except Exception, ex:
retries += 1
print "Update Dataset",ex
#
if trg == self.trg_stop:
self.datasource_finished.pop(daq_thread.player_nickname)
#-----------------------------------------------------------------------------------
# close_file
#-----------------------------------------------------------------------------------
def close_file(self):
try:
#
data_in=numpy.arange(self.trg_start,self.trg_stop+1)
self._hdf_file.create_dataset("triggers", data = data_in)
#
self.store_metadata()
#
self._hdf_file.flush()
self._hdf_file.close()
self.parent.report_message("Closed file "+self.filename)
except Exception, ex:
print "Closing File",ex
self.parent.notify_hdf_file_finished(self)
#-----------------------------------------------------------------------------------
# run
#-----------------------------------------------------------------------------------
def run(self):
try:
self._hdf_file = h5py.File(self.filename,'w')
self.parent.report_message("Opened file "+self.filename)
except Exception, ex:
print ex
self.parent.report_message("Unable to Open file "+self.filename)
return
last_store_time = time.time()
while self._alive:
while len(self.data_queue):
#
self.store_data(self.data_queue[0])
del self.data_queue[0]
last_store_time = time.time()
#
if len(self.datasource_finished) == 0:
self.close_file()
if self.parent._paused:
last_store_time = time.time()
elif (time.time() - last_store_time) > self.timeout_sec:
print "TIMEOUT",self.filename
self.close_file()
last_store_time = time.time()
time.sleep(THREAD_DELAY_SEC)
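# [Editor's sketch] The dataset layout used by store_data(), as a standalone
# h5py snippet; names and shapes are illustrative:
#
#   import h5py, numpy
#   f = h5py.File('shot.h5', 'w')
#   n_trg = 10                                    # 1 + trg_stop - trg_start
#   dset = f.create_dataset('cam/image', shape=(n_trg, 4, 4),
#                           dtype=numpy.float32)  # one slot per trigger
#   dset[3] = numpy.ones((4, 4))                  # fill the slot for trigger 3
#   f.close()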
|
abelcarreras/aiida_extensions
|
setup.py
|
Python
|
mit
| 2,089
| 0.00383
|
from setuptools import setup, find_packages
setup(
name='aiida-phonon',
version='0.1',
description='AiiDA plugin for running phonon calculations using phonopy',
url='https://github.com/abelcarreras/aiida_extensions',
author='Abel Carreras',
author_email='abelcarreras@gmail.com',
license='MIT license',
packages=find_packages(exclude=['aiida']),
requires=['phonopy', 'numpy', 'dynaphopy'],
setup_requires=['reentry'],
reentry_register=True,
entry_points={
'aiida.calculations': [
'lammps.combinate = plugins.jobs.lammps.combinate:CombinateCalculation',
'lammps.force = plugins.jobs.lammps.force:ForceCalculation',
'lammps.md = plugins.jobs.lammps.md:MdCalculation',
'lammps.optimize = plugins.jobs.lammps.optimize:OptimizeCalculation',
# 'vasp.vasp = plugins.jobs.vasp:VaspCalculation',
'phonopy = plugins.jobs.phonopy: PhonopyCalculation',
'dynaphopy = plugins.jobs.dynaphopy: DynaphopyCalculation'],
'aiida.parsers': [
'lammps.force = plugins.parsers.lammps.force:ForceParser',
'lammps.md = plugins.parsers.lammps.md:MdParser',
'lammps.optimize = plugins.parsers.lammps.optimize:OptimizeParser',
# 'vasp.vasp = plugins.parsers.vasp:VaspParser',
'phonopy = plugins.parsers.phonopy: PhonopyParser',
'dynaphopy = plugins.parsers.dynaphopy: DynaphopyParser'],
'aiida.workflows': [
'wf_phonon = workflows.wf_phonon:Wf_phononWorkflow',
            'wf_gruneisen_pressure = workflows.wf_gruneisen_pressure:WorkflowGruneisen',
'wf_gruneisen_volume = workflows.wf_gruneisen_volume:WorkflowGruneisen',
'wf_qha = workflows.qha:WorkflowQHA',
'wf_quasiparticle = workflows.quasiparticle:WorkflowQuasiparticle',
'wf_quasiparticle_thermo = workflows.wf_quasiparticle_thermo:WorkflowQuasiparticle',
'wf_scan_quasiparticle = workflows.wf_scan_quasiparticle:WorkflowScanQuasiparticle',
]
}
)
|
valmynd/MediaFetcher
|
src/plugins/youtube_dl/youtube_dl/extractor/hearthisat.py
|
Python
|
gpl-3.0
| 4,347
| 0.027605
|
# coding: utf-8
from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..compat import compat_urlparse
from ..utils import (
HEADRequest,
KNOWN_EXTENSIONS,
sanitized_Request,
str_to_int,
urlencode_postdata,
urlhandle_detect_ext,
)
class HearThisAtIE(InfoExtractor):
_VALID_URL = r'https?://(?:www\.)?hearthis\.at/(?P<artist>[^/]+)/(?P<title>[A-Za-z0-9\-]+)/?$'
_PLAYLIST_URL = 'https://hearthis.at/playlist.php'
_TESTS = [{
'url': 'https://hearthis.at/moofi/dr-kreep',
'md5': 'ab6ec33c8fed6556029337c7885eb4e0',
'info_dict': {
'id': '150939',
'ext': 'wav',
'title': 'Moofi - Dr. Kreep',
'thumbnail': r're:^https?://.*\.jpg$',
'timestamp': 1421564134,
'description': 'Listen to Dr. Kreep by Moofi on hearthis.at - Modular, Eurorack, Mutable Intruments Braids, Valhalla-DSP',
'upload_date': '20150118',
'comment_count': int,
'view_count': int,
'like_count': int,
'duration': 71,
'categories': ['Experimental'],
}
}, {
# 'download' link redirects to the original webpage
'url': 'https://hearthis.at/twitchsf/dj-jim-hopkins-totally-bitchin-80s-dance-mix/',
'md5': '5980ceb7c461605d30f1f039df160c6e',
'info_dict': {
'id': '811296',
'ext': 'mp3',
'title': 'TwitchSF - DJ Jim Hopkins - Totally Bitchin\' 80\'s Dance Mix!',
'description': 'Listen to DJ Jim Hopkins - Totally Bitchin\' 80\'s Dance Mix! by TwitchSF on hearthis.at - Dance',
'upload_date': '20160328',
'timestamp': 1459186146,
'thumbnail': r're:^https?://.*\.jpg$',
'comment_count': int,
'view_count': int,
'like_count': int,
'duration': 4360,
'categories': ['Dance'],
},
}]
def _real_extract(self, url):
m = re.match(self._VALID_URL, url)
display_id = '{artist:s} - {title:s}'.format(**m.groupdict())
webpage = self._download_webpage(url, display_id)
track_id = self._search_regex(
r'intTrackId\s*=\s*(\d+)', webpage, 'track ID')
payload = urlencode_postdata({'tracks[]': track_id})
req = sanitized_Request(self._PLAYLIST_URL, payload)
req.add_header('Content-type', 'application/x-www-form-urlencoded')
track = self._download_json(req, track_id, 'Downloading playlist')[0]
title = '{artist:s} - {title:s}'.format(**track)
categories = None
if track.get('category'):
categories = [track['category']]
description = self._og_search_description(webpage)
thumbnail = self._og_search_thumbnail(webpage)
meta_span = r'<span[^>]+class="%s".*?</i>([^<]+)</span>'
view_count = str_to_int(self._search_regex(
meta_span % 'plays_count', webpage, 'view count', fatal=False))
like_count = str_to_int(self._search_regex(
meta_span % 'likes_count', webpage, 'like count', fatal=False))
comment_count = str_to_int(self._search_regex(
meta_span % 'comment_count', webpage, 'comment count', fatal=False))
duration = str_to_int(self._search_regex(
            r'data-length="(\d+)', webpage, 'duration', fatal=False))
timestamp = str_to_int(self._search_regex(
r'<span[^>]+class="calctime"[^>]+data-time="(\d+)', webpage, 'timestamp', fatal=False))
formats = []
mp3_url = self._search_regex(
r'(?s)<a class="player-link"\s+(?:[a-zA-Z0-9_:-]+="[^"]+"\s+)*?data-mp3="([^"]+)"',
webpage, 'mp3 URL', fatal=False)
if mp3_url:
formats.append({
'format_id': 'mp3',
'vcodec': 'none',
'acodec': 'mp3',
'url': mp3_url,
})
download_path = self._search_regex(
r'<a class="[^"]*download_fct[^"]*"\s+href="([^"]+)"',
webpage, 'download URL', default=None)
if download_path:
download_url = compat_urlparse.urljoin(url, download_path)
ext_req = HEADRequest(download_url)
ext_handle = self._request_webpage(
ext_req, display_id, note='Determining extension')
ext = urlhandle_detect_ext(ext_handle)
if ext in KNOWN_EXTENSIONS:
formats.append({
'format_id': 'download',
'vcodec': 'none',
'ext': ext,
'url': download_url,
'preference': 2, # Usually better quality
})
self._sort_formats(formats)
return {
'id': track_id,
'display_id': display_id,
'title': title,
'formats': formats,
'thumbnail': thumbnail,
'description': description,
'duration': duration,
'timestamp': timestamp,
'view_count': view_count,
'comment_count': comment_count,
'like_count': like_count,
'categories': categories,
}
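# [Editor's sketch] meta_span above is a template; substituting a CSS class
# yields the concrete regex, e.g.:
#
#   meta_span % 'plays_count'
#   # -> '<span[^>]+class="plays_count".*?</i>([^<]+)</span>'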
|
saltstack/salt
|
tests/unit/modules/test_djangomod.py
|
Python
|
apache-2.0
| 8,081
| 0.001114
|
"""
:codeauthor: Jayesh Kariya <jayeshk@saltstack.com>
"""
import sys
import pytest
import salt.modules.djangomod as djangomod
from tests.support.mixins import LoaderModuleMockMixin
from tests.support.mock import MagicMock, patch
from tests.support.unit import TestCase
class DjangomodTestCase(TestCase, LoaderModuleMockMixin):
"""
Test cases for salt.modules.djangomod
"""
def setup_loader_modules(self):
patcher = patch("salt.utils.path.which", lambda exe: exe)
patcher.start()
self.addCleanup(patcher.stop)
return {
djangomod: {"_get_django_admin": MagicMock(return_value="django-admin.py")}
}
# 'command' function tests: 1
def test_command(self):
"""
Test if it runs arbitrary django management command
"""
mock = MagicMock(return_value=True)
with patch.dict(djangomod.__salt__, {"cmd.run": mock}):
self.assertTrue(djangomod.command("DJANGO_SETTINGS_MODULE", "validate"))
# 'syncdb' function tests: 1
def test_syncdb(self):
"""
Test if it runs the Django-Admin syncdb command
"""
mock = MagicMock(return_value=True)
with patch.dict(djangomod.__salt__, {"cmd.run": mock}):
self.assertTrue(djangomod.syncdb("DJANGO_SETTINGS_MODULE"))
# 'migrate' function tests: 1
def test_migrate(self):
"""
Test if it runs the Django-Admin migrate command
"""
mock = MagicMock(return_value=True)
with patch.dict(djangomod.__salt__, {"cmd.run": mock}):
self.assertTrue(djangomod.migrate("DJANGO_SETTINGS_MODULE"))
# 'createsuperuser' function tests: 1
def test_createsuperuser(self):
"""
Test if it create a super user for the database.
"""
mock = MagicMock(return_value=True)
with patch.dict(djangomod.__salt__, {"cmd.run": mock}):
self.assertTrue(
djangomod.createsuperuser(
"DJANGO_SETTINGS_MODULE", "SALT", "salt@slatstack.com"
)
)
# 'loaddata' function tests: 1
def test_loaddata(self):
"""
Test if it loads fixture data
"""
mock = MagicMock(return_value=True)
with patch.dict(djangomod.__salt__, {"cmd.run": mock}):
self.assertTrue(djangomod.loaddata("DJANGO_SETTINGS_MODULE", "mydata"))
# 'collectstatic' function tests: 1
def test_collectstatic(self):
"""
Test if it collect static files from each of your applications
into a single location
"""
mock = MagicMock(return_value=True)
with patch.dict(djangomod.__salt__, {"cmd.run": mock}):
self.assertTrue(djangomod.collectstatic("DJANGO_SETTINGS_MODULE"))
class DjangomodCliCommandTestCase(TestCase, LoaderModuleMockMixin):
"""
Test cases for salt.modules.djangomod
"""
def setup_loader_modules(self):
patcher = patch("salt.utils.path.which", lambda exe: exe)
patcher.start()
self.addCleanup(patcher.stop)
return {djangomod: {}}
def test_django_admin_cli_command(self):
mock = MagicMock()
        with patch.dict(djangomod.__salt__, {"cmd.run": mock}):
djangomod.command("settings.py", "runserver")
mock.assert_called_once_with(
"django-admin.py runserver --settings=settings.py",
python_shell=False,
env=None,
runas=None,
)
def test_django_admin_cli_command_with_args(self):
mock = MagicMock()
with patch.dict(djangomod.__salt__, {"cmd.run": mock}):
djangomod.command(
"settings.py",
"runserver",
None,
None,
None,
None,
"noinput",
"somethingelse",
)
mock.assert_called_once_with(
"django-admin.py runserver --settings=settings.py "
"--noinput --somethingelse",
python_shell=False,
env=None,
runas=None,
)
def test_django_admin_cli_command_with_kwargs(self):
mock = MagicMock()
with patch.dict(djangomod.__salt__, {"cmd.run": mock}):
djangomod.command(
"settings.py", "runserver", None, None, None, database="something"
)
mock.assert_called_once_with(
"django-admin.py runserver --settings=settings.py --database=something",
python_shell=False,
env=None,
runas=None,
)
def test_django_admin_cli_command_with_kwargs_ignore_dunder(self):
mock = MagicMock()
with patch.dict(djangomod.__salt__, {"cmd.run": mock}):
djangomod.command(
"settings.py", "runserver", None, None, None, __ignore="something"
)
mock.assert_called_once_with(
"django-admin.py runserver --settings=settings.py",
python_shell=False,
env=None,
runas=None,
)
def test_django_admin_cli_syncdb(self):
mock = MagicMock()
with patch.dict(djangomod.__salt__, {"cmd.run": mock}):
djangomod.syncdb("settings.py")
mock.assert_called_once_with(
"django-admin.py syncdb --settings=settings.py --noinput",
python_shell=False,
env=None,
runas=None,
)
def test_django_admin_cli_syncdb_migrate(self):
mock = MagicMock()
with patch.dict(djangomod.__salt__, {"cmd.run": mock}):
djangomod.syncdb("settings.py", migrate=True)
mock.assert_called_once_with(
"django-admin.py syncdb --settings=settings.py --migrate --noinput",
python_shell=False,
env=None,
runas=None,
)
def test_django_admin_cli_migrate(self):
mock = MagicMock()
with patch.dict(djangomod.__salt__, {"cmd.run": mock}):
djangomod.migrate("settings.py")
mock.assert_called_once_with(
"django-admin.py migrate --settings=settings.py --noinput",
python_shell=False,
env=None,
runas=None,
)
@pytest.mark.skipif(
sys.version_info < (3, 6), reason="Py3.5 dictionaries are not ordered"
)
def test_django_admin_cli_createsuperuser(self):
mock = MagicMock()
with patch.dict(djangomod.__salt__, {"cmd.run": mock}):
djangomod.createsuperuser("settings.py", "testuser", "user@example.com")
self.assertEqual(mock.call_count, 1)
mock.assert_called_with(
"django-admin.py createsuperuser --settings=settings.py --noinput "
"--email=user@example.com --username=testuser",
env=None,
python_shell=False,
runas=None,
)
def no_test_loaddata(self):
mock = MagicMock()
with patch.dict(djangomod.__salt__, {"cmd.run": mock}):
djangomod.loaddata("settings.py", "app1,app2")
mock.assert_called_once_with(
"django-admin.py loaddata --settings=settings.py app1 app2",
)
def test_django_admin_cli_collectstatic(self):
mock = MagicMock()
with patch.dict(djangomod.__salt__, {"cmd.run": mock}):
djangomod.collectstatic(
"settings.py", None, True, "something", True, True, True, True
)
mock.assert_called_once_with(
"django-admin.py collectstatic --settings=settings.py "
"--noinput --no-post-process --dry-run --clear --link "
"--no-default-ignore --ignore=something",
python_shell=False,
env=None,
runas=None,
)
|
TheTimmy/spack
|
var/spack/repos/builtin/packages/slurm/package.py
|
Python
|
lgpl-2.1
| 4,160
| 0.00024
|
##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Slurm(AutotoolsPackage):
"""Slurm is an open source, fault-tolerant, and highly scalable cluster
management and job scheduling system for large and small Linux clusters.
Slurm requires no kernel modifications for its operation and is relatively
self-contained. As a cluster workload manager, Slurm has three key
functions. First, it allocates exclusive and/or non-exclusive access to
    resources (compute nodes) to users for some duration of time so they can
perform work. Second, it provides a framework for starting, executing,
    and monitoring work (normally a parallel job) on the set of allocated
nodes. Finally, it arbitrates contention for resources by managing a
queue of pending work.
"""
homepage = 'https://slurm.schedmd.com'
url = 'https://github.com/SchedMD/slurm/archive/slurm-17-02-6-1.tar.gz'
version('17-02-6-1', '8edbb9ad41819464350d9de013367020')
variant('gtk', default=False, description='Enable GTK+ support')
variant('mariadb', default=False, description='Use MariaDB instead of MySQL')
variant('hwloc', default=False, description='Enable hwloc support')
variant('hdf5', default=False, description='Enable hdf5 support')
variant('readline', default=True, description='Enable readline support')
# TODO: add variant for BG/Q and Cray support
# TODO: add support for checkpoint/restart (BLCR)
# TODO: add support for lua
depends_on('curl')
depends_on('glib')
depends_on('json-c')
depends_on('lz4')
depends_on('munge')
depends_on('openssl')
depends_on('pkg-config', type='build')
depends_on('readline')
depends_on('zlib')
depends_on('gtkplus+X', when='+gtk')
depends_on('hdf5', when='+hdf5')
depends_on('hwloc', when='+hwloc')
depends_on('mariadb', when='+mariadb')
def configure_args(self):
spec = self.spec
args = [
'--with-libcurl={0}'.format(spec['curl'].prefix),
'--with-json={0}'.format(spec['json-c'].prefix),
'--with-lz4={0}'.format(spec['lz4'].prefix),
'--with-munge={0}'.format(spec['munge'].prefix),
'--with-ssl={0}'.format(spec['openssl'].prefix),
'--with-zlib={0}'.format(spec['zlib'].prefix),
]
if '~gtk' in spec:
args.append('--disable-gtktest')
if '+readline' in spec:
args.append('--with-readline={0}'.format(spec['readline'].prefix))
else:
args.append('--without-readline')
if '+hdf5' in spec:
args.append(
'--with-hdf5={0}'.format(spec['hdf5'].prefix.bin.h5cc)
)
else:
args.append('--without-hdf5')
if '+hwloc' in spec:
args.append('--with-hwloc={0}'.format(spec['hwloc'].prefix))
else:
args.append('--without-hwloc')
return args
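# [Editor's sketch] The variants above map to spec flags at install time;
# the command is illustrative:
#
#   spack install slurm+hwloc+hdf5~gtk   # enable hwloc/hdf5, skip GTK+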
|
usc-isi-i2/etk
|
etk/data_extractors/htiExtractors/utils.py
|
Python
|
mit
| 18,615
| 0.017029
|
"""
This class is used to speed up general, day-to-day programming needs. It contains a variety of
very commonly used functions - anything from retrieving a custom list of dates to
retrieving Dictionaries of Backpage cities' coordinates.
"""
from copy import deepcopy
import csv
from datetime import datetime, timedelta
import pkg_resources
import re
from pymongo import MongoClient
def all_cities():
"""
Get a list of all Backpage city names.
Returns:
list of city names as Strings
"""
cities = []
fname = pkg_resources.resource_filename(__name__, 'resources/CityPops.csv')
with open(fname, 'rU') as csvfile:
reader = csv.reader(csvfile, delimiter = ',')
for row in reader:
cities.append(row[0])
cities.sort()
return cities
def all_days(boo):
"""
Return a list of all dates from 11/12/2015 to the present.
Args:
boo: if true, list contains Numbers (20151230); if false, list contains Strings ("2015-12-30")
Returns:
list of either Numbers or Strings
"""
earliest = datetime.strptime(('2015-11-12').replace('-', ' '), '%Y %m %d')
latest = datetime.strptime(datetime.today().date().isoformat().replace('-', ' '), '%Y %m %d')
num_days = (latest - earliest).days + 1
all_days = [latest - timedelta(days=x) for x in range(num_days)]
all_days.reverse()
output = []
if boo:
# Return as Integer, yyyymmdd
for d in all_days:
output.append(int(str(d).replace('-', '')[:8]))
else:
# Return as String, yyyy-mm-dd
for d in all_days:
output.append(str(d)[:10])
return output
def cities_clean(cits):
output = ''
cities = sorted(cits, key=lambda k: k['city'])
for cit_pair in cities:
day_str = ' days'
if cit_pair['count'] == 1:
day_str = ' day'
output = output + cit_pair['city'] + ': ' + str(cit_pair['count']) + day_str + '<br>'
output = output[:-4]
return output
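# NOTE (editor): the redefinition below shadows the string-building
# cities_clean() above; only the list-returning variant takes effect at
# import time.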
def cities_clean(cits):
output = []
cities = sorted(cits, key=lambda k: k['city'])
for cit_pair in cities:
day_str = ' days'
if cit_pair['count'] == 1:
day_str = ' day'
output.append(''.join([cit_pair['city'], ': ', str(cit_pair['count']), day_str]))
# output = output + cit_pair['city'] + ': ' + str(cit_pair['count']) + day_str + '<br>'
# output = output[:-4]
return output
def city_nums():
"""
Get a dictionary of Backpage city names mapped to their 'legend' value.
Returns:
dictionary of Backpage city names mapped to their numeric value
"""
city_nums = {}
first_row = 1
num = 0
fname = pkg_resources.resource_filename(__name__, 'resources/Distance_Matrix.csv')
with open(fname, 'rU') as csvfile:
reader = csv.reader(csvfile, delimiter = ',')
for row in reader:
if first_row == 1:
first_row = 0
else:
city_nums[row[0]] = num
num = num + 1
return city_nums
def countries():
countries = []
with open('dataFiles/country_names.csv', 'rU') as csvfile:
reader = csv.reader(csvfile, delimiter = ',')
for row in reader:
countries.append(row[0].lower().replace(' ', ''))
return set(countries)
def date_clean(date, dashboard_style=False):
"""
Clean the numerical date value in order to present it.
Args:
boo: numerical date (20160205)
Returns:
Stringified version of the input date ("2016-02-05")
"""
if dashboard_style:
dt = str(date)
out = dt[4:6] + '/' + dt[6:] + '/' + dt[:4]
else:
dt = str(date)
out = dt[:4] + '-' + dt[4:6] + '-' + dt[6:]
return out
# Get the formatted date for a day N days go
def date_n_days_ago(n, clean=False):
day = datetime.strptime((datetime.today() - timedelta(n)).date().isoformat().replace('-', ' '), '%Y %m %d')
if not clean:
return int(str(day).replace('-', '')[:8])
else:
return str(day)[:10]
def date_range(start, end, boo):
"""
Return list of dates within a specified range, inclusive.
Args:
start: earliest date to include, String ("2015-11-25")
end: latest date to include, String ("2015-12-01")
boo: if true, output list contains Numbers (20151230); if false, list contains Strings ("2015-12-30")
Returns:
list of either Numbers or Strings
"""
earliest = datetime.strptime(start.replace('-', ' '), '%Y %m %d')
latest = datetime.strptime(end.replace('-', ' '), '%Y %m %d')
num_days = (latest - earliest).days + 1
all_days = [latest - timedelta(days=x) for x in range(num_days)]
all_days.reverse()
output = []
if boo:
# Return as Integer, yyyymmdd
for d in all_days:
output.append(int(str(d).replace('-', '')[:8]))
else:
# Return as String, yyyy-mm-dd
for d in all_days:
output.append(str(d)[:10])
return output
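# Illustrative usage (not part of the original module):
#   date_range('2015-11-25', '2015-11-27', True)   -> [20151125, 20151126, 20151127]
#   date_range('2015-11-25', '2015-11-27', False)  -> ['2015-11-25', '2015-11-26', '2015-11-27']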
def dicts_equal(d1, d2):
"""
Perform a deep comparison of two dictionaries
Handles:
- Primitives
- Nested dicts
- Lists of primitives
"""
# check for different sizes
if len(d1) != len(d2):
return False
# check for different keys
for k in d1:
if k not in d2:
return False
for k in d2:
if k not in d1:
return False
# compare each element in dict
for k in d1:
if type(d1[k]) != type(d2[k]):
# different value types
return False
# lists
elif isinstance(d1[k], list):
if not (sorted(d1[k]) == sorted(d2[k])):
return False
# nested dicts
elif isinstance(d1[k], dict):
if not dicts_equal(d1[k], d2[k]):
return False
# primitives
else:
if d1[k] != d2[k]:
return False
return True
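# Illustrative usage (not part of the original module): list order is ignored
# and nested dicts are compared recursively, but value types must match.
#   dicts_equal({'a': [2, 1], 'b': {'c': 3}}, {'a': [1, 2], 'b': {'c': 3}})  -> True
#   dicts_equal({'a': 1}, {'a': '1'})                                        -> False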
def distances():
"""
Get all distances between all cities in miles (matrix-style).
Returns:
dictionary of Backpage city names mapped to a list of distances, one for every other city
"""
distances = {}
# Instantiate a matrix of distances (in miles) between all cities
num = 0
top_row = 1
fname = pkg_resources.resource_filename(__name__, 'resources/Distance_Matrix.csv')
with open(fname, 'rU') as csvfile:
reader = csv.reader(csvfile, delimiter = ',')
for row in reader:
if top_row == 1:
top_row = 0
else:
# Map each city to a list of distances to all other cities.
vals = []
for item in row[1:]:
if not item:
continue
try:
vals.append(int(float(item)))
except ValueError:
print 'Invalid data type for row {} with value {}, column value: {}'.format(num, row[0], item)
distances[num] = vals
num += 1
return distances
def ethnicity_nums():
# Map ethnicities to a numeric value
ethn_legend = {}
ethn_list = ['white_non_hispanic', 'hispanic_latino', 'american_indian', 'asian', 'midEast_nAfrican',
'african_american', 'subsaharan_african', 'multiracial']
for e in range(len(ethn_list)):
ethn_legend[ethn_list[e]] = e+1
return ethn_legend
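# Illustrative usage (not part of the original module): values are the 1-based
# positions in ethn_list, e.g. ethnicity_nums()['white_non_hispanic'] -> 1
# and ethnicity_nums()['asian'] -> 4.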
def ethnicities_clean():
""" Get dictionary of unformatted ethnicity types mapped to clean corresponding ethnicity strings """
eths_clean = {}
fname = pkg_resources.resource_filename(__name__, 'resources/Ethnicity_Groups.csv')
with open(fname, 'rU') as csvfile:
reader = csv.reader(csvfile, delimiter = ',')
first = []
for row in reader:
if first:
for i in range(len(first)):
if first[i] and row[i]:
eths_clean[first[i]] = row[i]
first = []
else:
first = deepcopy(row)
return eths_clean
def filter_text(body, title):
    """Strip escape sequences and HTML tags from the body, then prepend the title."""
output = body.replace('\\r', '').replace('\\n', '').replace('&', '')
# Remove HTML tags
output = re.sub(r'<.*?>', ' ', output)
# Remove successive spaces
output = re.sub(r' +', ' ', output)
output = title + ' ~~~ ' + output
return output
def formal_cities(reverse=False):
"""
Get a dictionary that maps all Backpage city names to their presentable, formal names.
Returns:
dictionary of Backpage city names mapped to formal city names
"""
output = {}
fname = pkg_resources.resource_filename(__name__, 'resources/Formal_City_Name_Pairs.csv')
with open(fname, 'rU') as csvfile:
reader = csv.reader(csvfile, delimiter = ',')
for row in reader:
if not reverse:
    # NOTE: the source row is truncated at this point; the mapping below is
    # inferred from the docstring (the CSV column order is an assumption).
    output[row[0]] = row[1]
else:
    output[row[1]] = row[0]
return output
|
zhsso/ubunto-one
|
src/backends/db/schemas/txlog/patch_4.py
|
Python
|
agpl-3.0
| 1,421
| 0
|
# Copyright 2008-2015 Canonical
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# For further info, check http://launchpad.net/filesync-server
"""Add db_worker_unseen table to keep track of unseen items on the database
side.
"""
SQL = [
"""
CREATE TABLE txlog.db_worker_unseen (
id INTEGER NOT NULL,
worker_id TEXT NOT NULL,
created TIMESTAMP WITHOUT TIME ZONE NOT NULL DEFAULT \
timezone('UTC'::text, now())
)
""",
"""
GRANT SELECT, INSERT, UPDATE, DELETE
ON TABLE txlog.db_worker_unseen
TO storage, webapp
""",
"""
CREATE INDEX db_worker_unseen_idx
ON txlog.db_worker_unseen(worker_id, created, id)
"""
]
def apply(store):
"""Apply the patch"""
for statement in SQL:
store.execute(statement)
|
daaoling/KBEngine-LearnNote
|
kbengine_demos_assets/scripts/cell/kbengine.py
|
Python
|
gpl-2.0
| 1,646
| 0.056657
|
# -*- coding: utf-8 -*-
import KBEngine
from KBEDebug import *
import dialogmgr
import skills
def onInit(isReload):
"""
KBEngine method.
Called after the engine has started and all scripts have finished initializing.
"""
DEBUG_MSG('onInit::isReload:%s' % isReload)
dialogmgr.onInit()
skills.onInit()
def onGlobalData(key, value):
"""
KBEngine method.
globalData changed.
"""
DEBUG_MSG('onGlobalData: %s' % key)
def onGlobalDataDel(key):
"""
KBEngine method.
globalData entry deleted.
"""
DEBUG_MSG('onDelGlobalData: %s' % key)
def onCellAppData(key, value):
"""
KBEngine method.
cellAppData changed.
"""
DEBUG_MSG('onCellAppData: %s' % key)
def onCellAppDataDel(key):
"""
KBEngine method.
cellAppData entry deleted.
"""
DEBUG_MSG('onCellAppDataDel: %s' % key)
def onSpaceData( spaceID, key, value ):
"""
KBEngine method.
spaceData changed.
@spaceID: the data was set in the space with this spaceID.
@key: the key that was set.
@value: the value that was set; None if the value was deleted.
"""
DEBUG_MSG('onSpaceData: spaceID=%s, key=%s, value=%s.' % (spaceID, key, value))
def onSpaceGeometryLoaded(spaceID, mapping):
"""
KBEngine method.
Called when part or all of a space's chunk data has finished loading;
exactly which part depends on the range this cell is responsible for.
"""
DEBUG_MSG('onSpaceGeometryLoaded: spaceID=%s, mapping=%s.' % (spaceID, mapping))
def onAllSpaceGeometryLoaded(spaceID, isBootstrap, mapping):
"""
KBEngine method.
Called when part or all of a space's chunk data has finished loading;
exactly which part depends on the range this cell is responsible for.
"""
DEBUG_MSG('onAllSpaceGeometryLoaded: spaceID=%s, isBootstrap=%i, mapping=%s.' % (spaceID, isBootstrap, mapping))
|
ComputerNetworks-UFRGS/OpERA
|
python/algorithm/qa_test.py
|
Python
|
apache-2.0
| 3,935
| 0.00432
|
"""
Copyright 2013 OpERA
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
#!/usr/bin/python
## @package algorithm
# ::TODO:: Discover how to include patches externally
import sys
import os
path = os.path.abspath(os.path.join(os.path.dirname(__file__), '../'))
sys.path.insert(0, path)
import unittest
import random
# Modules tested
from feedbackAlgorithm import FeedbackAlgorithm, ExponentialTimeFeedback, KunstTimeFeedback
# Other modules needed
from device import radioDevice
from abstractAlgorithm import AbstractAlgorithm
class QaAlgorithm(unittest.TestCase):
"""
Test algorithm module.
"""
def test_feedback_001(self):
"""
Test the feedback algorithm.
"""
mi = 1
ma = 256
base = 3
obj = ExponentialTimeFeedback(min_time=mi,
max_time=ma,
base=base
)
# Initial state.
self.assertEqual(False, obj.feedback())
obj.wait() # wait = 1
# 3 ^ 0 == 1 (wait is 1)
self.assertEqual(True, obj.feedback())
# Test if it got back correctly.
self.assertEqual(False, obj.feedback())
# Increase the sensing time: 3^1 = 3.
obj.increase_time()
self.assertEqual(False, obj.feedback())
obj.wait() # wait = 1
self.assertEqual(False, obj.feedback())
obj.wait() # wait = 2
obj.wait() # wait = 3
self.assertEqual(True, obj.feedback())  # wait gets back to 0
self.assertEqual(False, obj.feedback())
obj.decrease_time()  # reset time 3^0 = 1
obj.wait() # wait = 1
self.assertEqual(True, obj.feedback())  # wait gets back to 0
def test_feedback_002(self):
"""
Test the feedback algorithm
"""
obj = KunstTimeFeedback()
# Initial state.
self.assertEqual(False, obj.feedback())
obj.wait() # wait = 1
# 2 ^ 0 == 1
# wait = 0
self.assertEqual(True, obj.feedback())
# Increase the sensing time: 2^1 = 2.
obj.increase_time()
self.assertEqual(False, obj.feedback())
obj.wait() # wait = 1
self.assertEqual(False, obj.feedback())
obj.wait() # wait = 2
self.assertEqual(True, obj.feedback())  # wait gets back to 0
self.assertEqual(False, obj.feedback())
obj.wait() # wait = 1
obj.wait() # wait = 2
obj.wait()  # wait = 3
obj.wait() # wait = 4
obj.increase_time() # 2^2 = 4
self.assertEqual(True, obj.feedback())  # wait gets back to 0
self.assertEqual(False, obj.feedback())
obj.decrease_time() # Should be 2^1 = 2
obj.wait()
obj.wait()
self.assertEqual(True, obj.feedback())  # wait gets back to 0
self.assertEqual(False, obj.feedback())
if __name__ == '__main__':
unittest.main()
|
cangencer/hazelcast-python-client
|
hazelcast/protocol/codec/semaphore_init_codec.py
|
Python
|
apache-2.0
| 1,147
| 0.000872
|
from hazelcast.serialization.bits import *
from hazelcast.protocol.client_message import ClientMessage
from hazelcast.protocol.custom_codec import *
from hazelcast.util import ImmutableLazyDataList
from hazelcast.protocol.codec.semaphore_message_type import *
REQUEST_TYPE = SEMAPHORE_INIT
RESPONSE_TYPE = 101
RETRYABLE = False
def calculate_size(name, permits):
""" Calculates the request payload size"""
data_size = 0
data_size += calculate_size_str(name)
data_size += INT_SIZE_IN_BYTES
return data_size
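# Illustrative (not part of the generated codec): the payload is the encoded
# name plus a 4-byte permits integer, so for any name/permits pair
#   calculate_size('sem', 10) == calculate_size_str('sem') + INT_SIZE_IN_BYTES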
def encode_request(name, permits):
""" Encode request into client_message"""
client_message = ClientMessage(payload_size=calculate_size(name, permits))
client_message.set_message_type(REQUEST_TYPE)
client_message.set_retryable(RETRYABLE)
client_message.append_str(name)
client_message.append_int(permits)
client_message.update_frame_length()
return client_message
def decode_response(client_message, to_object=None):
""" Decode response from client message"""
parameters = dict(response=None)
parameters['response'] = client_message.read_bool()
return parameters
|
galaxyproject/pulsar
|
test/authorization_test.py
|
Python
|
apache-2.0
| 1,340
| 0.002239
|
from pulsar.tools.authorization import get_authorizer
from .test_utils import get_test_toolbox, TestCase
def test_allow_any_authorization():
authorizer = get_authorizer(None)
authorization = authorizer.get_authorization('tool1')
authorization.authorize_setup()
authorization.authorize_tool_file('cow', '#!/bin/bash\necho "Hello World!"')
class ToolBasedAuthorizationTestCase(TestCase):
def setUp(self):
self.toolbox = get_test_toolbox()
self.authorizer = get_authorizer(self.toolbox)
def test_valid_setup_passes(self):
self.authorizer.get_authorization('tool1').authorize_setup()
def test_invalid_setup_fails(self):
with self.unauthorized_expectation():
self.authorizer.get_authorization('tool2').authorize_setup()
def test_valid_tool_file_passes(self):
authorization = self.authorizer.get_authorization('tool1')
authorization.authorize_tool_file('tool1_wrapper.py', 'print \'Hello World!\'\n')
def test_invalid_tool_file_fails(self):
authorization = self.authorizer.get_authorization('tool1')
with self.unauthorized_expectation():
authorization.authorize_tool_file('tool1_wrapper.py', '#!/bin/sh\nrm -rf /valuable/data')
def unauthorized_expectation(self):
return self.assertRaises(Exception)
|
efarres/GoIPbus
|
cactuscore/softipbus/scripts/ctp6_integration_patterns.py
|
Python
|
gpl-2.0
| 325
| 0.006154
|
# Map the oRSC fiber indices to CTP fiber indices
FIBER_MAP = {
24: 0x5,
25: 0x4,
26: 0x8,
27: 0xb,
28: 0x6,
29: 0x7
}
from integration_patterns import pattern as orsc_pattern
def pattern(link):
if link in FIBER_MAP:
return orsc_pattern(FIBER_MAP[link]-1)
return orsc_pattern(link)
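# Illustrative (not part of the original script): mapped oRSC links are
# translated and shifted, unmapped links pass straight through, e.g.
#   pattern(24) == orsc_pattern(0x5 - 1)
#   pattern(3)  == orsc_pattern(3)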
|
wangzitian0/BOJ-V4
|
submission/views.py
|
Python
|
mit
| 2,454
| 0.000407
|
from rest_framework import viewsets
from rest_framework.permissions import IsAuthenticated
from .models import Submission
from .serializers import SubmissionSerializer
from django.views.generic import ListView, DetailView
from django.views.generic.edit import CreateView
from django.utils.decorators import method_decorator
from django.contrib.auth.decorators import login_required
from problem.models import Problem
from django.shortcuts import get_object_or_404
from .forms import SubmissionForm
from django_tables2 import RequestConfig
from .tables import SubmissionTable
# from guardian.shortcuts import get_objects_for_user
class SubmissionViewSet(viewsets.ModelViewSet):
queryset = Submission.objects.all()
serializer_class = SubmissionSerializer
permission_classes = (IsAuthenticated,)
class SubmissionListView(ListView):
model = Submission
def get_context_data(self, **kwargs):
context = super(SubmissionListView, self).get_context_data(**kwargs)
submissions_table = SubmissionTable(self.get_queryset())
RequestConfig(self.request).configure(submissions_table)
# add filter here
context['submissions_table'] = submissions_table
return context
class SubmissionDetailView(DetailView):
model = Submission
def get_context_data(self, **kwargs):
context = super(SubmissionDetailView, self).get_context_data(**kwargs)
return context
class SubmissionCreateView(CreateView):
model = Submission
form_class = SubmissionForm
template_name_suffix = '_create_form'
@method_decorator(login_required)
def dispatch(self, request, pid=None, *args, **kwargs):
pid = self.kwargs['pid']
self.problem = get_object_or_404(Problem.objects.all(), pk=pid)
return super(SubmissionCreateView, self).dispatch(request, *args, **kwargs)
def get_form_kwargs(self):
kw = super(SubmissionCreateView, self).get_form_kwargs()
kw['qs'] = self.problem.allowed_lang.all()
return kw
def get_context_data(self, **kwargs):
context = super(SubmissionCreateView, self).get_context_data(**kwargs)
context['problem'] = self.problem
return context
def form_valid(self, form):
self.object = form.save(commit=False)
self.object.problem = self.problem
self.object.user = self.request.user
return super(SubmissionCreateView, self).form_valid(form)
|
python-poetry/poetry-core
|
src/poetry/core/spdx/license.py
|
Python
|
mit
| 5,634
| 0.000887
|
from collections import namedtuple
from typing import Optional
class License(namedtuple("License", "id name is_osi_approved is_deprecated")):
id: str
name: str
is_osi_approved: bool
is_deprecated: bool
CLASSIFIER_SUPPORTED = {
# Not OSI Approved
"Aladdin",
"CC0-1.0",
"CECILL-B",
"CECILL-C",
"NPL-1.0",
"NPL-1.1",
# OSI Approved
"AFPL",
"AFL-1.1",
"AFL-1.2",
"AFL-2.0",
"AFL-2.1",
"AFL-3.0",
"Apache-1.1",
"Apache-2.0",
"APSL-1.1",
"APSL-1.2",
"APSL-2.0",
"Artistic-1.0",
"Artistic-2.0",
"AAL",
"AGPL-3.0",
"AGPL-3.0-only",
"AGPL-3.0-or-later",
"BSL-1.0",
"BSD-2-Clause",
"BSD-3-Clause",
"CDDL-1.0",
"CECILL-2.1",
"CPL-1.0",
"EFL-1.0",
"EFL-2.0",
"EPL-1.0",
"EPL-2.0",
"EUPL-1.1",
"EUPL-1.2",
"GPL-2.0",
"GPL-2.0+",
"GPL-2.0-only",
"GPL-2.0-or-later",
"GPL-3.0",
"GPL-3.0+",
"GPL-3.0-only",
"GPL-3.0-or-later",
"LGPL-2.0",
"LGPL-2.0+",
"LGPL-2.0-only",
"LGPL-2.0-or-later",
"LGPL-3.0",
"LGPL-3.0+",
"LGPL-3.0-only",
"LGPL-3.0-or-later",
"MIT",
"MPL-1.0",
"MPL-1.1",
"MPL-1.2",
"Nokia",
"W3C",
"ZPL-1.0",
"ZPL-2.0",
"ZPL-2.1",
}
CLASSIFIER_NAMES = {
# Not OSI Approved
"AFPL": "Aladdin Free Public License (AFPL)",
"CC0-1.0": "CC0 1.0 Universal (CC0 1.0) Public Domain Dedication",
"CECILL-B": "CeCILL-B Free Software License Agreement (CECILL-B)",
"CECILL-C": "CeCILL-C Free Software License Agreement (CECILL-C)",
"NPL-1.0": "Netscape Public License (NPL)",
"NPL-1.1": "Netscape Public License (NPL)",
# OSI Approved
"AFL-1.1": "Academic Free License (AFL)",
"AFL-1.2": "Academic Free License (AFL)",
"AFL-2.0": "Academic Free License (AFL)",
"AFL-2.1": "Academic Free License (AFL)",
"AFL-3.0": "Academic Free License (AFL)",
"Apache-1.1": "Apache Software License",
"Apache-2.0": "Apache Software License",
"APSL-1.1": "Apple Public Source License",
"APSL-1.2": "Apple Public Source License",
"APSL-2.0": "Apple Public Source License",
"Artistic-1.0": "Artistic License",
"Artistic-2.0": "Artistic License",
"AAL": "Attribution Assurance License",
"AGPL-3.0": "GNU Affero General Public License v3",
"AGPL-3.0-only": "GNU Affero General Public License v3",
"AGPL-3.0-or-later": "GNU Affero General Public License v3 or later (AGPLv3+)",
"BSL-1.0": "Boost Software License 1.0 (BSL-1.0)",
"BSD-2-Clause": "BSD License",
"BSD-3-Clause": "BSD License",
"CDDL-1.0": "Common Development and Distribution License 1.0 (CDDL-1.0)",
"CECILL-2.1": "CEA CNRS Inria Logiciel Libre License, version 2.1 (CeCILL-2.1)",
"CPL-1.0": "Common Public License",
"EPL-1.0": "Eclipse Public License 1.0 (EPL-1.0)",
"EFL-1.0": "Eiffel Forum License",
"EFL-2.0": "Eiffel Forum License",
"EUPL-1.1": "European Union Public Licence 1.1 (EUPL 1.1)",
"EUPL-1.2": "European Union Public Licence 1.2 (EUPL 1.2)",
"GPL-2.0": "GNU General Public License v2 (GPLv2)",
"GPL-2.0-only": "GNU General Public License v2 (GPLv2)",
"GPL-2.0+": "GNU General Public License v2 or later (GPLv2+)",
"GPL-2.0-or-later": "GNU General Public License v2 or later (GPLv2+)",
"GPL-3.0": "GNU General Public License v3 (GPLv3)",
"GPL-3.0-only": "GNU General Public License v3 (GPLv3)",
"GPL-3.0+": "GNU General Public License v3 or later (GPLv3+)",
"GPL-3.0-or-later": "GNU General Public License v3 or later (GPLv3+)",
"LGPL-2.0": "GNU Lesser General Public License v2 (LGPLv2)",
"LGPL-2.0-only": "GNU Lesser General Public License v2 (LGPLv2)",
"LGPL-2.0+": "GNU Lesser General Public License v2 or later (LGPLv2+)",
"LGPL-2.0-or-later": "GNU Lesser General Public License v2 or later (LGPLv2+)",
"LGPL-3.0": "GNU Lesser General Public License v3 (LGPLv3)",
"LGPL-3.0-only": "GNU Lesser General Public License v3 (LGPLv3)",
"LGPL-3.0+": "GNU Lesser General Public License v3 or later (LGPLv3+)",
"LGPL-3.0-or-later": "GNU Lesser General Public License v3 or later (LGPLv3+)",
"MPL-1.0": "Mozilla Public License 1.0 (MPL)",
"MPL-1.1": "Mozilla Public License 1.1 (MPL 1.1)",
"MPL-2.0": "Mozilla Public License 2.0 (MPL 2.0)",
"W3C": "W3C License",
"ZPL-1.1": "Zope Public License",
"ZPL-2.0": "Zope Public License",
"ZPL-2.1": "Zope Public License",
}
@property
def classifier(self) -> str:
parts = ["License"]
if self.is_osi_approved:
parts.append("OSI Approved")
name = self.classifier_name
if name is not None:
parts.append(name)
return " :: ".join(parts)
@property
def classifier_name(self) -> Optional[str]:
if self.id not in self.CLASSIFIER_SUPPORTED:
if self.is_osi_approved:
return None
return "Other/Proprietary License"
if self.id in self.CLASSIFIER_NAMES:
return self.CLASSIFIER_NAMES[self.id]
return self.name
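# Illustrative (not part of the original module): "MIT" is supported but has
# no special classifier name, so the license's own name is used:
#   License("MIT", "MIT License", True, False).classifier
#   -> 'License :: OSI Approved :: MIT License'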
|
iffy/parsefin
|
parsefin/error.py
|
Python
|
apache-2.0
| 59
| 0.050847
|
class Error(Exception): pass
class MissingData(Error):
    pass
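# Illustrative usage (not part of the original module; the message text is
# hypothetical):
#   raise MissingData('statement has no closing balance')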
|
PapaCharlie/WolframAlphaLookup
|
WolframAlphaLookup.py
|
Python
|
mit
| 2,331
| 0.005148
|
import sublime, sublime_plugin, requests
from xml.etree import ElementTree as ET
class WolframAlphaLookupCommand(sublime_plugin.WindowCommand):
def run(self):
settings = sublime.load_settings("Preferences.sublime-settings")
if settings.has("wolfram_api_key"):
API_KEY = settings.get("wolfram_api_key")
for region in self.window.active_view().sel():
    if not region.empty():
query = self.window.active_view().substr(region)
else:
query = self.window.active_view().substr(self.window.active_view().line(region))
query = query.strip()
r = requests.get("http://api.wolframalpha.com/v2/query", params={
"input": query,
"appid": API_KEY
})
root = ET.fromstring(r.text)
if root.get('success') == 'true':
items = list()
for pod in root.iter('pod'):
title = pod.attrib.get('title')
plaintext = pod.find('./subpod/plaintext').text
if title and plaintext:
items.append([title, plaintext])
def on_select(index):
if index > -1:
print(items[index])
print(region)
self.window.active_view().run_command("insert_result", {"data": items[index][1]})
self.window.show_quick_panel(items, on_select)
else:
sublime.error_message("Wolfram|Alpha could not understand your query!")
break
else:
sublime.error_message("Please add a \"wolfram_api_key\" to the settings!")
class InsertResultCommand(sublime_plugin.TextCommand):
def run(self, edit, data):
for region in self.view.sel():
if region.empty():
line = self.view.line(region)
self.view.insert(edit, line.end(), '\n' + (data[:-1] if data[-1] == '\n' else data))
else:
self.view.insert(edit, region.end(), data)
break
|
HyperloopTeam/FullOpenMDAO
|
cantera-2.0.2/samples/python/flames/adiabatic_flame/adiabatic_flame.py
|
Python
|
gpl-2.0
| 2,623
| 0.020206
|
#
# ADIABATIC_FLAME - A freely-propagating, premixed methane/air flat
# flame with multicomponent transport properties
#
from Cantera import *
from Cantera.OneD import *
from Cantera.OneD.FreeFlame import FreeFlame
################################################################
#
# parameter values
#
p = OneAtm # pressure
tin = 300.0 # unburned gas temperature
mdot = 0.04 # kg/m^2/s
comp = 'CH4:0.45, O2:1, N2:3.76' # premixed gas composition
initial_grid = [0.0, 0.001, 0.01, 0.02, 0.029, 0.03] # m
tol_ss = [1.0e-5, 1.0e-9] # [rtol atol] for steady-state
# problem
tol_ts = [1.0e-5, 1.0e-9] # [rtol atol] for time stepping
loglevel = 1 # amount of diagnostic output (0
# to 5)
refine_grid = 1 # 1 to enable refinement, 0 to
# disable
gas = GRI30('Mix')
gas.addTransportModel('Multi')
# set its state to that of the unburned gas
gas.setState_TPX(tin, p, comp)
f = FreeFlame(gas = gas, grid = initial_grid, tfix = 600.0)
# set the upstream properties
f.inlet.set(mole_fractions = comp, temperature = tin)
f.set(tol = tol_ss, tol_time = tol_ts)
f.showSolution()
f.set(energy = 'off')
f.setRefineCriteria(ratio = 10.0, slope = 1, curve = 1)
f.setMaxJacAge(50, 50)
f.setTimeStep(1.0e-5, [2, 5, 10, 20, 50])
f.solve(loglevel, refine_grid)
f.save('ch4_adiabatic.xml','no_energy',
'solution with the energy equation disabled')
f.set(energy = 'on')
f.setRefineCriteria(ratio = 3.0, slope = 0.1, curve = 0.2)
f.solve(loglevel, refine_grid)
f.save('ch4_adiabatic.xml','energy',
'solution with the energy equation enabled')
print 'mixture-averaged flamespeed = ',f.u()[0]
gas.switchTransportModel('Multi')
f.flame.setTransportModel(gas)
f.solve(loglevel, refine_grid)
f.save('ch4_adiabatic.xml','energy_multi',
'solution with the energy equation enabled and multicomponent transport')
# write the velocity, temperature, density, and mole fractions to a CSV file
z = f.flame.grid()
T = f.T()
u = f.u()
V = f.V()
fcsv = open('adiabatic_flame.csv','w')
writeCSV(fcsv, ['z (m)', 'u (m/s)', 'V (1/s)', 'T (K)', 'rho (kg/m3)']
+ list(gas.speciesNames()))
for n in range(f.flame.nPoints()):
f.setGasState(n)
writeCSV(fcsv, [z[n], u[n], V[n], T[n], gas.density()]
+list(gas.moleFractions()))
fcsv.close()
print 'solution saved to adiabatic_flame.csv'
print 'multicomponent flamespeed = ',u[0]
f.showStats()
|
crmccreary/openerp_server
|
openerp/addons/web_diagram/controllers/main.py
|
Python
|
agpl-3.0
| 4,665
| 0.004287
|
try:
# embedded
import openerp.addons.web.common.http as openerpweb
from openerp.addons.web.controllers.main import View
except ImportError:
# standalone
import web.common.http as openerpweb
from web.controllers.main import View
class DiagramView(View):
_cp_path = "/web_diagram/diagram"
@openerpweb.jsonrequest
def load(self, req, model, view_id):
fields_view = self.fields_view_get(req, model, view_id, 'diagram')
return {'fields_view': fields_view}
@openerpweb.jsonrequest
def get_diagram_info(self, req, id, model, node, connector,
src_node, des_node, label, **kw):
visible_node_fields = kw.get('visible_node_fields',[])
invisible_node_fields = kw.get('invisible_node_fields',[])
node_fields_string = kw.get('node_fields_string',[])
connector_fields = kw.get('connector_fields',[])
connector_fields_string = kw.get('connector_fields_string',[])
bgcolors = {}
shapes = {}
bgcolor = kw.get('bgcolor','')
shape = kw.get('shape','')
if bgcolor:
for color_spec in bgcolor.split(';'):
if color_spec:
colour, color_state = color_spec.split(':')
bgcolors[colour] = color_state
if shape:
for shape_spec in shape.split(';'):
if shape_spec:
shape_colour, shape_color_state = shape_spec.split(':')
shapes[shape_colour] = shape_color_state
ir_view = req.session.model('ir.ui.view')
graphs = ir_view.graph_get(
int(id), model, node, connector, src_node, des_node, label,
(140, 180), req.session.context)
nodes = graphs['nodes']
transitions = graphs['transitions']
isolate_nodes = {}
for blnk_node in graphs['blank_nodes']:
isolate_nodes[blnk_node['id']] = blnk_node
else:
y = map(lambda t: t['y'],filter(lambda x: x['y'] if x['x']==20 else None, nodes.values()))
y_max = (y and max(y)) or 120
connectors = {}
list_tr = []
for tr in transitions:
list_tr.append(tr)
connectors.setdefault(tr, {
'id': tr,
's_id': transitions[tr][0],
'd_id': transitions[tr][1]
})
connector_tr = req.session.model(connector)
connector_ids = connector_tr.search([('id', 'in', list_tr)], 0, 0, 0, req.session.context)
data_connectors =connector_tr.read(connector_ids, connector_fields, req.session.context)
for tr in data_connectors:
transition_id = str(tr['id'])
_sourceid, label = graphs['label'][transition_id]
t = connectors[transition_id]
t.update(
source=tr[src_node][1],
destination=tr[des_node][1],
options={},
signal=label
)
for i, fld in enumerate(connector_fields):
t['options'][connector_fields_string[i]] = tr[fld]
fields = req.session.model('ir.model.fields')
field_ids = fields.search([('model', '=', model), ('relation', '=', node)], 0, 0, 0, req.session.context)
field_data = fields.read(field_ids, ['relation_field'], req.session.context)
node_act = req.session.model(node)
search_acts = node_act.search([(field_data[0]['relation_field'], '=', id)], 0, 0, 0, req.session.context)
data_acts = node_act.read(search_acts, invisible_node_fields + visible_node_fields, req.session.context)
for act in data_acts:
n = nodes.get(str(act['id']))
if not n:
n = isolate_nodes.get(act['id'], {})
y_max += 140
n.update(x=20, y=y_max)
nodes[act['id']] = n
n.update(
id=act['id'],
color='white',
options={}
)
for color, expr in bgcolors.items():
if eval(expr, act):
n['color'] = color
for shape, expr in shapes.items():
if eval(expr, act):
n['shape'] = shape
for i, fld in enumerate(visible_node_fields):
n['options'][node_fields_string[i]] = act[fld]
id_model = req.session.model(model).read([id],['name'], req.session.context)[0]['name']
return dict(nodes=nodes,
conn=connectors,
id_model=id_model,
parent_field=graphs['node_parent_field'])
|
myangeline/pygame
|
itgame/itgame.py
|
Python
|
apache-2.0
| 7,541
| 0.000959
|
# _*_coding:utf-8_*_
import math
import random
__author__ = 'Administrator'
import pygame
pygame.init()
width, height = 640, 480
keys = [False, False, False, False]
playerpos = [100, 240]
# Track the player's shooting accuracy: shots fired and shots hit
acc = [0, 0]
arrows = []
# Hit rate
accuracy = 0
# Track the badgers' data
badtimer = 100
rest = 0
badguys = [[640, 100]]
healthvalue = 194
screen = pygame.display.set_mode((width, height), 0, 32)
pygame.display.set_caption(u'城堡保卫战'.encode('utf-8'))
player = pygame.image.load('resources/images/dude.png')
player_w, player_h = player.get_width(), player.get_height()
grass = pygame.image.load('resources/images/grass.png')
castle = pygame.image.load('resources/images/castle.png')
arrow = pygame.image.load('resources/images/bullet.png')
bulletrect = pygame.Rect(arrow.get_rect())
badguyimg1 = pygame.image.load('resources/images/badguy.png')
badguyimg = badguyimg1
badrect = pygame.Rect(badguyimg.get_rect())
healthbar = pygame.image.load('resources/images/healthbar.png')
health = pygame.image.load('resources/images/health.png')
youwin = pygame.image.load('resources/images/youwin.png')
gameover = pygame.image.load('resources/images/gameover.png')
# Load audio
pygame.mixer.init()
hit = pygame.mixer.Sound('resources/audio/explode.wav')
enemy = pygame.mixer.Sound('resources/audio/enemy.wav')
shoot = pygame.mixer.Sound('resources/audio/shoot.wav')
hit.set_volume(0.05)
enemy.set_volume(0.05)
shoot.set_volume(0.05)
# Set background music and loop it
pygame.mixer.music.load('resources/audio/moonlight.wav')
pygame.mixer.music.play(-1, 0.0)
pygame.mixer.music.set_volume(0.25)
running = 1
exitcode = 0
# Fullscreen
fullscreen = False
while running:
badtimer -= 1
screen.fill(0)
for x in range(width / grass.get_width() + 1):
for y in range(height / grass.get_height() + 1):
screen.blit(grass, (x * 100, y * 100))
for i in range(4):
screen.blit(castle, (0, i * 105 + 30))
# screen.blit(player, playerpos)
mousepos = pygame.mouse.get_pos()
angle = math.atan2(mousepos[1] - (playerpos[1] + 32), mousepos[0] - (playerpos[0] + 26))
playerrot = pygame.transform.rotate(player, 360 - angle * 57.29)
playertranspos = (playerpos[0] - playerrot.get_rect().width / 2, playerpos[1] - playerrot.get_rect().height / 2)
screen.blit(playerrot, playertranspos)
# Draw the arrows
for bullet in arrows:
index = 0
velx = math.cos(bullet[0]) * 10
vely = math.sin(bullet[0]) * 10
bullet[1] += velx
bullet[2] += vely
if bullet[1] < -64 or bullet[1] > 640 or bullet[2] < -64 or bullet[2] > 480:
arrows.pop(index)
index += 1
for projectile in arrows:
aow = pygame.transform.rotate(arrow, 360 - projectile[0] * 57.29)
screen.blit(aow, (projectile[1], projectile[2]))
# Spawn badgers
if badtimer == 0:
badguys.append([640, random.randint(50, 430)])
badtimer = 100 - (rest * 2)
rest = 35 if rest >= 35 else rest + 5
index = 0
for badguy in badguys:
if badguy[0] < -64:
badguys.pop(index)
# Attack the castle
badrect.top = badguy[1]
badrect.left = badguy[0]
if badrect.left < 64:
hit.play()
healthvalue -= random.randint(5, 20)
badguys.pop(index)
index_bullet = 0
for bullet in arrows:
bulletrect.left = bullet[1]
bulletrect.top = bullet[2]
if badrect.colliderect(bulletrect):
enemy.play()
acc[0] += 1
badguys.pop(index)
arrows.pop(index_bullet)
index_bullet += 1
badguy[0] -= 7
index += 1
for badguy in badguys:
screen.blit(badguyimg, badguy)
# Draw the clock
font = pygame.font.Font(None, 24)
time = pygame.time.get_ticks()
survivedtext = font.render(str((90000 - time) / 60000) + ":" + str((90000 - time) / 1000 % 60).zfill(2), True,
(0, 0, 0))
textrect = survivedtext.get_rect()
textrect.topright = [635, 5]
screen.blit(survivedtext, textrect)
# Draw the health bar
screen.blit(healthbar, (5, 5))
for h in range(healthvalue):
screen.blit(health, (h + 8, 8))
# Update the screen (update and flip seem to behave about the same here)
# pygame.display.flip()
pygame.display.update()
for event in pygame.event.get():
if event.type == pygame.QUIT:
pygame.quit()
exit(0)
if event.type == pygame.KEYDOWN:
if event.key == pygame.K_w:
keys[0] = True
elif event.key == pygame.K_a:
keys[1] = True
elif event.key == pygame.K_s:
keys[2] = True
elif event.key == pygame.K_d:
keys[3] = True
if event.key == pygame.K_f:
fullscreen = not fullscreen
if fullscreen:
screen = pygame.display.set_mode((width, height), pygame.FULLSCREEN, 32)
else:
screen = pygame.display.set_mode((width, height), 0, 32)
if event.type == pygame.KEYUP:
if event.key == pygame.K_w:
keys[0] = False
elif event.key == pygame.K_a:
keys[1] = False
elif event.key == pygame.K_s:
keys[2] = False
elif event.key == pygame.K_d:
keys[3] = False
if event.type == pygame.MOUSEBUTTONDOWN:
shoot.play()
position = pygame.mouse.get_pos()
acc[1] += 1
arrows.append([math.atan2(position[1] - (playertranspos[1] + 32), position[0] - (playertranspos[0] + 26)),
playertranspos[0] + 32, playertranspos[1] + 32])
if keys[0]:
playerpos[1] -= 5
if playerpos[1] < 0:
playerpos[1] = 0
elif keys[2]:
playerpos[1] += 5
if playerpos[1] > height - player_h:
playerpos[1] = height - player_h
if keys[1]:
playerpos[0] -= 5
if playerpos[0] < 0:
playerpos[0] = 0
elif keys[3]:
playerpos[0] += 5
if playerpos[0] > width - player_w:
playerpos[0] = width - player_w
# Decide win or lose
if pygame.time.get_ticks() >= 90000:
running = 0
# win
exitcode = 1
if healthvalue <= 0:
running = 0
# lose
exitcode = 0
if acc[1] != 0:
accuracy = round(acc[0] * 1.0 / acc[1] * 100, 2)
pygame.font.init()
# font = pygame.font.Font(None, 24)
# To display Chinese text a Chinese-capable font must be set; either a font file or a system font works
font = pygame.font.SysFont('楷体', 24)
if exitcode:
text = font.render(u'命中率:' + str(accuracy) + '%', True, (0, 255, 0))
textrect = text.get_rect()
textrect.centerx = screen.get_rect().centerx
textrect.centery = screen.get_rect().centery + 24
screen.blit(youwin, (0, 0))
screen.blit(text, textrect)
else:
text = font.render(u'命中率:' + str(accuracy) + '%', True, (255, 0, 0))
textrect = text.get_rect()
textrect.centerx = screen.get_rect().centerx
textrect.centery = screen.get_rect().centery + 24
screen.blit(gameover, (0, 0))
screen.blit(text, textrect)
while True:
for event in pygame.event.get():
if event.type == pygame.QUIT:
pygame.quit()
exit(0)
pygame.display.flip()
|
stack-of-tasks/rbdlpy
|
tutorial/lib/python2.7/site-packages/OpenGL/raw/GL/ARB/clear_buffer_object.py
|
Python
|
lgpl-3.0
| 845
| 0.04142
|
'''Autogenerated by xml_generate script, do not edit!'''
from OpenGL import platform as _p, arrays
# Code generation uses this
from OpenGL.raw.GL import _types as _cs
# End users want this...
from OpenGL.raw.GL._types import *
from OpenGL.raw.GL import _errors
from OpenGL.constant import Constant as _C
import ctypes
_EXTENSION_NAME = 'GL_ARB_clear_buffer_object'
def _f( function ):
return _p.createFunction( function,_p.PLATFORM.GL,'GL_ARB_clear_buffer_object',error_checker=_errors._error_checker)
@_f
@_p.types(None,_cs.GLenum,_cs.GLenum,_cs.GLenum,_cs.GLenum,ctypes.c_void_p)
def glClearBufferData(target,internalformat,format,type,data):pass
@_f
@_p.types(None,_cs.GLenum,_cs.GLenum,_cs.GLintptr,_cs.GLsizeiptr,_cs.GLenum,_cs.GLenum,ctypes.c_void_p)
def glClearBufferSubData(target,internalformat,offset,size,format,type,data):pass
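# Illustrative call (not part of the generated wrapper; requires a current GL
# context and a bound buffer; per the GL spec, data=None zero-fills):
#   glClearBufferData(GL_ARRAY_BUFFER, GL_R8, GL_RED, GL_UNSIGNED_BYTE, None)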
|
Ziqi-Li/bknqgis
|
bokeh/examples/howto/server_embed/tornado_embed.py
|
Python
|
gpl-2.0
| 2,322
| 0.003015
|
from jinja2 import Environment, FileSystemLoader
import yaml
from tornado.ioloop import IOLoop
from tornado.web import RequestHandler
from bokeh.application import Application
from bokeh.application.handlers import FunctionHandler
from bokeh.embed import server_document
from bokeh.layouts import column
from bokeh.models import ColumnDataSource, Slider
from bokeh.plotting import figure
from bokeh.server.server import Server
from bokeh.themes import Theme
from bokeh.sampledata.sea_surface_temperature import sea_surface_temperature
env = Environment(loader=FileSystemLoader('templates'))
class IndexHandler(RequestHandler):
def get(self):
template = env.get_template('embed.html')
script = server_document('http://localhost:5006/bkapp')
self.write(template.render(script=script, template="Tornado"))
def modify_doc(doc):
df = sea_surface_temperature.copy()
source = ColumnDataSource(data=df)
plot = figure(x_axis_type='datetime', y_range=(0, 25), y_axis_label='Temperature (Celsius)',
title="Sea Surface Temperature at 43.18, -70.43")
plot.line('time', 'temperature', source=source)
def callback(attr, old, new):
if new == 0:
data = df
else:
data = df.rolling('{0}D'.format(new)).mean()
source.data = ColumnDataSource(data=data).data
slider = Slider(start=0, end=30, value=0, step=1, title="Smoothing by N Days")
slider.on_change('value', callback)
doc.add_root(column(slider, plot))
doc.theme = Theme(json=yaml.load("""
attrs:
Figure:
background_fill_color: "#DDDDDD"
outline_line_color: white
toolbar_location: above
height: 500
width: 800
Grid:
grid_line_dash: [6, 4]
grid_line_color: white
"""))
bokeh_app = Application(FunctionHandler(modify_doc))
io_loop = IOLoop.current()
server = Server({'/bkapp': bokeh_app}, io_loop=io_loop, extra_patterns=[('/', IndexHandler)])
server.start()
if __name__ == '__main__':
from bokeh.util.browser import view
print('Opening Tornado app with embedded Bokeh application on http://localhost:5006/')
io_loop.add_callback(view, "http://localhost:5006/")
io_loop.start()
|
Juniper/ceilometer
|
ceilometer/alarm/notifier/test.py
|
Python
|
apache-2.0
| 1,257
| 0
|
#
# Copyright 2013 eNovance
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Test alarm notifier."""
from ceilometer.alarm import notifier
class TestAlarmNotifier(notifier.AlarmNotifier):
"Test alarm notifier."""
def __init__(self):
self.notifications = []
def notify(self, action, alarm_id, alarm_name, severity,
previous, current, reason, reason_data):
self.notifications.append((action,
alarm_id,
alarm_name,
severity,
previous,
current,
reason,
reason_data))
|
inovtec-solutions/OpenERP
|
openerp/addons/smsfee/__openerp__.py
|
Python
|
agpl-3.0
| 859
| 0.003492
|
{
'name': 'SMS Fee',
'version': '1.0',
'author': 'Inovtec Solutions',
'category': 'SMS Fee Management',
'description': """This module is used for fee management in the Compas Management System.""",
'website': 'http://www.inovtec.com.pk',
'depends' : ['sms'],
'data': ['security/smsfee_security.xml',
'security/ir.model.access.csv',
'wizard/smsfee_wizard_update_fee_register.xml',
'wizard/smsfee_wizard_fee_reports.xml',
'wizard/smsfee_wizard_class_fee_receipt_unpaid.xml',
'wizard/smsfee_wizard_daily_fee_reports.xml',
'smsfee_report.xml',
'smsfee_view.xml',
'smsfee_menus.xml',
],
'demo': [],
'installable': True,
'application': True,
'auto_install': False,
'images': [],
}
|
ivanyu/rosalind
|
algorithmic_heights/inv/inv_logic.py
|
Python
|
mit
| 2,407
| 0
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
class InversionsCounter:
# Taken from the MER (merge) problem and modified
@staticmethod
def _merge_with_inv_counting(a1, a2):
result = []
invs = 0
i = 0
j = 0
while i < len(a1) or j < len(a2):
if i == len(a1):
result.extend(a2[j:])
break
if j == len(a2):
result.extend(a1[i:])
break
if a1[i] <= a2[j]:
result.append(a1[i])
i += 1
else:
result.append(a2[j])
j += 1
invs += len(a1[i:])
return result, invs
def _merge_sort_with_inv_counting(self, arr):
if len(arr) <= 1:
return arr, 0
m = len(arr) // 2
a1, invs1 = self._merge_sort_with_inv_counting(arr[:m])
a2, invs2 = self._merge_sort_with_inv_counting(arr[m:])
a_merged, invs_merge = self._merge_with_inv_counting(a1, a2)
total_inv = invs1 + invs2 + invs_merge
return a_merged, total_inv
def count(self, arr):
_, invs = self._merge_sort_with_inv_counting(arr)
return invs
if __name__ == "__main__":
import unittest
class MergeSortTestCase(unittest.TestCase):
def setUp(self):
self.inv_cnt = InversionsCounter()
def test_empty_array(self):
arr = []
self.assertEqual(self.inv_cnt.count(arr), 0)
def test_one_element_array(self):
arr = [6]
self.assertEqual(self.inv_cnt.count(arr), 0)
def test_sorted_array(self):
arr = [1, 2, 3, 4, 5, 6, 7, 8, 9]
self.assertEqual(self.inv_cnt.count(arr), 0)
def test_simple_reversed(self):
arr = [2, 1]
self.assertEqual(self.inv_cnt.count(arr), 1)
def test_complex_reversed(self):
arr = [4, 3, 2, 1]
self.assertEqual(self.inv_cnt.count(arr), 6)
def test_all_equals(self):
arr = [1, 1, 1, 1, 1]
self.assertEqual(self.inv_cnt.count(arr), 0)
def test_some_equals(self):
arr = [5, 5, 1, 2]
self.assertEqual(self.inv_cnt.count(arr), 4)
def test_mixed(self):
arr = [5, 1, 2, 4, 6, 3]
self.assertEqual(self.inv_cnt.count(arr), 6)
unittest.main()
|
yancharkin/games_nebula_goglib_scripts
|
the_temple_of_elemental_evil/settings.py
|
Python
|
gpl-3.0
| 36,924
| 0.010373
|
import sys, os
import gi
gi.require_version('Gtk', '3.0')
from gi.repository import Gtk, Gdk, GLib
import gettext
import imp
nebula_dir = os.getenv('NEBULA_DIR')
modules_dir = nebula_dir + '/modules'
set_visuals = imp.load_source('set_visuals', modules_dir + '/set_visuals.py')
gettext.bindtextdomain('games_nebula', nebula_dir + '/locale')
gettext.textdomain('games_nebula')
_ = gettext.gettext
link_co8_modpack_std = "http://www.moddb.com/downloads/start/73404?referer=http%3A%2F%2Fwww.moddb.com%2Fmods%2Fcircle-of-eight-modpack"
link_co8_modpack_nc = "http://www.moddb.com/downloads/start/73406?referer=http%3A%2F%2Fwww.moddb.com%2Fmods%2Fcircle-of-eight-modpack"
link_co8_kob = "http://www.moddb.com/downloads/start/73408?referer=http%3A%2F%2Fwww.moddb.com%2Fmods%2Fthe-keep-on-the-borderlands"
link_bg_portraits_std = "http://files.co8.org/mods/TFE-X%20Modules/Portrait%20Packs/Baldur's%20Gate%20Portrait%20Pack%208.1.0%20Standard%20Edition%20Setup.exe"
link_bg_portraits_nc = "http://files.co8.org/mods/TFE-X%20Modules/Portrait%20Packs/Baldur's%20Gate%20Portrait%20Pack%208.1.0%20New%20Content%20Edition%20Setup.exe"
link_bg_portraits_kob = "http://files.co8.org/mods/TFE-X%20Modules/Portrait%20Packs/Baldur's%20Gate%20Portrait%20Pack%201.0.1%20Keep%20on%20the%20Borderlands%20Setup.exe"
link_id_portraits_std = "http://files.co8.org/mods/TFE-X%20Modules/Portrait%20Packs/Icewind%20Dale%20Portrait%20Pack%208.1.0%20Standard%20Edition%20Setup.exe"
link_id_portraits_nc = "http://files.co8.org/mods/TFE-X%20Modules/Portrait%20Packs/Icewind%20Dale%20Portrait%20Pack%208.1.0%20New%20Content%20Edition%20Setup.exe"
link_id_portraits_kob = "http://files.co8.org/mods/TFE-X%20Modules/Portrait%20Packs/Icewind%20Dale%20Portrait%20Pack%201.0.1%20Keep%20on%20the%20Borderlands%20Setup.exe"
link_ja2_portraits_std = "http://files.co8.org/mods/TFE-X%20Modules/Portrait%20Packs/Jagged%20Alliance%202%20Portrait%20Pack%208.1.0%20Standard%20Edition%20Setup.exe"
link_ja2_portraits_nc = "http://files.co8.org/mods/TFE-X%20Modules/Portrait%20Packs/Jagged%20Alliance%202%20Portrait%20Pack%208.1.0%20New%20Content%20Edition%20Setup.exe"
link_ja2_portraits_kob = "http://files.co8.org/mods/TFE-X%20Modules/Portrait%20Packs/Jagged%20Alliance%202%20Portrait%20Pack%201.0.1%20Keep%20on%20the%20Borderlands%20Setup.exe"
link_lr_portraits_std = "http://files.co8.org/mods/TFE-X%20Modules/Portrait%20Packs/Luis%20Royo%20Portrait%20Pack%208.1.0%20Standard%20Edition%20Setup.exe"
link_lr_portraits_nc = "http://files.co8.org/mods/TFE-X%20Modules/Portrait%20Packs/Luis%20Royo%20Portrait%20Pack%208.1.0%20New%20Content%20Edition%20Setup.exe"
link_lr_portraits_kob = "http://files.co8.org/mods/TFE-X%20Modules/Portrait%20Packs/Luis%20Royo%20Portrait%20Pack%201.0.1%20Keep%20on%20the%20Borderlands%20Setup.exe"
exe_co8_modpack_std = "Circle_of_Eight_Modpack_8.1.0_Standard_Edition_Setup.exe"
exe_co8_modpack_nc = "Circle_of_Eight_Modpack_8.1.0_New_Content_Edition_Setup.exe"
exe_co8_kob = "Keep_on_the_Borderlands_1.0.1_Setup.exe"
exe_bg_portraits_std = "Baldur's Gate Portrait Pack 8.1.0 Standard Edition Setup.exe"
exe_bg_portraits_nc = "Baldur's Gate Portrait Pack 8.1.0 New Content Edition Setup.exe"
exe_bg_portraits_kob = "Baldur's Gate Portrait Pack 1.0.1 Keep on the Borderlands Setup.exe"
exe_id_portraits_std = "Icewind Dale Portrait Pack 8.1.0 Standard Edition Setup.exe"
exe_id_portraits_nc = "Icewind Dale Portrait Pack 8.1.0 New Content Edition Setup.exe"
exe_id_portraits_kob = "Icewind Dale Portrait Pack 1.0.1 Keep on the Borderlands Setup.exe"
exe_ja2_portraits_std = "Jagged Alliance 2 Portrait Pack 8.1.0 Standard Edition Setup.exe"
exe_ja2_portraits_nc = "Jagged Alliance 2 Portrait Pack 8.1.0 New Content Edition Setup.exe"
exe_ja2_portraits_kob = "Jagged Alliance 2 Portrait Pack 1.0.1 Keep on the Borderlands Setup.exe"
exe_lr_portraits_std = "Luis Royo Portrait Pack 8.1.0 Standard Edition Setup.exe"
exe_lr_portraits_nc = "Luis Royo Portrait Pack 8.1.0 New Content Edition Setup.exe"
exe_lr_portraits_kob = "Luis Royo Portrait Pack 1.0.1 Keep on the Borderlands Setup.exe"
tfm_co8_modpack_std = ' '.join(exe_co8_modpack_std.split('_')[:-2]) + '.tfm'
tfm_co8_modpack_nc = ' '.join(exe_co8_modpack_nc.split('_')[:-2]) + '.tfm'
tfm_co8_kob = ' '.join(exe_co8_kob.split('_')[:-1]) + '.tfm'
tpp_bg_portraits_std = ' '.join(exe_bg_portraits_std.split(' ')[:-1]) + '.tpp'
tpp_id_portraits_std = ' '.join(exe_id_portraits_std.split(' ')[:-1]) + '.tpp'
tpp_ja2_portraits_std = ' '.join(exe_ja2_portraits_std.split(' ')[:-1]) + '.tpp'
tpp_lr_portraits_std = ' '.join(exe_lr_portraits_std.split(' ')[:-1]) + '.tpp'
current_dir = sys.path[0]
download_dir = os.getenv('DOWNLOAD_DIR')
install_dir = os.getenv('INSTALL_DIR')
game_dir = install_dir + '/the_temple_of_elemental_evil/game'
os.system('mkdir -p "' + download_dir + '/_distr/the_temple_of_elemental_evil"')
std_addons_dir = current_dir + '/game/addons/Circle of Eight Modpack 8.1.0 Standard'
bg_portraits_installed = os.path.exists(std_addons_dir + '/' + tpp_bg_portraits_std)
id_portraits_installed = os.path.exists(std_addons_dir + '/' + tpp_id_portraits_std)
ja2_portraits_installed = os.path.exists(std_addons_dir + '/' + tpp_ja2_portraits_std)
lr_portraits_installed = os.path.exists(std_addons_dir + '/' + tpp_lr_portraits_std)
config_file_path = current_dir + '/game/toee.cfg'
if not os.path.exists(config_file_path):
config_file_path = current_dir + '/game/ToEE.cfg'
class GUI:
def __init__(self):
self.config_load()
self.create_main_window()
self.create_co8_std_window()
self.create_co8_nc_window()
self.create_co8_kob_window()
self.create_portraits_window()
def create_main_window(self):
self.main_window = Gtk.Window(
title = _("The Temple of Elemental Evil"),
type = Gtk.WindowType.TOPLEVEL,
window_position = Gtk.WindowPosition.CENTER_ALWAYS,
resizable = False,
)
self.main_window.connect('delete-event', self.quit_app)
grid = Gtk.Grid(
margin_left = 10,
margin_right = 10,
margin_top = 10,
margin_bottom = 10,
row_spacing = 10,
column_spacing = 10,
column_homogeneous = True,
)
label_custom_res = Gtk.Label(
label = _("Custom resolution:")
)
entry_custom_width = Gtk.Entry(
placeholder_text = _("Width"),
max_length = 4,
xalign = 0.5,
text = self.custom_width
)
entry_custom_width.connect('changed', self.cb_entry_custom_width)
entry_custom_height = Gtk.Entry(
placeholder_text = _("Height"),
max_length = 4,
xalign = 0.5,
text = self.custom_height
)
entry_custom_height.connect('changed', self.cb_entry_custom_height)
self.button_co8_tfe_x = Gtk.Button(
label = _("TFE-X"),
no_show_all = True
)
self.button_co8_tfe_x.connect('clicked', self.cb_button_co8_tfe_x)
if os.path.exists(current_dir + '/game/TFE-X.jar'):
self.button_co8_tfe_x.set_visible(True)
self.button_co8_std = Gtk.Button(
label = _("Install 'Co8 Mopdack Standard Edition'"),
no_show_all = True
)
self.button_co8_std.connect('clicked', self.cb_button_co8_std)
if not os.path.exists(current_dir + '/game/' + tfm_co8_modpack_std):
self.button_co8_std.set_visible(True)
self.button_co8_nc = Gtk.Button(
label = _("Install 'Co8 Mopdack New Content Edition'"),
no_show_all = True
)
self.button_co8_nc.connect('clicked', self.cb_button_co8_nc)
if not os.path.exists(current_dir + '/game/' + tfm_co8_modpack_nc):
self.button_co8_nc.set_visible(True)
self.button_co8_kob = Gtk.Button(
label = _("Install 'The Keep on the Borderlands'"),
no_show_all = True
)
self.button_co8_kob.connect('clicked', self.cb_button_co8_kob)
# NOTE: the source row is truncated at this point; the completion below is
# inferred from the identical std/nc button checks earlier in this method.
if not os.path.exists(current_dir + '/game/' + tfm_co8_kob):
    self.button_co8_kob.set_visible(True)
|
Real-Instruments/fflib
|
fflib/peer.py
|
Python
|
agpl-3.0
| 26,829
| 0.003541
|
#!/usr/bin/env python3
# The MIT License (MIT)
# Copyright (c) 2016 Michael Sasser <Michael.Sasser@Real-Instruments.de>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
# documentation files (the "Software"), to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and
# to permit persons to whom the Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all copies or substantial portions of
# the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO
# THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from _thread import start_new_thread, allocate_lock
import ntpath
import glob
from fflib._version import version
from fflib.meshviewer import MeshViewer
from fflib.std import operating_system, origin_name, re_name_line, re_mac_line, re_key_line, re_comment_line, re_peer_line, \
re_name, re_mac, re_key
VERSION = str(version())
os_type, os_release, os_version, os_distribution = operating_system()
NAME = origin_name()
class PeerObject(object):
"""(Use getter and setter to change a Value manuelly!)"""
def __init__(self, file):
self.file = file
# Steps
self.__read_done = False
self.__updated = False
self.__syntax_verified = False
self.__written = False
self.__committed = False
# Data
self.__filename = ntpath.basename(file)
self.__filename_given = self.__filename
self.__filename_old = None
self.__name = None
self.__name_old = None
self.__name_veryfied = None
self.__name_inconsistent = False
self.__mac = None
self.__mac_old = None
self.__mac_veryfied = None
self.__comment = None
self.__comment_old = None # Should never be used!
self.__key = None
self.__key_old = None
self.__key_veryfied = None # For future; Currently testing in 4.0.0
# Meshviewer specific
self.__mv_probability = None
self.__script_Name = 'TestScript' # Static
self.__script_Names_old = () # Static
self.__script_use_found = None # True == Yes; False == No; None == Not checked yet
self.__script_last_edit_date = None
self.__script_version = None
self.__script_verified = None
# Errors (Checks)
# Do not change these Integers (keys)! These are needed for Errorhandling.
#
# 1 - 19 reserved
# 20 - 199 Std. errors, warnings and verbose
# 200 - 219 reserved
# 220 - 400 Git commit Std. errors, warnings and verbose
#
# Error Code | Message string
self.__msg = {40: 'Duplicate: The name \"{}\" is already in use in file \"{}\".',
41: 'Duplicate: The MAC \"{}\" is already in use in file \"{}\".',
42: 'Duplicate: The key \"{}\" is already in use in file \"{}\".',
43: 'Duplicate: The Filename \"{}\" is already present as filename \"{}\".',
50: 'The file includes non unicode content.',
51: 'The gwconfig line seems to be broken',
52: 'Inconsistent Name. Filename \"{}\" not matching Name {} in the file on line {}.',
60: 'Name not match pattern on line {}.',
61: 'MAC not match pattern on line {}.',
62: 'Key not match pattern on line {}.',
63: 'Comments not matching pattern on line {}.',
64: 'Gwconfig line not match pattern on line {}.',
70: 'Name found on line {} is already present at line {}.',
71: 'MAC found on line {} is already present at line {}.',
72: 'Key found on line {} is already present at line {}.',
73: 'Comment found on line {} is already present at line {}.',
74: 'Gwconfig line found on line {} is already present at line {}.',
80: 'Name not found, the filename \"{}\" is the new name for now',
90: 'Name not found.',
91: 'MAC not found.',
92: 'Comment not found. The comment will be added on rewriting the file.',
93: 'key not found.',
94: 'Gwconfig line not found in the file. The gwconfig line will be added on rewriting the file.',
100: 'Name changed from \"{}\" to \"{}\" with a Meshviewer match probability of {:6.2f}%.',
101: 'MAC changed from \"{}\" to \"{}\" with a Meshviewer match probability of {:6.2f}%.',
102: 'Filename changed from \"{}\" to \"{}\" with a Meshviewer match probability of {:6.2f}%.',
103: 'Name not found in Meshviewer data.',
104: 'Name found in Meshviewer data (Node ID: {})',
105: 'Mac address found in Meshviewer data (Node ID: {})',
106: 'Mac not found in Meshviewer data.',
107: 'File checked with Meshviewer data informations. The Meshviewer match probability is {:6.2f}%.',
108: 'Meshviewer data are insufficient for the file.'}
# Status
self.__debug = {}
self.__warn = {}
self.__error = {}
# Commit data
self.__commit_head_msg = None
self.__commit_head_msg_manuelly = False
self.__commit_comment_msg = []
self.__commit_comment_msg_manuelly = False
self.__commit_dict = {}
# Statics
self.force_rewrite = False
self.__force_rewrite_versiones = ([0, 0, 0])
# Open the File
self.read()
def __repr__(self):
return self.__filename_given
def __str__(self):
return self.__filename
def read(self):
file_raw = []
# Unicode error
self.__del_msgs(50)
try:
with open(self.file) as fp:
for line in fp:
file_raw.append(line)
except UnicodeError:
# Unicode error
self.__error.update({50: self.__msg[50]})
return False
# Start proces
|
sing
self.__read_process(file_raw)
def __del_msgs(self, *args):
for err in args:
self.__error.pop(err, None)
self.__warn.pop(err, None)
self.__debug.pop(err, None)
return 0
def __read_process(self, file_raw):
self.__del_msgs(70, 51, 52, 60, 71, 61, 72, 62, 73, 80)
line_no = 0
self.__read_done = False
cache_name = None
cache_mac = None
cache_key = None
cache_comment = None
cache_script_name = None
cache_script_version = None
cache_script_last_edit_date = None
cache_script_used_found = False
cache_script_verified = None
for line in file_raw:
check_matches_found = 0
line_no += 1
# ToDo: Detect supernodes!!!!!
# Check: Name (perfect match)
if re_name_line.match(line):
l = line.rstrip().split(' ')[1]
if re_name.match(l):
if self.__filename == l:
if not cache_name:
cache_name = (l, line_no)
check_matches_found += 1
else:
# Already found
# NOTE: the source row is truncated at this point; the argument below is
# inferred from the format of message 70 ('Name found on line {} is
# already present at line {}.').
self.__error.update(
    {70: self.__msg[70].format(line_no, cache_name[1])})
|
mpasternak/pyglet-fix-issue-518-522
|
tests/image/PLATFORM_RGBA_LOAD.py
|
Python
|
bsd-3-clause
| 963
| 0.003115
|
#!/usr/bin/env python
'''Test RGBA load using the platform decoder (QuickTime, Quartz, GDI+ or Gdk).
You should see the rgba.png image on a checkboard background.
'''
__docformat__ = 'restructuredtext'
__version__ = '$Id: $'
import unittest
import base_load
import sys
if sys.platform == 'linux2':
from pyglet.image.codecs.gdkpixbuf2 import GdkPixbuf2ImageDecoder as dclass
elif sys.platform in ('win32', 'cygwin'):
from pyglet.image.codecs.gdiplus import GDIPlusDecoder as dclass
elif sys.platform == 'darwin':
from pyglet import options as pyglet_options
if pyglet_options['darwin_cocoa']:
from pyglet.image.codecs.quartz import QuartzImageDecoder as dclass
else:
from pyglet.image.codecs.quicktime import QuickTimeImageDecoder as dclass
class TEST_PLATFORM_RGBA_LOAD(base_load.TestLoad):
texture_file = 'rgba.png'
decoder = dclass()
if __name__ == '__main__':
unittest.main()
|
yytang2012/novels-crawler
|
novelsCrawler/spiders/m-daomengren.py
|
Python
|
mit
| 1,831
| 0.000548
|
#!/usr/bin/env python
# coding=utf-8
"""
Created on April 15 2017
@author: yytang
"""
from scrapy import Selector
from libs.misc import get_spider_name_from_domain
from libs.polish import *
from novelsCrawler.spiders.novelSpider import NovelSpider
class DaomengrenMobileSpider(NovelSpider):
"""
classdocs
example: http://m.daomengren.com/wapbook-7922/
"""
dom = 'm.daomengren.com'
name = get_spider_name_from_domain(dom)
allowed_domains = [dom]
def parse_title(self, response):
sel = Selector(response)
title = sel.xpath('//h3/text()').extract()[0]
title = polish_title(title, self.name)
return title
def parse_episodes(self, response):
sel = Selector(response)
episodes = []
subtitle_selectors = sel.xpath('//ul[@class="chapter"]/li/a')
for page_id, subtitle_selector in enumerate(subtitle_selectors):
subtitle_url = subtitle_selector.xpath('@href').extract()[0]
subtitle_url = response.urljoin(subtitle_url.strip())
subtitle_name = subtitle_selector.xpath('text()').extract()[0]
subtitle_name = polish_subtitle(subtitle_name)
episodes.append((page_id, subtitle_name, subtitle_url))
return episodes
def parse_content(self, response):
sel = Selector(response)
content = sel.xpath('//div[@id="nr1"]/text()').extract()
content = polish_content(content)
return content
def get_next_page_url(self, response):
sel = Selector(response)
next_page_url_list = sel.xpath('//div[@class="page"]/a[contains(text(), "下一页")]/@href').extract()
if len(next_page_url_list) != 0:
next_page_url = next_page_url_list[0]
return next_page_url
else:
return None
|
martinohanlon/minecraft-starwars
|
planet.py
|
Python
|
mit
| 1,268
| 0.005521
|
import mcpi.minecraft as minecraft
import mcpi.block as block
import mcpi.minecraftstuff as mcstuff
from time import sleep
class Planet():
def __init__(self, pos, radius, blockType, blockData = 0):
self.mc = minecraft.Minecraft.create()
self.pos = pos
        self.radius = radius
self.blockType = blockType
self.blockData = blockData
self._draw()
def _draw(self):
mcDraw = mcstuff.MinecraftDrawing(self.mc)
mcDraw.drawHollowSphere(self.pos.x, self.pos.y, self.pos.z,
self.radius, self.blockType, self.blockData)
def destroy(self, delay):
mcDraw = mcstuff.MinecraftDrawing(self.mc)
        #mcDraw.drawHollowSphere(self.pos.x, self.pos.y, self.pos.z,
# self.radius, block.LAVA_STATIONARY.id)
#sleep(delayLava)
mcDraw.drawHollowSphere(self.pos.x, self.pos.y, self.pos.z,
self.radius, block.COBBLESTONE.id)
sleep(delay)
self.clear()
def clear(self):
mcDraw = mcstuff.MinecraftDrawing(self.mc)
mcDraw.drawSphere(self.pos.x, self.pos.y, self.pos.z,
self.radius, block.AIR.id)
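# --- Editor's addition: hedged usage sketch, not part of the original file. ---
# Assumes an mcpi-compatible Minecraft server is reachable; position and
# radius below are illustrative. Uses only the imports already at the top.
if __name__ == "__main__":
    mc = minecraft.Minecraft.create()
    pos = mc.player.getTilePos()
    pos.y += 20  # float the planet above the player
    stone_world = Planet(pos, radius=8, blockType=block.STONE.id)
    stone_world.destroy(delay=5)  # cobblestone shell, short pause, then cleared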
|
shakle17/django_range_slider
|
test_slider/slider_app/widgets.py
|
Python
|
mit
| 1,469
| 0.007488
|
from django import forms
from django.utils.safestring import mark_safe
import re
class RangeSlider(forms.TextInput):
def __init__(self, minimum, maximum, step, elem_name,*args,**kwargs):
widget = super(RangeSlider,self).__init__(*args,**kwargs)
self.minimum = str(minimum)
self.maximum = str(maximum)
self.step = str(step)
self.elem_name = str(elem_name)
def render(self, name, value, attrs=None, renderer=None):
s = super(RangeSlider, self).render(name, value, attrs)
self.elem_id = re.findall(r'id_([A-Za-z0-9_\./\\-]*)"',s)[0]
html = """<div id="slider-range-"""+self.elem_id+""""></div>
        <script>
$('#id_"""+self.elem_id+"""').attr("readonly", true)
$( "#slider-range-"""+self.elem_id + """" ).slider({
range: true,
min: """+self.minimum+""",
max: """+self.maximum
|
+""",
step: """+self.step+""",
values: [ """+self.minimum+""","""+self.maximum+""" ],
slide: function( event, ui ) {
$( "#id_"""+self.elem_id+"""" ).val(" """ + self.elem_name + """ "+ ui.values[ 0 ] + " - " + ui.values[ 1 ] );
}
});
$( "#id_"""+self.elem_id+"""" ).val(" """ + self.elem_name + """ "+ $( "#slider-range-""" + self.elem_id + """" ).slider( "values", 0 ) +
" - " + $( "#slider-range-"""+ self.elem_id + """" ).slider( "values", 1 ) );
</script>
"""
return mark_safe(s+html)
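# --- Editor's addition: hedged usage sketch, not part of the original file. ---
# Illustrative only: a form field rendered with the widget above. The form
# and field names are made up, and the page must also load jQuery UI for
# the generated slider JavaScript to work.
class PriceFilterForm(forms.Form):
    price = forms.CharField(
        required=False,
        widget=RangeSlider(minimum=0, maximum=1000, step=10,
                           elem_name="Price:"),
    )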
|
battlemidget/taiga-ncurses
|
taiga_ncurses/ui/views/backlog.py
|
Python
|
apache-2.0
| 3,880
| 0.001291
|
# -*- coding: utf-8 -*-
"""
taiga_ncurses.ui.views.backlog
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
"""
import urwid
from taiga_ncurses.ui.widgets import generic, backlog
from . import base
class ProjectBacklogSubView(base.SubView):
help_popup_title = "Backlog Help Info"
help_popup_info = base.SubView.help_popup_info + (
( "Backlog Movements:", (
("↑ | k | ctrl p", "Move Up"),
("↓ | j | ctrl n", "Move Down"),
("← | h | ctrl b", "Move Left"),
("→ | l | ctrl f", "Move Right"),
)),
( "User Stories Actions:", (
("n", "Create new US"),
("N", "Create new USs in bulk"),
("e", "Edit selected US"),
("Supr", "Delete selected US"),
("K", "Move selected US up"),
("J", "Move selected US down"),
("w", "Save the position of all USs"),
("m", "Move selected US to a Milestone"),
("r", "Refresh the screen")
)),
)
def __init__(self, parent_view, project, notifier, tabs):
super().__init__(parent_view)
self.project = project
self.notifier = notifier
self.stats = backlog.BacklogStats(project)
self.user_stories = backlog.UserStoryList(project)
list_walker = urwid.SimpleFocusListWalker([
tabs,
generic.box_solid_fill(" ", 1),
self.stats,
generic.box_solid_fill(" ", 1),
self.user_stories
])
list_walker.set_focus(4)
self.widget = urwid.ListBox(list_walker)
def open_user_story_form(self, user_story={}):
self.user_story_form = backlog.UserStoryForm(self.project, user_story=user_story)
# FIXME: Calculate the form size
self.parent.show_widget_on_top(self.user_story_form, 80, 24)
def close_user_story_form(self):
del self.user_story_form
self.parent.hide_widget_on_top()
def get_user_story_form_data(self):
data = {}
if hasattr(self, "user_story_form"):
data.update({
"subject": self.user_story_form.subject,
"milestone": self.user_story_form.milestone,
"points": self.user_story_form.points,
"status": self.user_story_form.status,
"is_blocked": self.user_story_form.is_blocked,
"blocked_note": self.user_story_form.blocked_note,
"tags": self.user_story_form.tags,
"description": self.user_story_form.description,
"team_requirement": self.user
|
_story_form.team_requirement,
"client_requirement": self.user_story_form.client_requirement,
"project": self.project["id"],
})
return data
    def open_user_stories_in_bulk_form(self):
self.user_stories_in_bulk_form = backlog.UserStoriesInBulkForm(self.project)
# FIXME: Calculate the form size
self.parent.show_widget_on_top(self.user_stories_in_bulk_form, 80, 24)
def close_user_stories_in_bulk_form(self):
del self.user_stories_in_bulk_form
self.parent.hide_widget_on_top()
def get_user_stories_in_bulk_form_data(self):
data = {}
if hasattr(self, "user_stories_in_bulk_form"):
data.update({
"bulkStories": self.user_stories_in_bulk_form.subjects,
"projectId": self.project["id"],
})
return data
def open_milestones_selector_popup(self, user_story={}):
self.milestone_selector_popup = backlog.MIlestoneSelectorPopup(self.project, user_story)
# FIXME: Calculate the popup size
self.parent.show_widget_on_top(self.milestone_selector_popup, 100, 30)
def close_milestone_selector_popup(self):
del self.milestone_selector_popup
self.parent.hide_widget_on_top()
|
obi-two/Rebelion
|
data/scripts/templates/object/tangible/loot/bestine/shared_bestine_painting_schematic_ronka.py
|
Python
|
mit
| 503
| 0.043738
|
#### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
result = Tangible()
result.template = "object/tangible/loot/bestine/shared_bestine_painting_schematic_ronka.iff"
result.attribute_template_id = -1
result.stfName("craft_furniture_ingredients_n","painting_schematic_ronka")
#### BEGIN MODIFICATIONS ####
	#### END MODIFICATIONS ####
return result
|
edgedb/edgedb
|
tests/test_edgeql_casts.py
|
Python
|
apache-2.0
| 82,780
| 0
|
#
# This source file is part of the EdgeDB open source project.
#
# Copyright 2018-present MagicStack Inc. and the EdgeDB authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import itertools
import os.path
import edgedb
from edb.testbase import server as tb
class TestEdgeQLCasts(tb.QueryTestCase):
'''Testing symmetry and validity of casts.
Scalar casting is symmetric in the sense that if casting scalar
type X into Y is valid then it is also valid to cast Y into X.
Some casts are lossless. A cast from X into Y is lossless if all
the relevant details of the value of type X can be unambiguously
represented by a value of type Y. Examples of lossless casts:
- any scalar can be losslessly cast into a str
- int16 and int32 can be losslessly cast into int64
- int16 can be losslessly cast into float32
- any numeric type can be losslessly cast into a decimal
Sometimes only specific values (a subset of the entire domain of
the scalar) can be cast losslessly:
- 2147299968 can be cast losslessly into a float32, but not 2147299969
- decimal 2.5 can be cast losslessly into a float32, but not decimal
2.5000000001
Consider two types X and Y with corresponding values x and y.
If x can be losslessly cast into Y, then casting it back is also lossless:
x = <X><Y>x
'''
# FIXME: a special schema should be used here since we need to
# cover all known scalars and even some arrays and tuples.
SCHEMA = os.path.join(os.path.dirname(__file__), 'schemas',
'casts.esdl')
SETUP = os.path.join(os.path.dirname(__file__), 'schemas',
'casts_setup.edgeql')
# NOTE: nothing can be cast into bytes
async def test_edgeql_casts_bytes_01(self):
async with self.assertRaisesRegexTx(
edgedb.QueryError, r'cannot cast'):
await self.con.execute("""
SELECT <bytes>True;
""")
async def test_edgeql_casts_bytes_02(self):
async with self.assertRaisesRegexTx(
edgedb.QueryError, r'cannot cast'):
await self.con.execute("""
SELECT <bytes>uuid_generate_v1mc();
""")
async def test_edgeql_casts_bytes_03(self):
async with self.assertRaisesRegexTx(
edgedb.QueryError, r'cannot cast'):
await self.con.execute("""
SELECT <bytes>'Hello';
""")
async def test_edgeql_casts_bytes_04(self):
async with self.assertRaisesRegexTx(
edgedb.InvalidValueError, r'expected JSON string or null'):
await self.con.query_single("""SELECT <bytes>to_json('1');"""),
self.assertEqual(
await self.con.query_single(r'''
SELECT <bytes>to_json('"aGVsbG8="');
'''),
b'hello',
)
async with self.assertRaisesRegexTx(
edgedb.InvalidValueError, r'invalid symbol'):
await self.con.query_single("""
SELECT <bytes>to_json('"not base64!"');
""")
async with self.assertRaisesRegexTx(
edgedb.InvalidValueError, r'invalid base64 end sequence'):
await self.con.query_single("""
SELECT <bytes>to_json('"a"');
""")
async def test_edgeql_casts_bytes_05(self):
async with self.assertRaisesRegexTx(
edgedb.QueryError, r'cannot cast'):
await self.con.execute("""
SELECT <bytes>datetime_current();
""")
async def test_edgeql_casts_bytes_06(self):
async with self.assertRaisesRegexTx(
edgedb.QueryError, r'cannot cast'):
await self.con.execute("""
                SELECT
                    <bytes>cal::to_local_datetime('2018-05-07T20:01:22.306916');
""")
async def test_edgeql_casts_bytes_07(self):
async with self.assertRaisesRegexTx(
edgedb.QueryError, r'cannot cast'):
await self.con.execute("""
SELECT <bytes>cal::to_local_date('2018-05-07');
""")
async def test_edgeql_casts_bytes_08(self):
async with self.assertRaisesRegexTx(
edgedb.QueryError, r'cannot cast'):
await self.con.execute("""
SELECT <bytes>cal::to_local_time('20:01:22.306916');
""")
async def test_edgeql_casts_bytes_09(self):
async with self.assertRaisesRegexTx(
edgedb.QueryError, r'cannot cast'):
await self.con.execute("""
SELECT <bytes>to_duration(hours:=20);
""")
async def test_edgeql_casts_bytes_10(self):
async with self.assertRaisesRegexTx(
edgedb.QueryError, r'cannot cast'):
await self.con.execute("""
SELECT <bytes>to_int16('2');
""")
async def test_edgeql_casts_bytes_11(self):
async with self.assertRaisesRegexTx(
edgedb.QueryError, r'cannot cast'):
await self.con.execute("""
SELECT <bytes>to_int32('2');
""")
async def test_edgeql_casts_bytes_12(self):
async with self.assertRaisesRegexTx(
edgedb.QueryError, r'cannot cast'):
await self.con.execute("""
SELECT <bytes>to_int64('2');
""")
async def test_edgeql_casts_bytes_13(self):
async with self.assertRaisesRegexTx(
edgedb.QueryError, r'cannot cast'):
await self.con.execute("""
SELECT <bytes>to_float32('2');
""")
async def test_edgeql_casts_bytes_14(self):
async with self.assertRaisesRegexTx(
edgedb.QueryError, r'cannot cast'):
await self.con.execute("""
SELECT <bytes>to_float64('2');
""")
async def test_edgeql_casts_bytes_15(self):
async with self.assertRaisesRegexTx(
edgedb.QueryError, r'cannot cast'):
await self.con.execute("""
SELECT <bytes>to_decimal('2');
""")
async def test_edgeql_casts_bytes_16(self):
async with self.assertRaisesRegexTx(
edgedb.QueryError, r'cannot cast'):
await self.con.execute("""
SELECT <bytes>to_bigint('2');
""")
# NOTE: casts are idempotent
async def test_edgeql_casts_idempotence_01(self):
await self.assert_query_result(
r'''SELECT <bool><bool>True IS bool;''',
[True],
)
await self.assert_query_result(
r'''SELECT <bytes><bytes>b'Hello' IS bytes;''',
[True],
)
await self.assert_query_result(
r'''SELECT <str><str>'Hello' IS str;''',
[True],
)
await self.assert_query_result(
r'''SELECT <json><json>to_json('1') IS json;''',
[True],
)
await self.assert_query_result(
r'''SELECT <uuid><uuid>uuid_generate_v1mc() IS uuid;''',
[True],
)
await self.assert_query_result(
r'''SELECT <datetime><datetime>datetime_current() IS datetime;''',
[True],
)
await self.assert_query_result(
r'''
SELECT <cal::local_datetime><cal::local_datetime>
cal::to_local_datetime(
'2018-05-07T20:01:22.306916') IS cal::local_datetime;
''',
[True],
)
await self.assert_query_result(
r'''
|
amitgroup/amitgroup
|
examples/parts_descriptor_test.py
|
Python
|
bsd-3-clause
| 608
| 0.008224
|
import amitgroup as ag
import numpy as np
ag.set_verbose(True)
# This requires you to have the MNIST data set.
data, digits = ag.io.load_mnist('training', selection=slice(0, 100))
pd = ag.features.PartsDescriptor((5, 5), 20, patch_frame=1, edges_threshold=5, samples_per_image=10)
# Use only 100 of the digits
pd.train_from_images(data)
# Save the model to a file.
#pd.save('parts_model.npy')
# You can then load it again by
#pd = ag.features.PartsDescriptor.load(filename)
# Then you can extract features by
#features = pd.extract_features(image)
# Visualize the parts
ag.plot.images(pd.visparts)
|
toumorokoshi/uranium
|
uranium/tests/lib/test_context.py
|
Python
|
mit
| 1,065
| 0
|
import pytest
from uranium.lib.context import Proxy, ContextStack, ContextUnavailable
@pytest.fixture
def context_stack():
return ContextStack()
@pytest.fixture
def proxy(context_stack):
return Proxy(context_stack)
def test_context_stack(context_stack):
obj1 = object()
obj2 = object()
with pytest.raises(ContextUnavailable):
context_stack.obj
context_stack.push(obj1)
assert context_stack.obj is obj1
context_stack.push(obj2)
assert context_stack.obj is obj2
context_stack.pop()
assert context_stack.obj is obj1
with context_stack.create_context(obj2):
assert context_stack.obj is obj2
    assert context_stack.pop() is obj1
with pytest.raises(ContextUnavailable):
context_stack.obj
def test_context_proxy(context_stack, proxy):
class TestObj(object):
pass
obj = TestObj()
obj.foo = 3
obj.bar = 6
with context_stack.create_context(obj):
assert proxy.foo == 3
assert proxy.bar == 6
proxy.bar = 7
assert obj.bar == 7
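# --- Editor's addition: hedged sketch, not part of the original file. ---
# The pattern these tests exercise: a module-level proxy that forwards
# attribute access to whatever object is currently on the context stack.
# Names below are illustrative.
def example_proxy_pattern():
    stack = ContextStack()
    current = Proxy(stack)

    class Build(object):
        name = "docs"

    with stack.create_context(Build()):
        # anywhere inside this block, the proxy resolves via the stack
        assert current.name == "docs"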
|
Crypt0s/Ramen
|
fs_libs/ftputil/sandbox/test_ticket_71.py
|
Python
|
gpl-3.0
| 411
| 0.002433
|
#! /usr/bin/env python
import ftplib
import ftputil
ftp_host = ftputil.FTPHost("lo
|
calhost", "ftptest",
"d605581757de5eb56d568a4419f4126e")
ftp_host._session.set_debuglevel(2)
#import pdb; pdb.set_trace()
ftp_host.listdir("/rootdir2")
print
ftp = ftplib.FTP("localhost", "ftptest", "d605581757de5eb56d568a4419f4126e")
ftp.set_debuglevel(2)
ftp.cwd("/")
ftp.cwd("/")
ftp.dir("-a")
|
ahnitz/pycbc
|
docs/_include/distributions-table.py
|
Python
|
gpl-3.0
| 1,274
| 0
|
#!/usr/bin/env python
# Copyright (C) 2018 Duncan Macleod, Collin Capano
#
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation; either version 3 of the License, or (at your
# option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""Prints an RST table of available distributions from the distributions
module.
"""
from __future__ import (print_function, absolute_import)
from pycbc.distributions import distribs
from _dict_to_rst import (rst_dict_table, format_class)
tbl = rst_dict_table(distribs, key_format='``\'{0}\'``'.format,
header=('Name', 'Class'),
val_format=format_class)
filename = 'distributions-table.rst'
with open(filename, 'w') as fp:
print(tbl, file=fp)
|
kingsamchen/Eureka
|
crack-data-structures-and-algorithms/leetcode/find_the_celebrity_q277.py
|
Python
|
mit
| 1,367
| 0
|
# The knows API is already defined for you.
# @param a, person a
# @param b, person b
# @return a boolean, whether a knows b
# def knows(a, b):
# Core idea:
# Keep the time complexity at O(n), otherwise the solution will TLE.
# Step 1: pick a candidate celebrity. Assume candidate i, then for each j (i != j) check knows(i, j):
# if it returns True, i knows j, so i cannot be the celebrity; replace i with j and keep scanning;
# if it returns False, i may still be the celebrity, so keep scanning.
# Step 2: verify whether candidate i really is the celebrity, scanning from the start (skipping i itself).
# If candidate i knows some j, or some j does not know i, then i is not the celebrity; since our only
# candidate has been eliminated, there is no celebrity for this input.
class Solution(object):
def findCelebrity(self, n):
"""
:type n: int
:rtype: int
"""
# our candidate celebrity cannot know anyone except itself.
candidate = 0
for i in range(1, n):
if knows(candidate, i):
candidate = i
# our candidate may not be the true celebrity
# lets verify it.
for i in range(n):
if candidate == i:
                continue
            if knows(candidate, i) or not knows(i, candidate):
return -1
# aha, it passes all tests.
return candidate
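# --- Editor's addition: hedged local harness, not part of the original file. ---
# The judge normally supplies knows(); this fake adjacency matrix lets the
# solution run stand-alone. Person 1 knows nobody and is known by everyone,
# so the expected output is 1.
_matrix = [
    [0, 1, 0],  # 0 knows 1
    [0, 0, 0],  # 1 knows nobody -> the celebrity
    [1, 1, 0],  # 2 knows 0 and 1
]

def knows(a, b):
    return bool(_matrix[a][b])

if __name__ == "__main__":
    print(Solution().findCelebrity(3))  # expected: 1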
|
Debian/openjfx
|
modules/web/src/main/native/Tools/Scripts/webkitpy/port/driver.py
|
Python
|
gpl-2.0
| 32,610
| 0.003312
|
# Copyright (C) 2011 Google Inc. All rights reserved.
# Copyright (c) 2015, 2016 Apple Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the Google name nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY
|
EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import base64
import logging
import re
import shlex
import sys
import time
import os
from webkitpy.common.system import path
from webkitpy.common.system.profiler import ProfilerFactory
_log = logging.getLogger(__name__)
class DriverInput(object):
def __init__(self, test_name, timeout, image_hash, should_run_pixel_test, should_dump_jsconsolelog_in_stderr=None, args=None):
self.test_name = test_name
self.timeout = timeout # in ms
self.image_hash = image_hash
self.should_run_pixel_test = should_run_pixel_test
self.should_dump_jsconsolelog_in_stderr = should_dump_jsconsolelog_in_stderr
self.args = args or []
def __repr__(self):
return "DriverInput(test_name='{}', timeout={}, image_hash={}, should_run_pixel_test={}, should_dump_jsconsolelog_in_stderr={}'".format(self.test_name, self.timeout, self.image_hash, self.should_run_pixel_test, self.should_dump_jsconsolelog_in_stderr)
class DriverOutput(object):
"""Groups information about a output from driver for easy passing
and post-processing of data."""
metrics_patterns = []
metrics_patterns.append((re.compile('at \(-?[0-9]+,-?[0-9]+\) *'), ''))
metrics_patterns.append((re.compile('size -?[0-9]+x-?[0-9]+ *'), ''))
metrics_patterns.append((re.compile('text run width -?[0-9]+: '), ''))
metrics_patterns.append((re.compile('text run width -?[0-9]+ [a-zA-Z ]+: '), ''))
metrics_patterns.append((re.compile('RenderButton {BUTTON} .*'), 'RenderButton {BUTTON}'))
metrics_patterns.append((re.compile('RenderImage {INPUT} .*'), 'RenderImage {INPUT}'))
metrics_patterns.append((re.compile('RenderBlock {INPUT} .*'), 'RenderBlock {INPUT}'))
metrics_patterns.append((re.compile('RenderTextControl {INPUT} .*'), 'RenderTextControl {INPUT}'))
metrics_patterns.append((re.compile('\([0-9]+px'), 'px'))
metrics_patterns.append((re.compile(' *" *\n +" *'), ' '))
metrics_patterns.append((re.compile('" +$'), '"'))
metrics_patterns.append((re.compile('- '), '-'))
metrics_patterns.append((re.compile('\n( *)"\s+'), '\n\g<1>"'))
metrics_patterns.append((re.compile('\s+"\n'), '"\n'))
metrics_patterns.append((re.compile('scrollWidth [0-9]+'), 'scrollWidth'))
metrics_patterns.append((re.compile('scrollHeight [0-9]+'), 'scrollHeight'))
metrics_patterns.append((re.compile('scrollX [0-9]+'), 'scrollX'))
metrics_patterns.append((re.compile('scrollY [0-9]+'), 'scrollY'))
metrics_patterns.append((re.compile('scrolled to [0-9]+,[0-9]+'), 'scrolled'))
def __init__(self, text, image, image_hash, audio, crash=False,
test_time=0, measurements=None, timeout=False, error='', crashed_process_name='??',
crashed_pid=None, crash_log=None, pid=None):
# FIXME: Args could be renamed to better clarify what they do.
self.text = text
self.image = image # May be empty-string if the test crashes.
self.image_hash = image_hash
self.image_diff = None # image_diff gets filled in after construction.
self.audio = audio # Binary format is port-dependent.
self.crash = crash
self.crashed_process_name = crashed_process_name
self.crashed_pid = crashed_pid
self.crash_log = crash_log
self.test_time = test_time
self.measurements = measurements
self.timeout = timeout
self.error = error # stderr output
self.pid = pid
def has_stderr(self):
return bool(self.error)
def strip_metrics(self):
self.strip_patterns(self.metrics_patterns)
def strip_patterns(self, patterns):
if not self.text:
return
for pattern in patterns:
self.text = re.sub(pattern[0], pattern[1], self.text)
def strip_stderror_patterns(self, patterns):
if not self.error:
return
for pattern in patterns:
self.error = re.sub(pattern[0], pattern[1], self.error)
class Driver(object):
"""object for running test(s) using DumpRenderTree/WebKitTestRunner."""
def __init__(self, port, worker_number, pixel_tests, no_timeout=False):
"""Initialize a Driver to subsequently run tests.
Typically this routine will spawn DumpRenderTree in a config
ready for subsequent input.
port - reference back to the port object.
worker_number - identifier for a particular worker/driver instance
"""
self._port = port
self._worker_number = worker_number
self._no_timeout = no_timeout
self._driver_tempdir = None
self._driver_user_directory_suffix = None
self._driver_user_cache_directory = None
# WebKitTestRunner can report back subprocess crashes by printing
# "#CRASHED - PROCESSNAME". Since those can happen at any time and ServerProcess
# won't be aware of them (since the actual tool didn't crash, just a subprocess)
# we record the crashed subprocess name here.
self._crashed_process_name = None
self._crashed_pid = None
self._driver_timed_out = False
# stderr reading is scoped on a per-test (not per-block) basis, so we store the accumulated
# stderr output, as well as if we've seen #EOF on this driver instance.
# FIXME: We should probably remove _read_first_block and _read_optional_image_block and
# instead scope these locally in run_test.
self.error_from_test = str()
self.err_seen_eof = False
self._server_name = self._port.driver_name()
self._server_process = None
self._measurements = {}
if self._port.get_option("profile"):
profiler_name = self._port.get_option("profiler")
self._profiler = ProfilerFactory.create_profiler(self._port.host,
self._port._path_to_driver(), self._port.results_directory(), profiler_name)
else:
self._profiler = None
self.web_platform_test_server_doc_root = self._port.web_platform_test_server_doc_root()
self.web_platform_test_server_base_url = self._port.web_platform_test_server_base_url()
def __del__(self):
self.stop()
def run_test(self, driver_input, stop_when_done):
"""Run a single test and return the results.
Note that it is okay if a test times out or crashes and leaves
the driver in an indeterminate state. The upper layers of the program
are responsible for cleaning up and ensuring things are okay.
Returns a DriverOutput object.
"""
sta
|
Knewton/pettingzoo-python
|
pettingzoo/deleted.py
|
Python
|
apache-2.0
| 3,159
| 0.034505
|
import zc.zk
import zookeeper
import threading
import sys
import traceback
import pettingzoo.testing
class Deleted(zc.zk.NodeInfo):
"""
This class is implementing the zc.zk
"""
event_type = zookeeper.DELETED_EVENT
def __init__(self, session, path, callbacks=[]):
zc.zk.ZooKeeper._ZooKeeper__zkfuncs[zookeeper.DELETED_EVENT] = 'exists'
self.session = session
self.path = path
self.callbacks = callbacks
self._set_watch()
def _set_watch(self):
"""
Internal function, not intended for external calling
"""
self.key = (self.event_type, self.path)
if self.session.watches.add(self.key, self):
try:
self._watch_key()
except zookeeper.ConnectionLossException:
watches = set(self.session.watches.pop(self.key))
for w in watches:
w._deleted()
if self.session in watches:
watches.remove(self.session)
if watches:
zc.zk.logger.critical('lost watches %s' % watches)
raise
pass
else:
self._notify(self.session.exists(self.path))
def _watch_key(self):
"""
Internal function, not intended for external calling
"""
zkfunc = getattr(zookeeper, 'exists')
def handler(h, t, state, p, reraise=False):
if state != zookeeper.CONNECTED_STATE:
zc.zk.logger.warning(
"Node watcher event %r with non-connected state, %r",
t, state)
return
try:
assert h == self.session.handle
assert state == zookeeper.CONNECTED_STATE
assert p == self.path
if self.key not in self.session.watches:
return
assert t == self.event_type or \
t == self.event_type | pettingzoo.testing.TESTING_FLAG
try:
v = zkfunc(self.session.handle, self.path, handler)
except zookeeper.NoNodeException:
if t & pettingzoo.testing.TESTING_FLAG:
v = None
for watch in self.session.watches.watches(self.key):
watch._notify(v)
else:
self._rewatch()
else:
for watch in self.session.watches.watches(self.key):
watch._notify(v)
except:
exc_class, exc, tb = sys.exc_info()
sys.stderr.write(str(exc_class) + "\n")
traceback.print_tb(tb)
zc.zk.logger.exception(
"%s(%s) handler failed", 'exists', self.path)
if reraise:
raise exc_class, exc, tb
handler(
self.session.handle,
self.event_type,
self.session.state,
self.path,
True)
def _rewatch(self):
"""
Internal function, not intended for external calling
"""
        #FIXME: this is not being exercised in tests
for watch in self.session.watches.pop(self.key):
try:
self.path = self.session.resolve(self.path)
            except (zookeeper.NoNodeException, zc.zk.LinkLoop):
zc.zk.logger.exception("%s path went away", watch)
watch._deleted()
else:
self._set_watch()
def _notify(self, data):
"""
Internal function, not intended for external calling
"""
if data == None:
for callback in list(self.callbacks):
try:
callback(self)
except Exception, v:
self.callbacks.remove(callback)
if isinstance(v, zc.zk.CancelWatch):
zc.zk.logger.debug(
"cancelled watch(%r, %r)", self, callback)
else:
zc.zk.logger.exception("watch(%r, %r)", self, callback)
|
k-okada/vcstools
|
setup.py
|
Python
|
bsd-3-clause
| 1,330
| 0.001504
|
from setuptools import setup
import imp
def get_version():
ver_file = None
try:
ver_file, pathname, description = imp.find_module('__version__', ['src/vcstools'])
vermod = imp.load_module('__version__', ver_file, pathname, description)
version = vermod.version
return version
finally:
if ver_file is not None:
ver_file.close()
setup(name='vcstools',
version=get_version(),
packages=['vcstools'],
package_dir={'': 'src'},
scripts=[],
install_requires=['pyyaml', 'python-dateutil'],
author="Tully Foote, Thibault Kruse, Ken Conley",
author_email="tfoote@osrfoundation.org",
url="http://wiki.ros.org/vcstools",
download_url="http://download.ros.org/downloads/vcstools/",
keywords=["scm", "vcs", "git", "s
|
vn", "hg", "bzr"],
classifiers=["Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 3",
"License :: OSI Approved :: B
|
SD License"],
description="VCS/SCM source control library for svn, git, hg, and bzr",
long_description="""\
Library for managing source code trees from multiple version control systems.
Current supports svn, git, hg, and bzr.
""",
license="BSD")
|
fmin2958/POCS
|
panoptes/mount/ioptron.py
|
Python
|
mit
| 10,044
| 0.003189
|
import re
from astropy import units as u
from astropy.coordinates import SkyCoord
from astropy.time import Time
from panoptes.mount.mount import AbstractMount
from ..utils.logger import has_logger
from ..utils.config import load_config
from ..utils import error as error
@has_logger
class Mount(AbstractMount):
"""
Mount class for iOptron mounts. Overrides the base `initialize` method
and providers some helper methods to convert coordinates.
"""
def __init__(self, *args, **kwargs):
self.logger.info('Creating iOptron mount')
super().__init__(*args, **kwargs)
self.config = load_config()
# Regexp to match the iOptron RA/Dec format
self._ra_format = '(?P<ra_millisecond>\d{8})'
self._dec_format = '(?P<dec_sign>[\+\-])(?P<dec_arcsec>\d{8})'
self._coords_format = re.compile(self._dec_format + self._ra_format)
self._raw_status = None
self._status_format = re.compile(
'(?P<gps>[0-2]{1})' +
'(?P<system>[0-7]{1})' +
'(?P<tracking>[0-4]{1})' +
'(?P<movement_speed>[1-9]{1})' +
'(?P<time_source>[1-3]{1})' +
'(?P<hemisphere>[01]{1})'
)
self._status_lookup = {
'gps': {
'0': 'Off',
'1': 'On',
'2': 'Data Extracted'
},
'system': {
'0': 'Stopped - Not at Zero Position',
'1': 'Tracking (PEC disabled)',
'2': 'Slewing',
'3': 'Guiding',
'4': 'Meridian Flipping',
'5': 'Tracking (PEC enabled)',
'6': 'Parked',
'7': 'Stopped - Zero Position'
},
'tracking': {
'0': 'Sidereal',
'1': 'Lunar',
'2': 'Solar',
'3': 'King',
'4': 'Custom'
},
'movement_speed': {
'1': '1x sidereal',
'2': '2x sidereal',
'3': '8x sidereal',
'4': '16x sidereal',
'5': '64x sidereal',
'6': '128x sidereal',
'7': '256x sidereal',
'8': '512x sidereal',
'9': 'Max sidereal',
},
'time_source': {
'1': 'RS-232',
'2': 'Hand Controller',
'3': 'GPS'
},
'hemisphere': {
'0': 'Southern',
'1': 'Northern'
}
}
self.logger.info('Mount created')
##################################################################################################
# Properties
##################################################################################################
@property
def is_parked(self):
""" bool: Mount parked status. """
self._is_parked = 'Parked' in self.status().get('system', '')
return self._is_parked
@property
def is_home(self):
""" bool: Mount home status. """
self._is_home = 'Stopped - Zero Position' in self.status().get('system', '')
return self._is_home
@property
def is_tracking(self):
""" bool: Mount tracking status. """
self._is_tracking = 'Tracking' in self.status().get('system', '')
return self._is_tracking
@property
def is_slewing(self):
""" bool: Mount slewing status. """
self._is_slewing = 'Slewing' in self.status().get('system', '')
return self._is_slewing
##################################################################################################
# Public Methods
##################################################################################################
def initialize(self):
""" Initialize the connection with the mount and setup for location.
iOptron mounts are initialized by sending the following two commands
to the mount:
* Version
* MountInfo
If the mount is successfully initialized, the `_setup_location_for_mount` method
is also called.
Returns:
bool: Returns the value from `self.is_initialized`.
"""
self.logger.info('Initializing {} mount'.format(__name__))
if not self.is_connected:
self.connect()
if self.is_connected and not self.is_initialized:
# We trick the mount into thinking it's initialized while we
# initialize otherwise the `serial_query` method will test
# to see if initialized and be put into loop.
self.is_initialized = True
actual_version = self.serial_query('version')
actual_mount_info = self.serial_query('mount_info')
expected_version = self.commands.get('version').get('response')
expected_mount_info = self.commands.get( 'mount_info').get('response')
self.is_initialized = False
# Test our init procedure for iOptron
if actual_version != expected_version or actual_mount_info != expected_mount_info:
self.logger.debug('{} != {}'.format(actual_version, expected_version))
self.logger.debug('{} != {}'.format(actual_mount_info, expected_mount_info))
raise error.MountNotFound('Problem initializing mount')
else:
self.is_initialized = True
self._setup_location_for_mount()
self.logger.info('Mount initialized: {}'.format(self.is_initialized))
return self.is_initialized
##################################################################################################
# Private Methods
##################################################################################################
def _setup_location_for_mount(self):
"""
Sets the mount up to the current location. Mount must be initialized first.
This uses mount.location (an astropy.coords.EarthLocation) to set most of the params and the rest is
read from a config file. Users should not call this directly.
Includes:
* Latitude set_long
* Longitude set_lat
* Daylight Savings disable_daylight_savings
* Universal Time Offset set_gmt_offset
* Current Date set_local_date
* Current Time set_local_time
"""
assert self.is_initialized, self.logger.warning('Mount has not been initialized')
assert self.location is not None, self.logger.warning( 'Please set a location before attempting setup')
self.logger.info('Setting up mount for location')
# Location
# Adjust the lat/long for format expected by iOptron
lat = '{:+07.0f}'.format(self.location.latitude.to(u.arcsecond).value)
lon = '{:+07.0f}'.format(self.location.longitude.to(u.arcsecond).value)
self.serial_query('set_long', lon)
self.serial_query('set_lat', lat)
# Time
self.serial_query('disable_daylight_savings')
gmt_offset = self.config.get('location').get('gmt_offset', 0)
self.serial_query('set_gmt_offset', gmt_offset)
now = Time.now() + gmt_offset * u.minute
self.serial_query('set_local_time', now.datetime.strftime("%H%M%S"))
self.serial_query('set_local_date', now.datetime.strftime("%y%m%d"))
def _mount_coord_to_skycoord(self, mount_coords):
"""
Converts between iOptron RA/Dec format and a SkyCoord
Args:
mount_coords (str): Coordinates as returned by mount
Returns:
astropy.SkyCoord: Mount coordinates as astropy SkyCoord with
EarthLocation included.
"""
coords_match = self._coords_format.fullmatch(mount_coords)
coords = None
self.logger.info("Mount coordinates: {}".format(coords_match))
if coords_match is not None:
ra = (coords_match.group('ra_millisecond') * u.millisecond).to(u.hour)
dec = (coords_match.group('dec_arcsec') * u.centiarcsecond).to(u.arcsec)
|
e-gob/plataforma-kioscos-autoatencion
|
scripts/ansible-play/.venv/lib/python2.7/site-packages/ansible/plugins/terminal/vyos.py
|
Python
|
bsd-3-clause
| 1,700
| 0.000588
|
#
# (c) 2016 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import re
from ansible.plugins.terminal import TerminalBase
from ansible.errors import AnsibleConnectionFailure
class TerminalModule(TerminalBase):

    terminal_stdout_re = [
re.compile(br"[\r\n]?[\w+\-\.:\/\[\]]+(?:\([^\)]+\)){,3}(?:>|#) ?$"),
re.compile(br"\@[\w\-\.]+:\S+?[>#\$] ?$")
]
terminal_stderr_re = [
re.compile(br"\n\s*Invalid command:"),
re.compile(br"\nCommit failed"),
re.compile(br"\n\s+Set failed"),
]
terminal_length = os.getenv('ANSIBLE_VYOS_TERMINAL_LENGTH', 10000)
def on_open_shell(self):
try:
for cmd in (b'set terminal length 0', b'set terminal width 512'):
self._exec_cli_command(cmd)
self._exec_cli_command(b'set terminal length %d' % self.terminal_length)
except AnsibleConnectionFailure:
raise AnsibleConnectionFailure('unable to set terminal parameters')
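# --- Editor's addition: hedged sanity check, not part of the original file. ---
# Quick illustration of what the prompt regexes above match; the sample
# prompt bytes are made up but typical of a VyOS login shell.
if __name__ == "__main__":
    sample = b"vyos@vyos:~$ "
    # the second stdout pattern targets user@host:path$ style prompts
    assert TerminalModule.terminal_stdout_re[1].search(sample)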
|
akellne/simhash
|
setup.py
|
Python
|
mit
| 639
| 0.045383
|
#!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name = 'simhash',
version = '1.8.0',
keywords = ('simhash'),
description = 'A Python implementation of Simhash Algorithm',
license = 'MIT License',
url = 'http://leons.im/posts/a-python-implementation-of-simhash-algorithm/',
author = '1e0n',
author_email = 'i@leons.im',
packages = find_packages(),
include_package_data = True,
platforms = 'any',
install_requires = [],
tests_require = [
'nose',
'numpy',
'scipy',
        'scikit-learn',
],
test_suite = "nose.collector",
)
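# --- Editor's addition: hedged usage sketch, not part of the original file. ---
# What the packaged library does, per the project URL above; the API names
# are assumed from the upstream README, so this is kept as a comment:
#
#   from simhash import Simhash
#   a = Simhash("How are you? I am fine. Thanks.")
#   b = Simhash("How are you? I am fine. Thank you.")
#   print(a.distance(b))  # small Hamming distance -> near-duplicate texts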
|
EBI-Metagenomics/emgapi
|
emgapi/migrations/0007_split_run.py
|
Python
|
apache-2.0
| 7,178
| 0.002647
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.10 on 2018-04-24 08:25
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
def populate_status(apps, schema_editor):
Status = apps.get_model("emgapi", "Status")
st = (
(1, "draft"),
(2, "private"),
(3, "cancelled"),
(4, "public"),
(5, "suppressed"),
(6, "killed"),
(7, "temporary_suppressed"),
(8, "temporary_killed"),
)
_statuses = list()
for s in st:
_statuses.append(
Status(
status_id=s[0],
status=s[1],
)
)
Status.objects.bulk_create(_statuses)
def populate_runs(apps, schema_editor):
# Too slow
# migrations.RunSQL(
# """UPDATE ANALYSIS_JOB t1
# SET RUN_ID = (
# SELECT t2.RUN_ID FROM RUN t2
# WHERE t1.EXTERNAL_RUN_IDS=t2.EXTERNAL_RUN_IDS
# """
# ),
AnalysisJob = apps.get_model("emgapi", "AnalysisJob")
Run = apps.get_model("emgapi", "Run")
for aj in AnalysisJob.objects.all():
aj.run = Run.objects.get(accession=aj.external_run_ids)
aj.save()
class Migration(migrations.Migration):
dependencies = [
('emgapi', '0006_download'),
]
operations = [
migrations.AlterField(
model_name='experimenttype',
            name='experiment_type_id',
field=models.SmallIntegerField(db_column='EXPERIMENT_TYPE_ID', primary_key=True, serialize=False),
),
migrations.RenameField(
model_name='analysisjob',
old_name='accession',
new_name='external_run_ids',
),
migrations.AlterField(
model_name='analysisjob',
name='external_run_ids',
field=models.CharField(blank=True, db_column='EXTERNAL_RUN_IDS', max_length=100, null=True),
),
migrations.AlterField(
model_name='analysisjob',
name='secondary_accession',
field=models.CharField(blank=True, db_column='SECONDARY_ACCESSION', max_length=100, null=True),
),
migrations.AlterField(
model_name='analysisjob',
name='job_operator',
field=models.CharField(db_column='JOB_OPERATOR', blank=True, null=True, max_length=15),
),
migrations.AlterField(
model_name='analysisjob',
name='pipeline',
field=models.ForeignKey(blank=True, db_column='PIPELINE_ID', null=True, on_delete=django.db.models.deletion.CASCADE, related_name='analysis', to='emgapi.Pipeline'),
),
migrations.AlterField(
model_name='analysisjob',
name='submit_time',
field=models.DateTimeField(blank=True, db_column='SUBMIT_TIME', null=True),
),
migrations.AlterModelOptions(
name='analysisjob',
options={'ordering': ('job_id',)},
),
migrations.CreateModel(
name='Status',
fields=[
('status_id', models.SmallIntegerField(db_column='STATUS_ID', primary_key=True, serialize=False)),
('status', models.CharField(db_column='STATUS', max_length=25)),
],
options={
'db_table': 'STATUS',
'ordering': ('status_id',),
},
),
migrations.RunPython(populate_status),
migrations.CreateModel(
name='Run',
fields=[
('run_id', models.BigAutoField(db_column='RUN_ID', primary_key=True, serialize=False)),
('accession', models.CharField(blank=True, db_column='ACCESSION', max_length=80, null=True)),
('secondary_accession', models.CharField(blank=True, db_column='SECONDARY_ACCESSION', max_length=100, null=True)),
('instrument_platform', models.CharField(blank=True, db_column='INSTRUMENT_PLATFORM', max_length=100, null=True)),
('instrument_model', models.CharField(blank=True, db_column='INSTRUMENT_MODEL', max_length=100, null=True)),
],
options={
'db_table': 'RUN',
},
),
migrations.AddField(
model_name='run',
name='status_id',
field=models.ForeignKey(db_column='STATUS_ID', on_delete=django.db.models.deletion.CASCADE, related_name='runs', to='emgapi.Status', default=2),
),
migrations.AddField(
model_name='run',
name='sample',
field=models.ForeignKey(blank=True, db_column='SAMPLE_ID', null=True, on_delete=django.db.models.deletion.CASCADE, related_name='runs', to='emgapi.Sample'),
),
migrations.AddField(
model_name='run',
name='study',
field=models.ForeignKey(blank=True, db_column='STUDY_ID', null=True, on_delete=django.db.models.deletion.CASCADE, related_name='runs', to='emgapi.Study'),
),
migrations.AddField(
model_name='run',
name='experiment_type',
field=models.ForeignKey(blank=True, db_column='EXPERIMENT_TYPE_ID', null=True, on_delete=django.db.models.deletion.CASCADE, related_name='runs', to='emgapi.ExperimentType'),
),
migrations.AddField(
model_name='analysisjob',
name='run',
field=models.ForeignKey(blank=True, db_column='RUN_ID', null=True, on_delete=django.db.models.deletion.CASCADE, related_name='analysis', to='emgapi.Run'),
),
migrations.AlterUniqueTogether(
name='run',
unique_together=set([('run_id', 'accession'), ('accession', 'secondary_accession')]),
),
migrations.AlterModelOptions(
name='analysisjobdownload',
options={'ordering': ('pipeline', 'group_type', 'alias')},
),
migrations.AlterModelOptions(
name='run',
options={'ordering': ('accession',)},
),
migrations.AlterModelOptions(
name='studydownload',
options={'ordering': ('pipeline', 'group_type', 'alias')},
),
migrations.RunSQL(
"""INSERT INTO RUN (ACCESSION, SECONDARY_ACCESSION,
STATUS_ID, SAMPLE_ID, STUDY_ID, EXPERIMENT_TYPE_ID,
INSTRUMENT_PLATFORM, INSTRUMENT_MODEL)
SELECT distinct EXTERNAL_RUN_IDS, SECONDARY_ACCESSION,
RUN_STATUS_ID, SAMPLE_ID, STUDY_ID, EXPERIMENT_TYPE_ID,
group_concat(
INSTRUMENT_PLATFORM
ORDER BY INSTRUMENT_PLATFORM
SEPARATOR ','
) as INSTRUMENT_PLATFORM,
group_concat(
INSTRUMENT_MODEL
ORDER BY INSTRUMENT_MODEL
SEPARATOR ','
) as INSTRUMENT_MODEL
FROM ANALYSIS_JOB
GROUP BY EXTERNAL_RUN_IDS
"""
),
migrations.RunPython(populate_runs),
]
|
firesunCN/My_CTF_Challenges
|
bctf_2017/diary/diary_server/firecms/oauth_client/views.py
|
Python
|
gpl-3.0
| 9,288
| 0.01292
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.shortcuts import render
from django.shortcuts import render_to_response
from django.template import RequestContext
from django.http import JsonResponse
from django.http import HttpResponseRedirect
from django.http import Http404, HttpResponse
from django.shortcuts import redirect
from django.views.decorators.clickjacking import xframe_options_exempt
from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.http import require_http_methods
from django import forms
from django.contrib.auth.forms import UserCreationForm
from django.contrib import auth
from django.contrib.auth.models import User
from django.contrib.auth import get_user_model
from oauth_client.models import Survey, Diary, Url
from random import Random
import time
import requests
import json
import re
import hashlib
oauth_server="http://auth.bctf.xctf.org.cn"
app_server="http://diary.bctf.xctf.org.cn"
client_id="nSGRX6eRNbkHBy457ZfiNG1JrUjJ9k7hZigKYA1q"
client_secret="S5Elk8WWpylMbNedDlpN12ds0zCgMNzcJCbHQqnc32Td4YKMpkLEX8F8i02I8DuYbcwZQvn0wbiiSlGSNHGhlLoOxos4xqHE5TCHvFlklUDkPF4VtNBVVdSKY8wC9btA"
UserModel = get_user_model()
# Create your views here.
@require_http_methods(["GET"])
def index(request):
return render(request, 'index.html')
@require_http_methods(["GET"])
def about(request):
return render(request, 'about.html')
@require_http_methods(["GET"])
def logout(request):
auth.logout(request)
return HttpResponseRedirect(oauth_server+"/accounts/logout/")
def _authenticate(request, username):
try:
user = UserModel._default_manager.get_by_natural_key(username)
except :
pass
else:
if _user_can_authenticate(user):
return user
return None
def _user_can_authenticate(user):
"""
Reject users with is_active=False. Custom user models that don't have
that attribute are allowed.
"""
is_active = getattr(user, 'is_active', None)
return is_active or is_active is None
@require_http_methods(["GET"])
@csrf_exempt
def receive_authcode(request):
if 'start_oauth' in request.session and request.session['start_oauth'] == 1:
request.session['start_oauth'] = 0
else:
raise Http404()
try:
if 'code' in request.GET:
code = request.GET.get('code', '').strip()
if code=='':
raise Http404()
url = oauth_server+'/o/token/'
s = requests.Session()
var = {'grant_type':'authorization_code',
'code':code,
'redirect_uri':app_server+'/o/receive_authcode',
'client_id':client_id,
'client_secret':client_secret,
}
r = s.post(url=url,data=var)
res=json.loads(r.text)
if 'access_token' in res:
access_token=res['access_token']
url = oauth_server+'/o/get-username/'
s = requests.Session()
var = {'token':access_token,}
headers = {'Authorization': 'Bearer '+access_token}
r = s.post(url=url,data=var,headers=headers)
res=json.loads(r.text)
username=res['username']
user = _authenticate(request, username)
if user!=None:
auth.login(request, user)
return redirect('/')
else:
new_user = User.objects.create_user(username=username, password="e6gqxLHvFR74LNBLvJpFDw20IrQH6nef")
new_user.save()
user = _authenticate(request, username)
if user!=None:
auth.login(request, user)
return redirect('/')
else:
raise Http404()
except:
pass
raise Http404()
@require_http_methods(["GET"])
def login(request):
if request.user.is_authenticated:
return redirect('/')
auth_url = oauth_server+"/o/authorize/?client_id="+client_id+"&state=preauth&response_type=code"
request.session['start_oauth'] = 1
return HttpResponseRedirect(auth_url)
@require_http_methods(["GET"])
def diary(request):
if not request.user.is_authenticated:
raise Http404()
return render(request, 'diary.html')
@require_http_methods(["GET","POST"])
def survey(request):
if not request.user.is_authenticated:
raise Http404()
if request.method != 'POST':
return render(request, 'survey.html')
rate = request.POST.get('rate', '')
if rate=='1':
rate=1
elif rate=='2':
rate=2
elif rate=='3':
rate=3
elif rate=='4':
rate=4
elif rate=='5':
rate=5
else:
return render(request, 'survey.html', {'msg': 'Rate is invalid!'})
suggestion = request.POST.get('suggestion', '').strip()
if len(suggestion) > 2000 :
return render(request, 'survey.html', {'msg': 'Advice is too long!'})
if len(suggestion) <= 0:
return render(request, 'survey.html', {'msg': 'Advice is empty!'})
try:
Survey.objects.get(username=request.user.username,rate=rate,suggestion=suggestion)
except Survey.DoesNotExist:
Survey.objects.create(username=request.user.username,rate=rate,suggestion=suggestion)
if request.user.username=="firesun":
return render(request, 'survey.html', {'msg': 'Thank you. I will give you the flag. Flag is bctf{bFJbSakOT72T8HbDIrlst4kXGYbaHWgV}'})
else:
return render(request, 'survey.html', {'msg': 'Thank you. But the boss said only admin can get the flag after he finishes this survey, XD'})
@require_http_methods(["GET","POST"])
def edit_diary(request):
if not request.user.is_authenticated:
raise Http404()
if request.user.username=="firesun":
return HttpResponse("Don't do this!")
if request.method != 'POST':
return render(request, 'edit_diary.html')
content = request.POST.get('content', '').strip()
if len(content) > 1000 :
return render(request, 'edit_diary.html', {'msg': 'Too long!'})
if len(content) <= 0:
return render(request, 'edit_diary.html', {'msg': 'Write something!'})
try:
diary=Diary.objects.get(username=request.user.username)
Diary.objects.filter(id=diary.id).update(content=content)
except Diary.DoesNotExist:
Diary.objects.create(username=request.user.username,content=content)
return redirect('/diary/')
@require_http_methods(["GET"])
def report_status(request):
try:
url=Url.objects.get(id=request.GET.get('id', ''))
if url.is_read:
return HttpResponse("Admin has visited the address.")
else:
return HttpResponse("Admin doesn't visit the address yet.")
except:
raise Http404()
def random_str(randomlength=5):
str = ''
chars = '0123456789abcdef'
    length = len(chars) - 1
random = Random()
for i in range(randomlength):
str+=chars[random.randint(0, length)]
return str
@require_http_methods(["GET","POST"])
def report_bugs(request):
if not request.user.is_authenticated:
raise Http404()
if request.method != 'POST':
captcha=random_str()
request.session['captcha']=captcha
return render(request, 'report.html',{'captcha': captcha})
else:
        if ('captcha' in request.session) and (request.session['captcha'] == hashlib.md5(request.POST.get('captcha', '')).hexdigest()[0:5]):
captcha=request.session['captcha']
url = request.POST.get('url', '').strip()
if not url.startswith('http://diary.bctf.xctf.org.cn/'):
return render(request, 'report.html', {'msg': 'We only care about the problem from this website (http://diary.bctf.xctf.org.cn)!','captcha': captcha})
if len(url) > 200 or len(url) <= 0:
return render(request, 'report.html', {'msg': 'URL is too long!','captcha': captcha})
if not
|