repo_name stringlengths 5 100 | path stringlengths 4 231 | language stringclasses 1 value | license stringclasses 15 values | size int64 6 947k | score float64 0 0.34 | prefix stringlengths 0 8.16k | middle stringlengths 3 512 | suffix stringlengths 0 8.17k |
|---|---|---|---|---|---|---|---|---|
AASHE/hub | hub/apps/content/types/academic.py | Python | mit | 2,754 | 0 | from django.db import models
from model_utils import Choices
from ...metadata.models import ProgramType, SustainabilityTopic
from ..models import ContentType, ContentTypeManager
from ..search import BaseIndex
class AcademicProgram(ContentType):
DISTANCE_CHOICES = Choices(
('local', 'Local Only'),
('distance', 'Distance Education'),
('both', 'Both'),
)
COMMITMENT_CHOICES = Choices(
('full', 'Full-Time'),
('part', 'Part-Time'),
('both', 'Both'),
)
program_type = models.ForeignKey(
ProgramType, null=True, verbose_name='Program Type')
outcomes = models.TextField(
'Learning Outcomes', blank=True, null=True,
help_text="Consider completing if different from description.")
completion = models.CharField(
'Expected completion time', max_length=128,
blank=True, null=True, help_text='(e.g., "2.5 years" or "12 months")')
num_students = models.PositiveIntegerField(
'Approximate number of students completing program annually',
blank=True, null=True, help_text="""Enter student headcounts instead of
FTE. We recommend referring to Integrated Postsecondary Education Data
System (IPEDS) data and including an average over five years.""")
distance = models.CharField(
'Distance Education', max_length=20,
choices=DISTANCE_CHOICES, blank=True, null=True)
commitment = models.Ch | arFi | eld(
'Commitment', max_length=20,
choices=COMMITMENT_CHOICES, blank=True, null=True)
objects = ContentTypeManager()
class Meta:
verbose_name = 'Academic Program'
verbose_name_plural = 'Academic Programs'
@classmethod
def label_overrides(cls):
return {
'title': 'Program Name',
'description': 'Description',
'author': 'Presenter',
'authors': 'Presenters',
'date_created': 'Year Founded',
}
@classmethod
def required_field_overrides(cls):
required_list = super(AcademicProgram, cls).required_field_overrides()
required_list.append('disciplines')
return required_list
@classmethod
def get_custom_filterset(cls):
from ...browse.filterset import AcademicBrowseFilterSet
return AcademicBrowseFilterSet
@classmethod
def required_metadata(cls):
return {
'website': {'max': 5, 'min': 1}, # required, up to 5
}
@classmethod
def preset_topics(cls):
"""
Require "Curriculum" in topics
"""
return [SustainabilityTopic.objects.get(name="Curriculum")]
class AcademicProgramIndex(BaseIndex):
def get_model(self):
return AcademicProgram
|
lsaffre/djangosite | djangosite/models.py | Python | bsd-2-clause | 1,293 | 0.001547 | # -*- coding: UTF-8 -*-
# Copyright 2013-2014 by Luc Saffre.
# License: BSD, see LICENSE for more details.
"""This module is based on Ross McFarland idea to simply send the
server startup signal "at the end of your last app's models.py file"
in his post `Django Startup Signal (Sun | 24 June 2012)
<http://www.xormedia.com/django-startup-signal/>`_.
This adds a subtle hack to also cope with postponed imports. If there
are postponed apps, then :mod:`djangosite.models` must itself raise an
`ImportError` so that it get | s itself postponed and imported another
time.
Note that `loading.cache.postponed` contains all postponed imports
even if they succeeded at the second attempt.
"""
# cannot use logging here because it causes duplicate logger setup
# in certain situations.
# import logging
# logger = logging.getLogger(__name__)
# import sys
from djangosite import AFTER17, startup
if not AFTER17:
from django.db.models import loading
if len(loading.cache.postponed) > 0:
# i.e. if this is the first time
if not 'djangosite' in loading.cache.postponed:
msg = "Waiting for postponed apps (%s) to import" % \
loading.cache.postponed
# logging.info("20140227 " + msg)
raise ImportError(msg)
startup()
|
nomuna/opencv_tesseract | ocr_on_cropped.py | Python | mit | 780 | 0.010256 | import cv2
import cv2.cv as cv
import tesseract
cv.NamedWindow("win")
img = cv2.imread("GBIAe.jpg") # numpy.ndarray
height, width, channels = img.shape
# crop the image
crop = (2*height/3, width/3)
roi = img[crop | [0]:height, crop[1]:2*width/3]
# Convert the cropped area, which is a numpy.ndarray, to cv2.cv.iplimage
bitmap = cv.CreateImageHeader((roi.shape[1], roi.shape[0]), cv.IPL_DEPTH_8U, 3)
cv.SetData(bitmap, roi.tostring(), roi.dtype.itemsize * 3 * roi.shape[1] )
# Extract the text with te | sseract
api = tesseract.TessBaseAPI()
api.Init(".","eng", tesseract.OEM_DEFAULT)
api.SetPageSegMode(tesseract.PSM_AUTO)
tesseract.SetCvImage( bitmap, api)
text=api.GetUTF8Text()
conf=api.MeanTextConf()
print("text %s" % text)
api.End()
cv.ShowImage("win", bitmap)
cv.WaitKey()
|
depboy/p2pool-depboy | p2pool/bitcoin/networks/myriad_groestl.py | Python | gpl-3.0 | 1,216 | 0.006579 | import os
import platform
from twisted.internet import defer
from .. import data, helper
from p2pool.util import pack
P2P_PREFIX = 'af4576ee'.decode('hex')
P2P_PORT = 10888
ADDRESS_VERSION = 50
RPC_PORT = 10889
RPC_CHECK = defer.inlineCallbacks(lambda bitcoind: defer.returnValue(
'myriadcoinaddress' in (yield bitcoind.rpc_help()) and
not (yield bitcoind.rpc_getinfo())['testnet']
))
SUBSIDY_FUNC | = lambda height: 1000*100000000 >> (height + 1)//967680
POW_FUNC = lambda data: pack.IntType(256).unpack(__import__('groestl_hash').getPoWHash(data))
BLOCK_PERIOD = 150 # s
SYMBOL = 'MYR'
CONF_FILE_FUNC=lambda: os.path.join(os.path.join(os.environ['APPDATA'], 'myriadcoin') if platform.system() == 'Windows' else os.path.expanduser('~/Library | /Application Support/myriadcoin/') if platform.system() == 'Darwin' else os.path.expanduser('~/.myriadcoin'), 'myriadcoin.conf')
BLOCK_EXPLORER_URL_PREFIX = 'http://birdonwheels5.no-ip.org/block/'
ADDRESS_EXPLORER_URL_PREFIX = 'http://birdonwheels5.no-ip.org/address/'
TX_EXPLORER_URL_PREFIX = 'http://birdonwheels5.no-ip.org/tx/'
SANE_TARGET_RANGE=(2**256//2**32//1000 - 1, 2**256//2**27 - 1)
DUMB_SCRYPT_DIFF = 1
DUST_THRESHOLD = 0.001e8
|
fake-name/ReadableWebProxy | WebMirror/management/rss_parser_funcs/feed_parse_extractOrtatranslationsBlogspotCom.py | Python | bsd-3-clause | 570 | 0.033333 |
def extractOrtatranslationsBlogspotCom(item):
'''
Parser for 'ortatranslations.blogspot. | com'
'''
vol, chp, frag, postfix = extractVolChapterFragmentPos | tfix(item['title'])
if not (chp or vol) or "preview" in item['title'].lower():
return None
tagmap = [
('PRC', 'PRC', 'translated'),
('Loiterous', 'Loiterous', 'oel'),
]
for tagname, name, tl_type in tagmap:
if tagname in item['tags']:
return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
return False
|
MauHernandez/cyclope | cyclope/widgets.py | Python | gpl-3.0 | 7,445 | 0.002284 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2010-2013 Código Sur Sociedad Civil.
# All rights reserved.
#
# This file is part of Cyclope.
#
# Cyclope is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Cyclope is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
widgets
-------
"""
import re
from django import forms
from django.conf import settings
from django.forms.util import flatatt
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext_lazy as _
from django.utils.html import escape
from django.utils.text import truncate_words
from django.utils.html import conditional_escape
from django.utils.encoding import force_unicode
from django.core.urlresolvers import reverse
from django.contrib.admin.widgets import ForeignKeyRawIdWidget, AdminTextareaWidget
from markitup.widgets import AdminMarkItUpWidget, MarkItUpWidget
from cyclope import settings as cyc_settings
def get_default_text_widget():
if cyc_settings.CYCLOPE_TEXT_STYLE == 'textile':
widget = FBAdminMarkItUpWidget
elif cyc_settings.CYCLOPE_TEXT_STYLE == 'wysiwyg':
widget = CKEditor
else:
widget = AdminTextareaWidget
return widget
class CKEditor(forms.Textarea):
"""
Widget providing CKEditor for Rich Text Editing.
"""
class Media:
js = ('ckeditor/ckeditor.js', )
def render(self, name, value, attrs={}):
language = settings.LANGUAGE_CODE[:2]
if value is None: value = ''
final_attrs = self.build_attrs(attrs, name=name)
return mark_safe(u'''<textarea%s>%s</textarea>
<script type="text/javascript">
CKEDITOR.replace("%s",
{
toolbar : // http://docs.cksource.com/CKEditor_3.x/Developers_Guide/Toolbar
[
| ['Cut','Copy','Paste','PasteText'],
['Undo','Redo','-','Find','Replace','-','SelectAll','RemoveFormat'],
['BidiLtr', 'BidiRtl'],
'/',
['Bold','Italic','Underline','Strike','-','Subscript','Superscript'],
['NumberedList','BulletedList','-','Outdent','Indent','Blockquote'],
['JustifyLeft','Justif | yCenter','JustifyRight','JustifyBlock'],
['Link','Unlink'],
['Image','Flash','Table','HorizontalRule'],
'/',
['Styles','Format','Font','FontSize'],
['TextColor','BGColor']
],
skin: "v2",
height:"291",
width:"618",
filebrowserUploadUrl : "%s",
filebrowserBrowseUrl : "%s",
language : "%s",
}
);
// Customizing dialogs
CKEDITOR.on( 'dialogDefinition', function( ev ){
var dialogName = ev.data.name;
var dialogDefinition = ev.data.definition;
if ( dialogName == 'link' )
{
dialogDefinition.removeContents( 'advanced' );
dialogDefinition.removeContents( 'upload' );
}
if ( dialogName == 'image' )
{
dialogDefinition.removeContents( 'advanced' );
dialogDefinition.removeContents( 'Upload' );
}
if ( dialogName == 'flash' )
{
dialogDefinition.removeContents( 'advanced' );
dialogDefinition.removeContents( 'Upload' );
}
});
</script>''' % (flatatt(final_attrs),
conditional_escape(force_unicode(value)),
final_attrs['id'],
"/", # FIXME http://docs.cksource.com/CKEditor_3.x/Developers_Guide/File_Browser_%28Uploader%29
reverse('fb_browse')+'?pop=3', # pop=3 is CKEditor
language))
class MultipleWidget(forms.Widget):
"""
Widget formed by multiple fields. Use with MultipleField.
"""
def __init__(self, fields, *args, **kwargs):
self.fields = fields
super(MultipleWidget, self).__init__(*args, **kwargs)
self._field_regexp = re.compile("multiple_(.*)")
def render(self, name, value, *args, **kwargs):
if value is None or value is u"":
value = {}
out_names = ['%s_multiple_%s' % (name, field_name) for field_name in self.fields.keys()]
out = []
out.append("<fieldset class='module'>")
#out.append('<a class="collapse-toggle" href="#"> Show </a>')
field_names, fields = self.fields.iterkeys(), self.fields.itervalues()
for field_name, field, out_name in zip(field_names, fields, out_names):
out.append("<div class='form-row'>")
out.append(unicode(field.label)+u": " )
out.append(field.widget.render(out_name, value.get(field_name)))
out.append("</div>")
out.append("</fieldset>")
return mark_safe(u"<div id='%s_multiple'>" % name + u'\n'.join(out) +u"</div>")
def value_from_datadict(self, data, files, name):
field_out_names = [(field_name, '%s_multiple_%s' % (name, field_name)) for field_name in self.fields.keys()]
values = {}
for field_name, out_name in field_out_names:
if type(self.fields[field_name]) == forms.fields.MultipleChoiceField:
values[field_name] = data.getlist(out_name)
else:
values[field_name] = data.get(out_name)
return values
class FBAdminMarkItUpWidget(AdminMarkItUpWidget):
def render(self, name, value, attrs=None):
html = super(MarkItUpWidget, self).render(name, value, attrs)
if self.auto_preview:
auto_preview = "$('a[title=\"Preview\"]').trigger('mouseup');"
else: auto_preview = ''
html += ('<script type="text/javascript">'
'(function($) { '
'$(document).ready(function() {'
' $("#%(id)s").markItUp(mySettings);'
' %(auto_preview)s '
'FileBrowserHelper.insertPicture("%(id)s");'
'});'
'})(jQuery);'
'</script>' % {'id': attrs['id'],
'auto_preview': auto_preview })
return mark_safe(html)
class Media:
js = (
cyc_settings.CYCLOPE_JQUERY_UI_PATH,
'/media_widget/jsi18n/',
'media_widget/cyclope_media_widget.js',
)
css = {
'all': (cyc_settings.CYCLOPE_JQUERY_UI_CSS_PATH,)
}
|
caioserra/apiAdwords | examples/adspygoogle/dfp/v201308/update_teams.py | Python | apache-2.0 | 2,513 | 0.006367 | #!/usr/bin/python
#
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This example updates teams by adding an ad unit to the first 5.
To determine which teams exist, run get_all_teams.py. To determine which ad
units exist, run get_all_ad_units.py
Tags: TeamService.getTeamsByStatement, TeamService.updateTeams
"""
__author__ = 'api.shamjeff@gmail.com (Jeff Sham)'
# Locate the client library. If module was installed via "setup.py" script, then
# the following two lines are not needed.
import os
import sys
sys.path.insert(0, os.path.join('..', '..', '..', '..'))
# Import appropriate classes from the client library.
from adspygoogle import DfpClient
from adspygoogle.common import Utils
# Initialize client object.
client = DfpClient(path=os.path.join('..', '..', '..', '..'))
# Initialize appropriate service.
team_service = client.GetService('TeamService', version='v201308')
# Set the ID of the ad unit to add to the teams.
ad_unit_id = 'INSERT_AD_UNIT_ID_HERE'
# Create a statement to select first 5 teams that aren't built-in.
filter_statement = {'query': 'WHERE id > 0 LIMIT 5'}
# Get teams by statement.
response = team_service.GetTeamsByStatement(filter_statement)[0]
teams = []
if 'results' in response:
teams = response['results']
if teams:
# Update each local team object by adding the ad unit to it.
for team in teams:
ad_unit_ids = []
if 'adUnitIds' in team:
ad_unit_ids = team['adUnitIds']
# Don't add the ad unit if the team has all inventory already.
if not Utils.BoolTypeConvert(team['hasAllInventory']):
ad_unit_ids.append(ad_unit_id)
team['adUnitIds'] | = ad_unit_ids
# Update teams on the server.
teams = team_service.UpdateTeams(teams)
# Display results.
if teams:
for team | in teams:
print ('Team with id \'%s\' and name \'%s\' was updated.'
% (team['id'], team['name']))
else:
print 'No teams were updated.'
else:
print 'No teams found to update.'
|
dpazel/music_rep | tests/transformation_tests/functions_tests/pitchfunctions_tests/test_general_pitch_function.py | Python | mit | 1,586 | 0.000631 | import logging
import sys
import unittest
from tonalmodel.diatonic_pitch import DiatonicPitch
from transformation.functions.pitchfunctions.general_ | pitch_function import GeneralPitchFunction
class TestGeneralPitchFunction(unittest.TestCase):
logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)
def setUp(self):
pass
def tearDown | (self):
pass
def test_simple_pitch_function(self):
p_map = {'A:7': 'Ab:7', 'Bb:6': 'B:6', 'Db:5': 'D:5'}
gpf = GeneralPitchFunction(p_map)
assert DiatonicPitch.parse('Ab:7') == gpf['A:7']
assert DiatonicPitch.parse('B:6') == gpf['Bb:6']
assert DiatonicPitch.parse('D:5') == gpf['Db:5']
print(gpf)
# Test assignment
gpf['Db:5'] = 'Ab:7'
assert DiatonicPitch.parse('Ab:7') == gpf['A:7']
assert DiatonicPitch.parse('B:6') == gpf['Bb:6']
assert DiatonicPitch.parse('Ab:7') == gpf['Db:5']
print(gpf)
def test_none_setting(self):
p_map = {'A:7': 'Ab:7', 'Bb:6': 'B:6', 'Db:5': None}
gpf = GeneralPitchFunction(p_map)
assert DiatonicPitch.parse('Ab:7') == gpf['A:7']
assert DiatonicPitch.parse('B:6') == gpf['Bb:6']
assert gpf['Db:5'] is None
gpf['A:7'] = None
assert gpf['A:7'] is None
assert DiatonicPitch.parse('B:6') == gpf['Bb:6']
assert gpf['Db:5'] is None
print(gpf)
gpf['A:7'] = None
assert gpf['A:7'] is None
assert DiatonicPitch.parse('B:6') == gpf['Bb:6']
assert gpf['Db:5'] is None
|
thiagopena/djangoSIGE | djangosige/apps/financeiro/models/__init__.py | Python | mit | 72 | 0 | # -*- coding: utf-8 -*- |
from .lancamen | to import *
from .plano import *
|
utarsuno/urbtek | universal_code/debugging.py | Python | apache-2.0 | 2,265 | 0.026932 | #!/usr/bin/env python3
# coding=utf-8
"""
This module, debugging.py, will contain code related to debugging (such as printing error messages).
"""
#import sys
#sys.path.insert(0, '/home/dev_usr/urbtek')
#from universal_code import system_operations as so
class MyException(Exception):
"""
Just something useful to have to throw some of my own custom exception.
"""
pass
class ParameterException(Exception):
"""
A custom exception for when a function receives bad parameter data.
"""
def __init__(self, message):
super(ParameterException, self).__init__(message)
class AbstractMethodNotImplementedException(Exception):
"""
A custom exception for when a function gets called that hasn't been set in a child class.
"""
def __init(self, message):
super(AbstractMethodNotImplementedException, self).__init__(message)
def raise_exception(exception, message):
raise exception(message)
TCP_LOCAL_HOST = 'tcp://127.0.0.1:'
LOCAL_HOST = '127.0.0.1'
NEXUS_DEV_RECEIVE_PORT = 40000
NEXUS_DEV_MANUAL_COMMUNICATION_PORT = 40001
NEXUS_DEV_AUTOMATED_COMMUNICATION_PORT = 40002
starting_port = NEXUS_DEV_AUTOMATED_COMMUNICATION_PORT + 1
def get_a_free_port():
global starting_port
# We can assume ports are free because ports above 30000 have been sealed off.
# TODO: THIS WILL BREAK WHEN MORE THAN DEV EXISTS.
starting_port += 1
return starting_port - 1
# Terminal font coloring and styling.
class TextColors:
HEADER = '\033[95m'
OK_BLUE = '\033[94m'
OK_GREEN = '\033[92m'
WARNING = '\033[93m'
FAIL = '\033[91m'
ENDC = '\033[0m'
BOLD = '\033[1m'
UNDERLINE = '\033[4m'
def print_text_with_color(text, color, | end=None):
if end is None:
print(color + text + TextColors.ENDC + '\n')
else:
print(color + text + TextColors.ENDC, end='')
def terminate(termination_message=''):
if termination_message is '':
print_text_with_color('Program termination has been initiated, good bye!', TextColors.FAIL)
else:
print_text_with_color(termination_message, TextColors.WARNING, '')
if not termination_message.endswith('.'):
print_text_with_color('. The program will now terminate.', TextColors.FAIL)
else: |
print_text_with_color(' The program will now terminate.', TextColors.FAIL)
exit()
|
messente/messente-python | messente/api/sms/api/delivery.py | Python | apache-2.0 | 1,647 | 0 | # -*- coding: utf-8 -*-
# Copyright 2016 Messente Communications OÜ
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from messente.api.sms.api import api
from messente.api.sms.api.error import ERROR_CODES
from messente.api.sms.api.response import Response
error_map = ERROR_CODES.copy()
error_map.update({
"FAILED 102": " ".join([
"No delivery r | eport yet, try again in 5 seconds"
]),
})
class DeliveryResponse(Response):
def __init__(self, *args, **kwargs):
Response.__init__(self, *args, **kwargs)
def _get_error_map(self):
return error_map
def get_result(self):
return self.status_text
class DeliveryAPI(api.API):
"""
Documentation:
http://messente.com/documentation/sms-messaging/delivery-repo | rt
"""
def __init__(self, **kwargs):
api.API.__init__(self, "delivery", **kwargs)
def get_dlr_response(self, sms_id):
r = DeliveryResponse(
self.call_api("get_dlr_response", dict(sms_unique_id=sms_id))
)
self.log_response(r)
return r
def get_report(self, sms_id):
return self.get_dlr_response(sms_id)
|
hivesolutions/netius | src/netius/middleware/annoyer.py | Python | apache-2.0 | 2,721 | 0.004781 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Hive Netius System
# Copyright (c) 2008-2020 Hive Solutions Lda.
#
# This file is part of Hive Netius System.
#
# Hive Netius System is free software: you can redistribute it and/or modify
# it under the terms of the Apache License as published by the Apache
# Foundation, either version 2.0 of the License, or (at your option) any
# later version.
#
# Hive Netius System is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# Apache License for more details.
#
# You should have received a copy of the Apache License along with
# Hive Netius System. If not, see <http://www.apache.org/licenses/>.
__author__ = "João Magalhães <joamag@hive.pt>"
""" The author(s) of the module """
__version__ = "1.0.0"
""" The version of the module """
__revision__ = "$LastChangedRevision$"
""" The revision number of the module """
__date__ = "$LastChangedDate$"
""" The last change date of the module """
__copyright__ = "Copyright (c) 2008-2020 Hive Solutions Lda."
""" The copyright for the module """
__license__ = "Apache License, Version 2.0"
""" The license for the module """
import sys
import time
import datetime
import threading
import netius
from .base import Middleware
class AnnoyerMiddleware(Middleware):
"""
Simple middleware that prints an "annoying" status message
to the standard output (stdout) from time to time providing
| a simple diagnostics strategy.
"""
def __init__(self, owner, period = 10.0):
Middleware.__init__(self, owner)
self.period = period
self._initial = None
self._thread = None
| self._running = False
def start(self):
Middleware.start(self)
self.period = netius.conf("ANNOYER_PERIOD", self.period, cast = float)
self._thread = threading.Thread(target = self._run)
self._thread.start()
def stop(self):
Middleware.stop(self)
if self._thread:
self._running = False
self._thread.join()
self._thread = None
def _run(self):
self._initial = datetime.datetime.utcnow()
self._running = True
while self._running:
delta = datetime.datetime.utcnow() - self._initial
delta_s = self.owner._format_delta(delta)
message = "Uptime => %s | Connections => %d\n" %\
(delta_s, len(self.owner.connections))
sys.stdout.write(message)
sys.stdout.flush()
time.sleep(self.period)
|
Shaswat27/sympy | sympy/core/compatibility.py | Python | bsd-3-clause | 31,587 | 0.001646 | """
Reimplementations of constructs introduced in later versions of Python than
we support. Also some functions that are needed SymPy-wide and are located
here for easy import.
"""
from __future__ import print_function, division
import operator
from collections import defaultdict
from sympy.external import import_module
"""
Python 2 and Python 3 compatible imports
String and Unicode compatible changes:
* `unicode()` removed in Python 3, import `unicode` for Python 2/3
compatible function
* `unichr()` removed in Python 3, import `unichr` for Python 2/3 compatible
function
* Use `u()` for escaped unicode sequences (e.g. u'\u2020' -> u('\u2020'))
* Use `u_decode()` to decode utf-8 formatted unicode strings
* `string_types` gives str in Python 3, unicode and str in Python 2,
equivalent to basestring
Integer related changes:
* `long()` removed in Python 3, import `long` for Python 2/3 compatible
function
* `integer_types` gives int in Python 3, int and long in Python 2
Types related changes:
* `class_types` gives type in Python 3, type and ClassType in Python 2
Renamed function attributes:
* Python 2 `.func_code`, Python 3 `.__func__`, access with
`get_function_code()`
* Python 2 `.func_globals`, Python 3 `.__globals__`, access with
`get_function_globals()`
* Python 2 `.func_name`, Python 3 `.__name__`, access with
`get_function_name()`
Moved modules:
* `reduce()`
* `StringIO()`
* `cStringIO()` (same as `StingIO()` in Python 3)
* Python 2 `__builtins__`, access with Python 3 name, `builtins`
Iterator/list changes:
* `xrange` removed in Python 3, import `xrange` for Python 2/3 compatible
iterator version of range
exec:
* Use `exec_()`, with parameters `exec_(code, globs=None, locs=None)`
Metaclasses:
* Use `with_metaclass()`, examples below
* Define class `Foo` with metaclass `Meta`, and no parent:
class Foo(with_metaclass(Meta)):
pass
* Define class `Foo` with metaclass `Meta` and parent class `Bar`:
class Foo(with_metaclass(Meta, Bar)):
pass
"""
import sys
PY3 = sys.version_info[0] > 2
if PY3:
class_types = type,
integer_types = (int,)
string_types = (str,)
long = int
# String / unicode compatibility
unicode = str
unichr = chr
def u(x):
return x
def u_decode(x):
return x
Iterator = object
# Moved definitions
get_function_code = operator.attrgetter("__code__")
get_function_globals = operator.attrgetter("__globals__")
get_function_name = operator.attrgetter("__name__")
import builtins
from functools import reduce
from io import StringIO
cStringIO = StringIO
exec_=getattr(builtins, "exec")
range=range
else:
import codecs
import types
class_types = (type, types.ClassType)
integer_types = (int, long)
string_types = (str, unicode)
long = long
# String / unicode compatibility
unicode = unicode
unichr = unichr
def u(x):
return codecs.unicode_escape_decode(x)[0]
def u_decode(x):
return x.decode('utf-8')
class Iterator(object):
def next(self):
return type(self).__next__(self)
# Moved definitions
get_function_code = operator.attrgetter("func_code")
get_function_globals = operator.attrgetter("func_globals")
get_function_name = operator.attrgetter("func_name")
import __builtin__ as builtins
reduce = reduce
from StringIO import StringIO
from cStringIO import StringIO as cStringIO
def exec_(_code_, _globs_=None, _locs_=None):
"""Execute code in a namespace."""
if _globs_ is None:
frame = sys._getframe(1)
_globs_ = frame.f_globals
if _locs_ is None:
_locs_ = frame.f_locals
del frame
elif _locs_ is None:
_locs_ = _globs_
exec("exec _code_ in _globs_, _locs_")
range=xrange
def with_metaclass(meta, *bases):
"""
Create a base class with a metaclass.
For example, if you have the metaclass
>>> class Meta(type):
... pass
Use this as the metaclass by doing
>>> from sympy.core.compatibility import with_metaclass
>>> class MyClass(with_metaclass(Meta, object)):
... pass
This is equivalent to the Python 2::
class MyClass(object):
__metaclass__ = Meta
or Python 3::
class MyClass(object, metaclass=Meta):
pass
That is, the first argument is the metaclass, and the remaining arguments
are the base classes. Note that if the base class is just ``object``, you
may omit it.
>>> MyClass.__mro__
(<class 'MyClass'>, <... 'object'>)
>>> type(MyClass)
<class 'Meta'>
"""
# This requires a bit of explanation: the basic idea is to make a dummy
# metaclass for one level of class instantiation that replaces itself with
# the actual metaclass.
# Code copied from the 'six' library.
class metaclass(meta):
def __new__(cls, name, this_bases, d):
return meta(name, bases, d)
return type.__new__(metaclass, "NewBase", (), {})
# These are in here because telling if something is an iterable just by calling
# hasattr(obj, "__iter__") behaves differently in Python 2 and Python 3. In
# particular, hasattr(str, "__iter__") is False in Python 2 and True in Python 3.
# I think putting them here also makes it easier to use them in the core.
class NotIterable:
"""
Use this as mixin when creating a class which is not supposed to return
true when iterable() is called on its instances. I.e. avoid infinite loop
when calling e.g. list() on the instance
"""
pass
def iterable(i, exclude=(string_types, dict, NotIterable)):
"""
Return a boolean indicating whether ``i`` is SymPy iterable.
True also indicates that the iterator is finite, i.e. you e.g.
call list(...) on the instance.
When SymPy is working with iterables, it is almost always assuming
that the iterable is not a string or a mapping, so those are excluded
by default. If you want a pure Python definition, make exclude=None. To
| exclude multiple items, pass them as a tuple.
You can | also set the _iterable attribute to True or False on your class,
which will override the checks here, including the exclude test.
As a rule of thumb, some SymPy functions use this to check if they should
recursively map over an object. If an object is technically iterable in
the Python sense but does not desire this behavior (e.g., because its
iteration is not finite, or because iteration might induce an unwanted
computation), it should disable it by setting the _iterable attribute to False.
See also: is_sequence
Examples
========
>>> from sympy.utilities.iterables import iterable
>>> from sympy import Tuple
>>> things = [[1], (1,), set([1]), Tuple(1), (j for j in [1, 2]), {1:2}, '1', 1]
>>> for i in things:
... print('%s %s' % (iterable(i), type(i)))
True <... 'list'>
True <... 'tuple'>
True <... 'set'>
True <class 'sympy.core.containers.Tuple'>
True <... 'generator'>
False <... 'dict'>
False <... 'str'>
False <... 'int'>
>>> iterable({}, exclude=None)
True
>>> iterable({}, exclude=str)
True
>>> iterable("no", exclude=str)
False
"""
if hasattr(i, '_iterable'):
return i._iterable
try:
iter(i)
except TypeError:
return False
if exclude:
return not isinstance(i, exclude)
return True
def is_sequence(i, include=None):
"""
Return a boolean indicating whether ``i`` is a sequence in the SymPy
sense. If anything that fails the test below should be included as
being a sequence for your application, set 'include' to that object's
type; multiple types should be passed as a tuple of types.
Note: although generators can generate a sequence, they often need special
handling to make sure their elements |
8acs2016/All-Terrain-Life-Vest | code.py | Python | apache-2.0 | 762 | 0.018373 | # All-Te | rrain-Life-Vest
All Terrain Life Vest- IEA Raspverry Pi Competition Entry
# Description
import RPi.GPIO as GPIO
import time
import os
GPIO.setmode (GPIO.BCM)
GPIO.cleanup()
GPIO.setwarnings(False)
GPIO.setup(17,GPIO.OUT)
GPIO.setup(04,GPIO.OUT)
GPIO.setup(22, GPIO.IN)
print("---------------")
print("Button+GPIO")
print("---------------")
print GPIO.input(22)
while True:
if(GPIO.input(22)==False):
GPIO.output(17,GPIO.HIGH)
GPIO.output(04,GPIO.HIGH)
| print("air bag activated")
os.system('date')
print GPIO.input(22)
time.sleep(1)
GPIO.output(17,GPIO.LOW)
GPIO.output(04,GPIO.LOW)
else:
os.system('clear')
print("air bag NOT activated")
time.sleep(1)
|
arkharin/OpenCool | scr/logic/components/expansion_valve/theoretical.py | Python | mpl-2.0 | 1,392 | 0.001437 | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
"""
Define the Expansion Valve component.
"""
from scr.logic.components.component import Component as Cmp
from scr.logic.components.component import ComponentInfo as CmpInfo
from scr.logic.components.component import component, fundamental_equation
def update_saved_data_to_last_version(orig_data, orig_version):
return orig_data
@component('theoretical_expansion_valve', CmpInfo.EXPANSION_VALVE, 1, update_saved_data_to_last_version)
class Theoretical(Cmp):
def __init__(self, id_, inlet_nodes_id, outlet_nodes_id, component_dat | a):
super().__init__(id_, inlet_nodes_id, outlet_nodes_id, component_data)
""" Fundamental properties equations """
@fundamental_equation()
# function name can be arbitrary. Return a single vector with each side of the equation evaluated.
def _eval_intrinsic_equations(self):
id_inlet_node = self.get_id_inlet_nodes()[0]
inlet_node = self.get_inlet_no | de(id_inlet_node)
id_outlet_node = self.get_id_outlet_nodes()[0]
outlet_node = self.get_outlet_node(id_outlet_node)
h_in = inlet_node.enthalpy()
h_out = outlet_node.enthalpy()
return [h_in / 1000.0, h_out / 1000.0]
|
antont/tundra | src/Application/PythonScriptModule/pymodules_old/webserver/webcontroller.py | Python | apache-2.0 | 5,782 | 0.008302 | """a non-blocking, non-threaded non-multiprocessing circuits web server"""
import time #timestamping images
import datetime #showing human readable time on render page
import os
import rexviewer as r
import naali
try:
import circuits
except ImportError: #not running within the viewer, but testing outside it
import sys
sys.path.append('..')
from circuits.web import Controller, Server, Static
#for camera rotating
import PythonQt.QtGui
from PythonQt.QtGui import QQuaternion as Quat
from PythonQt.QtGui import QVector3D as Vec
import mathutils as mu
PORT = 28008
#__file__ doesn't work in embedded context, but os.getcwd() helps
OWNPATH = os.getcwd() + "/pymodules/webserver/"
SHOTPATH = OWNPATH + "screenshot/"
#print "WEB PATH for images:", PATH
class WebServer(Server):
    """Circuits web-server component that autoload registers to the viewer.

    Listens on all interfaces at PORT, routes requests to WebController
    and serves the screenshot directory as static files.
    """
    def __init__(self):
        Server.__init__(self, "0.0.0.0:%d" % PORT) #"localhost", PORT)
        # circuits uses '+' to attach child components to the server.
        self + WebController() + Static(docroot=SHOTPATH)
#the first version with relative controls to the cam
relhtml = """\
<html>
<head>
<title>Naali web ui</title>
</head>
<body>
<h1>Naali</h1>
<form action="camcontrol" method="GET">
<p>rotate:<br/>
<input type="s | ubmit" name="rotate" value="10"/>
<input type="submit" name="rotate" value="-10"/>
</p>
<p>move:<br/
<input type="submit" name="move" value="+1"/><br>
<input type="submit" name="move" value="-1"/>
</p>
</form>
<img src="%s"/>
</body>
</html>"""
#second version where webui gives absolute pos&ort for the cam, | so each user has own on client side
abshtml = open(OWNPATH + "webui.html").read()
def save_screenshot():
    """Render one frame without the 2D UI overlay and save it as a PNG.

    Returns:
        (baseurl, imgname): URL prefix under which the image is served and
        the timestamped file name written into SHOTPATH.
    """
    rend = naali.renderer
    # Hide the UI so only the 3D world ends up in the capture.
    rend.HideCurrentWorldView()
    rend.Render()
    imgname = "image-%s.png" % time.time()
    r.takeScreenshot(SHOTPATH, imgname)
    rend.ShowCurrentWorldView()
    baseurl = "/"
    #baseurl = "http://www.playsign.fi:28080/"
    return baseurl, imgname
class WebController(Controller):
def index(self):
return self.serve_file(OWNPATH + "naali.html")
def hello(self):
return "Hello World!"
def camcontrol(self, rotate=None, move=None):
cament = naali.getCamera()
p = cament.placeable
#print p.position, p.orientation
if rotate is not None:
ort = p.orientation
rot = Quat.fromAxisAndAngle(Vec(0, 1, 0), float(rotate))
ort *= rot
p.orientation = ort
if move is not None:
pos = p.position
pos += Vec(float(move), 0, 0)
p.position = pos
baseurl, imgname = save_screenshot()
imgurl = baseurl + imgname
#return "%s, %s" % (p.position, p.orientation)
return relhtml % imgurl
def render(self, camposx=None, camposy=None, camposz=None, camang=None):
#, camortx=None, camorty=None, camortz=None, camortw=None):
cament = naali.getCamera()
p = cament.placeable
if camposx is not None:
pos = Vec(*(float(v) for v in [camposx, camposy, camposz]))
p.position = pos
if camang is not None:
ort = p.orientation
start = Quat(0, 0, -0.707, -0.707)
rot = Quat.fromAxisAndAngle(Vec(0, 1, 0), -float(camang))
new = start * rot
p.orientation = new
#if camortx is not None:
# ort = Quat(*(float(v) for v in [camortw, camortx, camorty, camortz]))
# p.orientation = ort
#return str(p.position), str(p.orientation) #self.render1()
baseurl, imgname = save_screenshot()
imgurl = baseurl + imgname
pos = p.position
ort = p.orientation
#vec, ang = toAngleAxis(p.orientation)
#print vec, ang
euler = mu.quat_to_euler(ort)
ang = euler[0]
if ang < 0:
ang = 360 + ang
return abshtml % (imgurl,
ang,
pos.x(), pos.y(), pos.z()
#ort.scalar(), ort.x(), ort.y(), ort.z(),
)
def _renderimgurl(self, camposx=None, camposy=None, camposz=None, camortx=None, camorty=None, camortz=None, camortw=None):
cament = naali.getCamera()
p = cament.placeable
orgpos = Vec(0, 0, 0)
orgort = Quat(1, 0, 0, 0)
if camposx is not None:
pos = Vec(*(float(v) for v in [camposx, camposy, camposz]))
p.position = pos
if camortx is not None:
ort = Quat(*(float(v) for v in [camortw, camortx, camorty, camortz]))
p.orientation = ort
baseurl, imgname = save_screenshot()
p.position = orgpos
p.orientation = orgort
return baseurl, imgname
def renderimgurl(self, camposx=None, camposy=None, camposz=None, camortx=None, camorty=None, camortz=None, camortw=None):
baseurl, imgname = self._renderimgurl(camposx, camposy, camposz, camortx, camorty, camortz, camortw)
return baseurl + imgname
def renderimg(self, camposx=None, camposy=None, camposz=None, camortx=None, camorty=None, camortz=None, camortw=None):
_, imgname = self._renderimgurl(camposx, camposy, camposz, camortx, camorty, camortz, camortw)
return self.serve_file(SHOTPATH + imgname)
def render1(self, campos=None, camort=None):
timestr = datetime.datetime.today().isoformat()
baseurl, imgname = save_screenshot()
imgurl = baseurl + imgname
return """
<h1>Realxtend Naali viewer</h1>
<h2>at %s</h2>
<img src="%s"/>
""" % (timestr, imgurl)
#~ if __name__ == '__main__':
#~ while 1:
#~ print ".",
|
geradcoles/random-name | randomname/lists/names_male.py | Python | apache-2.0 | 28,040 | 0.094009 | WORDS = (
'Aaron',
'Abdul',
'Abe',
'Abel',
'Abraham',
'Abram',
'Adalberto',
'Adam',
'Adan',
'Adolfo',
'Adolph',
'Adrian',
'Agustin',
'Ahmad',
'Ahmed',
'Al',
'Alan',
'Albert',
'Alberto',
'Alden',
'Aldo',
'Alec',
'Alejandro',
'Alex',
'Alexander',
'Alexis',
'Alfonso',
'Alfonzo',
'Alfred',
'Alfredo',
'Ali',
'Allan',
'Allen',
'Alonso',
'Alonzo',
'Alphonse',
'Alphonso',
'Alton',
'Alva',
'Alvaro',
'Alvin',
'Amado',
'Ambrose',
'Amos',
'Anderson',
'Andre',
'Andrea',
'Andreas',
'Andres',
'Andrew',
'Andy',
'Angel',
'Angelo',
'Anibal',
'Anthony',
'Antione',
'Antoine',
'Anton',
'Antone',
'Antonia',
'Antonio',
'Antony',
'Antwan',
'Archie',
'Arden',
'Ariel',
'Arlen',
'Arlie',
'Armand',
'Armando',
'Arnold',
'Arnoldo',
'Arnulfo',
'Aron',
'Arron',
'Art',
'Arthur',
'Arturo',
'Asa',
'Ashley',
'Aubrey',
'August',
'Augustine',
'Augustus',
'Aurelio',
'Austin',
'Avery',
'Barney',
'Barrett',
'Barry',
'Bart',
'Barton',
'Basil',
'Beau',
'Ben',
'Benedict',
'Benito',
'Benjamin',
'Bennett',
'Bennie',
'Benny',
'Benton',
'Bernard',
'Bernardo',
'Bernie',
'Berry',
'Bert',
'Bertram',
'Bill',
'Billie',
'Billy',
'Blaine',
'Blair',
'Blake',
'Bo',
'Bob',
'Bobbie',
'Bobby',
'Booker',
'Boris',
'Boyce',
'Boyd',
'Brad',
'Bradford',
'Bradley',
'Bradly',
'Brady',
'Brain',
'Branden',
'Brandon',
'Brant',
'Brendan',
'Brendon',
'Brent',
'Brenton',
'Bret',
'Brett',
'Brian',
'Brice',
'Britt',
'Brock',
'Broderick',
'Brooks',
'Bruce',
'Bruno',
'Bryan',
'Bryant',
'Bryce',
'Bryon',
'Buck',
'Bud',
'Buddy',
'Buford',
'Burl',
'Burt',
'Burton',
'Buster',
'Byron',
'Caleb',
'Calvin',
'Cameron',
'Carey',
'Carl',
'Carlo',
'Carlos',
'Carlton',
'Carmelo',
'Carmen',
'Carmine',
'Carol',
'Carrol',
'Carroll',
'Carson',
'Carter',
'Cary',
'Casey',
'Cecil',
'Cedric',
'Cedrick',
'Cesar',
'Chad',
'Chadwick',
'Chance',
'Chang',
'Charles',
'Charley',
'Charlie',
'Chas',
'Chase',
'Chauncey',
'Chester',
'Chet',
'Chi',
'Chong',
'Chris',
'Christian',
'Christoper',
'Christopher',
'Chuck',
'Chung',
'Clair',
'Clarence',
'Clark',
'Claud',
'Claude',
'Claudio',
'Clay',
'Clayton',
'Clement',
'Clemente',
'Cleo',
'Cletus',
'Cleveland',
'Cliff',
'Clifford',
'Clifton',
'Clint',
'Clinton',
'Clyde',
'Cody',
'Colby',
'Cole',
'Coleman',
'Colin',
'Collin',
'Colton',
'Columbus',
'Connie',
'Conrad',
'Cordell',
'Corey',
'Cornelius',
'Cornell',
'Cortez',
'Cory',
'Courtney',
'Coy',
'Craig',
'Cristobal',
'Cristopher',
'Cruz',
'Curt',
'Curtis',
'Cyril',
'Cyrus',
'Dale',
'Dallas',
'Dalton',
'Damian',
'Damien',
'Damion',
'Damon',
'Dan',
'Dana',
'Dane',
'Danial',
'Daniel',
'Danilo',
'Dannie',
'Danny',
'Dante',
'Darell',
'Daren',
'Darin',
'Dario',
'Darius',
'Darnell',
'Daron',
'Darrel',
'Darrell',
'Darren',
'Darrick',
'Darrin',
'Darron',
'Darryl',
'Darwin',
'Daryl',
'Dave',
'David',
'Davis',
'Dean',
'Deandre',
'Deangelo',
'Dee',
'Del',
'Delbert',
'Delmar',
'Delmer',
'Demarcus',
'Demetrius',
'Denis',
'Dennis',
'Denny',
'Denver',
'Deon',
'Derek',
'Derick',
'Derrick',
'Deshawn',
'Desmond',
'Devin',
'Devon',
'Dewayne',
'Dewey',
'Dewitt',
'Dexter',
'Dick',
'Diego',
'Dillon',
'Dino',
'Dion',
'Dirk',
'Domenic',
'Domingo',
'Dominic',
'Dominick',
'Dominique',
'Don',
'Donald',
'Dong',
'Donn',
'Donnell',
'Donnie',
'Donny',
'Donovan',
'Donte',
'Dorian',
'Dorsey',
'Doug',
'Douglas',
'Douglass',
'Doyle',
'Drew',
'Duane',
'Dudley',
'Duncan',
'Dustin',
'Dusty',
'Dwain',
'Dwayne',
'Dwight',
'Dylan',
'Earl',
'Earle',
'Earnest',
'Ed',
'Eddie',
'Eddy',
'Edgar',
'Edgardo',
'Edison',
'Edmond',
'Edmund',
'Edmundo',
'Eduardo',
'Edward',
'Edwardo',
'Edwin',
'Efrain',
'Efren',
'Elbert',
'Elden',
'Eldon',
'Eldridge',
'Eli',
'Elias',
'Elijah',
'Eliseo',
'Elisha',
'Elliot',
'Elliott',
'Ellis',
'Ellsworth',
'Elmer',
'Elmo',
'Eloy',
'Elroy',
'Elton',
'Elvin',
'Elvis',
'Elwood',
'Emanuel',
'Emerson',
'Emery',
'Emil',
'Emile',
'Emilio',
'Emmanuel',
'Emmett',
'Emmitt',
'Emory',
'Enoch',
'Enrique',
'Erasmo',
'Eric',
'Erich',
'Erick',
'Erik',
'Erin',
'Ernest',
'Ernesto',
'Ernie',
'Errol',
'Ervin',
'Erwin',
'Esteban',
'Ethan',
'Eugene',
'Eugenio',
'Eusebio',
'Evan',
'Everett',
'Everette',
'Ezekiel',
'Ezequiel',
'Ezra',
'Fabian',
'Faustino',
'Fausto',
'Federico',
'Felipe',
'Felix',
'Felton',
'Ferdinand',
'Fermin',
'Fernando',
'Fidel',
'Filiberto',
'Fletcher',
'Florencio',
'Florentino',
'Floyd',
'Forest',
'Forrest',
'Foster',
'Frances',
'Francesco',
'Francis',
'Francisco',
'Frank',
'Frankie',
'Franklin',
'Franklyn',
'Fred',
'Freddie',
'Freddy',
'Frederic',
'Frederick',
'Fredric',
'Fredrick',
'Freeman',
'Fritz',
'Gabriel',
'Gail',
'Gale',
'Galen',
'Garfield',
'Garland',
'Garret',
'Garrett',
'Garry',
'Garth',
'Gary',
| 'Gaston',
'Gavin',
'Gayle',
'Gaylord',
'Genaro',
'Gene',
'Geoffrey',
'George',
'Gerald',
'Geraldo',
'Gerard',
'Gerardo',
'German',
'Gerry',
'Gil',
'Gilbert',
'Gilberto',
'Gino | ',
'Giovanni',
'Giuseppe',
'Glen',
'Glenn',
'Gonzalo',
'Gordon',
'Grady',
'Graham',
'Graig',
'Grant',
'Granville',
'Greg',
'Gregg',
'Gregorio',
'Gregory',
'Grover',
'Guadalupe',
'Guillermo',
'Gus',
'Gustavo',
'Guy',
'Hai',
'Hal',
'Hank',
'Hans',
'Harlan',
'Harland',
'Harley',
'Harold',
'Harris',
'Harrison',
'Harry',
'Harvey',
'Hassan',
'Hayden',
'Haywood',
'Heath',
'Hector',
'Henry',
'Herb',
'Herbert',
'Heriberto',
'Herman',
'Herschel',
'Hershel',
'Hilario',
'Hilton',
'Hipolito',
'Hiram',
'Hobert',
'Hollis',
'Homer',
'Hong',
'Horace',
'Horacio',
'Hosea',
'Houston',
'Howard',
'Hoyt',
'Hubert',
'Huey',
'Hugh',
'Hugo',
'Humberto',
'Hung',
'Hunter',
'Hyman',
'Ian',
'Ignacio',
'Ike',
'Ira',
'Irvin',
'Irving',
'Irwin',
'Isaac',
'Isaiah',
'Isaias',
'Isiah',
'Isidro',
'Ismael',
'Israel',
'Isreal',
'Issac',
'Ivan',
'Ivory',
'Jacinto',
'Jack',
'Jackie',
'Jackson',
'Jacob',
'Jacques',
'Jae',
'Jaime',
'Jake',
'Jamaal',
'Jamal',
'Jamar',
'Jame',
'Jamel',
'James',
'Jamey',
'Jamie',
'Jamison',
'Jan',
'Jared',
'Jarod',
'Jarred',
'Jarrett',
'Jarrod',
'Jarvis',
'Jason',
'Jasper',
'Javier',
'Jay',
'Jayson',
'Jc',
'Jean',
'Jed',
'Jeff',
'Jefferey',
'Jefferson',
'Jeffery',
'Jeffrey',
'Jeffry',
'Jerald',
'Jeramy',
'Jere',
'Jeremiah',
'Jeremy',
'Jermaine',
'Jerold',
'Jerome',
'Jeromy',
'Jerrell',
'Jerrod',
'Jerrold',
'Jerry',
'Jess',
'Jesse',
'Jessie',
'Jesus',
'Jewel',
'Jewell',
'Jim',
'Jimmie',
'Jimmy',
'Joan',
'Joaquin',
'Jody',
'Joe',
'Joel',
'Joesph',
'Joey',
'John',
'Johnathan',
'Johnathon',
'Johnie',
'Johnnie',
'Johnny',
'Johnson',
'Jon',
'Jonah',
'Jonas',
'Jonathan',
'Jonathon',
'Jordan',
'Jordon',
'Jorge',
'Jose',
'Josef',
'Joseph',
'Josh',
'Joshua',
'Josiah',
'Jospeh',
'Josue',
'Juan',
'Jude',
'Judson',
'Jules',
'Julian',
'Julio',
'Julius',
'Junior',
'Justin',
'Kareem',
'Karl',
'Kasey',
'Keenan',
'Keith',
'Kelley',
'Kelly',
'Kelvin',
'Ken',
'Kendall',
'Kendrick',
'Keneth',
'Kenneth',
'Kennith',
'Kenny',
'Kent',
'Kenton',
'Kermit',
'Kerry',
'Keven',
'Kevin',
'Kieth',
'Kim',
'King',
'Kip',
'Kirby',
'Kirk',
'Korey',
'Kory',
'Kraig',
'Kris',
'Kristofer',
'Kristopher',
'Kurt',
'Kurtis',
'Kyle',
'Lacy',
'Lamar',
'Lamont',
'Lance',
'Landon',
'Lane',
'Lanny',
'Larry',
'Lauren',
'Laurence',
'Lavern',
'Laverne',
'Lawerence',
'Lawrence',
'Lazaro',
'Leandro',
'Lee',
'Leif',
'Leigh',
'Leland',
'Lemuel',
'Len',
'Lenard',
'Lenny',
'Leo',
'Leon',
'Leonard',
'Leonardo',
'Leonel',
'Leopoldo',
'Leroy',
'Les',
'Lesley',
'Leslie',
'Lester',
'Levi',
'Lewis',
'Lincoln',
'Lindsay',
'Lindsey',
'Lino',
'Linwood',
'Lionel',
'Lloyd',
'Logan',
'Lon',
'Long',
'Lonnie',
'Lonny',
'Loren',
'Lorenzo',
'Lou',
'Louie',
'Louis',
'Lowell',
'Loyd',
'Lucas',
'Luciano',
'Lucien',
'Lucio',
'L |
FirstDraftGIS/firstdraft | projfd/appfd/forms.py | Python | apache-2.0 | 726 | 0.012397 | # In forms.py...
from appfd.models import Basemap
from django.forms import CharField, FileField, Form, ModelChoiceField, URLField
from timezone_field import TimeZoneFormField
class | BasemapForm(Form):
basemap = ModelChoiceField(to_field_name="name", queryset=Basemap.objects.all())
class LinkForm(Form):
data = URLField()
class TextForm(Form):
data = CharField()
class FileForm(Form):
data = FileField()
class RequestPossibleAdditionsForm(Form):
name = CharField()
# no | t validating whether token is in correct tokens bc that would slow
# things down too much
token = CharField()
class TimezoneForm(Form):
timezone = TimeZoneFormField()
class TweetForm(Form):
text = CharField()
|
puttarajubr/commcare-hq | corehq/apps/orgs/decorators.py | Python | bsd-3-clause | 1,343 | 0.005957 | from django.core.urlresolvers import reverse
from django.http import HttpResponse, HttpResponseRedirect, Http404
def no_permissions_redirect(request):
    """Redirect GET requests to the no-permissions page; answer any other
    method with a plain "Missing qualifications" response."""
    target = reverse('no_permissions')
    if request.method == 'GET':
        return HttpResponseRedirect(target)
    return HttpResponse("Missing qualifications")
def check_and_set_org(req, org):
    """Resolve the organization named *org* and attach it to the request.

    Raises:
        Http404: when no organization with that name exists.
    """
    # NOTE(review): local import presumably avoids a circular dependency
    # with the models module -- confirm.
    from corehq.apps.orgs.models import Organization
    organization = Organization.get_by_name(org, strict=True)
    if not organization:
        raise Http404
    req.organization = organization
def org_admin_required(view_func):
    """Decorator: run the view only for org admins or superusers;
    everyone else gets the no-permissions response."""
    def shim(request, org, *args, **kwargs):
        check_and_set_org(request, org)
        if hasattr(request, 'couch_user') and (
                request.couch_user.is_org_admin(org)
                or request.couch_user.is_superuser):
            return view_func(request, org, *args, **kwargs)
        return no_permissions_redirect(request)
    return shim
def org_member_required(view_func):
    """Decorator: run the view only for members of the org or superusers;
    everyone else gets the no-permissions response."""
    def shim(request, org, *args, **kwargs):
        check_and_set_org(request, org)
        if hasattr(request, 'couch_user') and (
                request.couch_user.is_member_of_org(org)
                or request.couch_user.is_superuser):
            return view_func(request, org, *args, **kwargs)
        return no_permissions_redirect(request)
    return shim
|
google-research/google-research | generalization_representations_rl_aistats22/minigrid/rl_basics.py | Python | apache-2.0 | 3,853 | 0.008305 | # coding=utf-8
# Copyright 2022 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Reusable implementation of basic RL algorithms."""
from absl import logging
import numpy as np
def get_state_xy(idx, num_cols):
  """Given state index this method returns its equivalent coordinate (x,y).

  Inverse of get_state_idx.

  Args:
    idx: index uniquely identifying a state
    num_cols: number of colums

  Returns:
    values x, y describing the state's location in the grid
  """
  # divmod replaces the manual mod/subtract/divide dance; int() keeps the
  # original behavior of truncating float inputs to integer coordinates.
  quot, rem = divmod(idx, num_cols)
  return int(quot), int(rem)
def get_state_idx(x, y, num_cols):
  """Return the flat index of the grid cell at (x, y).

  Inverse of get_state_xy.

  Args:
    x: value of the coordinate x
    y: value of the coordinate y
    num_cols: number of colums

  Returns:
    unique index identifying a position in the grid
  """
  return x * num_cols + y
def policy_random(env):
  r"""Uniform random policy on env.

  Args:
    env: a MiniGrid environment, including the MDPWrapper.

  Returns:
    Numpy array S \times A: every action has probability 1/A in every state.
  """
  shape = (env.num_states, env.num_actions)
  return np.full(shape, 1.0 / env.num_actions)
def policy_eps_suboptimal(env, optimal_policy, epsilon=0):
  r"""Epsilon suboptimal policy.

  Takes random action with probability epsilon and
  optimal action with prob 1 - epsilon on env.

  Args:
    env: a MiniGrid environment, including the MDPWrapper.
    optimal_policy: Numpy array S \times A with optimal policy
    epsilon: float in [0, 1], probability mass given to the uniform policy.

  Returns:
    Numpy array S \times A: policy followed by the agent
  """
  return epsilon * policy_random(env) + (1 - epsilon) * optimal_policy
def policy_iteration(env, gamma=0.99, tolerance=1e-5, verbose=False):
  """Run policy iteration on env.

  Alternates exact policy evaluation (iterated to convergence) with a
  greedy policy-improvement sweep until the policy is stable.

  Args:
    env: a MiniGrid environment, including the MDPWrapper.
    gamma: float, discount factor.
    tolerance: float, evaluation stops when the value function change is less
      than the tolerance.
    verbose: bool, whether to print verbose messages.

  Returns:
    Tuple (values, policy): Numpy array with V* and Numpy array S x A
    with the resulting deterministic (one-hot) optimal policy.
  """
  values = np.zeros(env.num_states)
  # Random policy
  policy = np.ones((env.num_states, env.num_actions)) / env.num_actions
  policy_stable = False
  i = 0
  while not policy_stable:
    # Policy evaluation
    while True:
      delta = 0.
      for s in range(env.num_states):
        # In-place (Gauss-Seidel style) Bellman backup for state s.
        v = np.sum(env.rewards[s, :] * policy[s, :] + gamma * policy[s, :] *
                   np.matmul(env.transition_probs[s, :, :], values))
        delta = max(delta, abs(v - values[s]))
        values[s] = v
      if delta < tolerance:
        break
    # Policy improvement
    policy_stable = True
    for s in range(env.num_states):
      old = policy[s].copy()
      # Action-value estimates g[a] under the current value function.
      g = np.zeros(env.num_actions, dtype=float)
      for a in range(env.num_actions):
        g[a] = (
            env.rewards[s, a] +
            gamma * np.matmul(env.transition_probs[s, a, :], values))
      action = np.argmax(g)
      # Make the policy deterministic: all mass on the greedy action.
      for a in range(env.num_actions):
        if a == action:
          policy[s, a] = 1.
        else:
          policy[s, a] = 0
      if not np.array_equal(policy[s], old):
        policy_stable = False
    i += 1
    if i % 1000 == 0 and verbose:
      logging.info('Error after %d iterations: %f', i, delta)
  if verbose:
    logging.info('Found V* in %d iterations', i)
    logging.info(values)
  return values, policy
|
hjanime/VisTrails | scripts/get_usersguide.py | Python | bsd-3-clause | 3,866 | 0.008795 | #!/usr/bin/env python
###############################################################################
##
## Copyright (C) 2014-2015, New York University.
## Copyright (C) 2011-2014, NYU-Poly.
## Copyright (C) 2006-2011, University of Utah.
## All rights reserved.
## Contact: contact@vistrails.org
##
## This file is part of VisTrails.
##
## "Redistribution and use in source and binary forms, with or without
## modification, are permitted provided that the following conditions are met | :
##
## - Redistributions of source code must retain the above copyright n | otice,
## this list of conditions and the following disclaimer.
## - Redistributions in binary form must reproduce the above copyright
## notice, this list of conditions and the following disclaimer in the
## documentation and/or other materials provided with the distribution.
## - Neither the name of the New York University nor the names of its
## contributors may be used to endorse or promote products derived from
## this software without specific prior written permission.
##
## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
## AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
## THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
## PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
## CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
## EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
## PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
## OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
## WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
## OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
## ADVISED OF THE POSSIBILITY OF SUCH DAMAGE."
##
###############################################################################
import os.path
import subprocess
import sys
import urllib2
this_dir = os.path.dirname(os.path.abspath(__file__))
DOWNLOAD_URL = "http://www.vistrails.org/usersguide/dev/html/VisTrails.pdf"
SAVE_TO = os.path.abspath(sys.argv[1]) if len(sys.argv) > 1 else this_dir
# http://stackoverflow.com/questions/377017/test-if-executable-exists-in-python
def which(program):
    """Locate *program* like the shell command ``which``.

    If *program* contains a path component it is returned as-is when it
    points at an executable regular file.  Otherwise every directory on
    ``PATH`` is searched in order.  Returns the full path, or None when
    nothing executable is found.
    """
    import os

    def runnable(candidate):
        # Must be a regular file with the executable bit set for us.
        return os.path.isfile(candidate) and os.access(candidate, os.X_OK)

    directory, _name = os.path.split(program)
    if directory:
        if runnable(program):
            return program
        return None
    for entry in os.environ["PATH"].split(os.pathsep):
        candidate = os.path.join(entry.strip('"'), program)
        if runnable(candidate):
            return candidate
    return None
def download_usersguide():
    """Fetch the pre-built usersguide PDF and save it under SAVE_TO.

    Fallback used by the __main__ block when the local Sphinx/pdflatex
    toolchain is missing.  (Python 2 code: urllib2, print statement.)
    """
    print "Downloading usersguide from", DOWNLOAD_URL
    response = urllib2.urlopen(DOWNLOAD_URL)
    # Keep the remote file name (last URL segment) for the local copy.
    filename = os.path.join(SAVE_TO,
                            DOWNLOAD_URL.split('/')[-1])
    f = open(filename, 'wb')
    f.write(response.read())
    f.close()
if __name__ == "__main__":
if which('sphinx-build') is None:
print "Sphinx is not installed!"
download_usersguide()
elif which('pdflatex') is None:
print "pdflatex is not installed!"
download_usersguide()
else:
cwd = os.getcwd()
os.chdir(this_dir)
# Build usersguide
proc = subprocess.Popen(['./build_usersguide.py', SAVE_TO],
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
proc.wait()
if proc.returncode != 0:
print "ERROR: building usersguide failed."
if proc.stdout:
print proc.stdout.readlines()
sys.exit(1)
os.chdir(this_dir)
|
lilleswing/deepchem | examples/multiclass/multiclass_sklearn.py | Python | mit | 605 | 0.001653 | import deepchem as dc
import numpy as np
import sklearn
from sklearn.ensemble import RandomForestClassifier
# Synthetic multiclass dataset: N samples with n_feat random features each.
N = 100
n_feat = 5
n_classes = 3
X = np.random.rand(N, n_feat)
# Use n_classes here instead of the previously hard-coded literal 3 so the
# label range stays in sync with the declared class count.
y = np.random.randint(n_classes, size=(N,))
dataset = dc.data.NumpyDataset(X, y)

# Class-balanced random forest wrapped in DeepChem's sklearn adapter.
sklearn_model = RandomForestClassifier(class_weight="balanced", n_estimators=50)
model = dc.models.SklearnModel(sklearn_model)
# Fit trained model
print("About to fit model")
model.fit(dataset)
model.save()

print("About to evaluate model")
train_scores = model.evaluate(dataset, sklearn.metrics.roc_auc_score, [])
print("Train scores")
print(train_scores)
cysuncn/python | spark/crm/PROC_A_SUBJECT_D003025.py | Python | gpl-3.0 | 2,560 | 0.012851 | #coding=UTF-8
from pyspark import SparkContext, SparkConf, SQLContext, Row, HiveContext
from pyspark.sql.types import *
from datetime import date, datetime, timedelta
import sys, re, os
st = datetime.now()
conf = SparkConf().setAppName('PROC_A_SUBJECT_D003025').setMaster(sys.argv[2])
sc = SparkContext(conf = conf)
sc.setLogLevel('WARN')
if len(sys.argv) > 5:
if sys.argv[5] == "hive":
sqlContext = HiveContext(sc)
else:
sqlContext = SQLContext(sc)
hdfs = sys.argv[3]
dbname = sys.argv[4]
#处理需要使用的日期
etl_date = sys.argv[1]
#etl日期
V_DT = etl_date
#上一日日期
V_DT_LD = (date(int(etl_date[0:4]), int(etl_date[4:6]), int(etl_date[6:8])) + timedelta(-1)).strftime("%Y%m%d")
#月初日期
V_DT_FMD = date(int(etl_date[0:4]), int(etl_date[4:6]), 1).strftime("%Y%m%d")
#上月末日期
V_DT_LMD = (date(int(etl_date[0:4]), int(etl_date[4:6]), 1) + timedelta(-1)).strftime("%Y%m%d")
#10位日期
V_DT10 = (date(int(etl_date[0:4]), int(etl_date[4:6]), int(etl_date[6:8]))).strftime("%Y-%m-%d")
V_STEP = 0
ACRM_F_CI_ASSET_BUSI_PROTO = sqlContext.read.parquet(hdfs+'/ACRM_F_CI_ASSET_BUSI_PROTO/*')
ACRM_F_CI_ASSET_BUSI_PROTO.registerTempTable("ACRM_F_CI_ASSET_BUSI_PROTO")
#任务[21] 001-01::
V_STEP = V_STEP + 1
sql = """
SELECT A.CUST_ID AS CUST_ID
,'' AS ORG_ID
,'D003025' AS INDEX_CODE
,CAST(A.IS_FLAW AS DECIMAL(22,2)) AS INDEX_VALUE
,SUBSTR(V_DT, 1, 7) AS YEAR_MONTH
,V_DT AS ETL_DATE
,A.CUST_TYP AS CUST_TYPE
,A.FR_ID AS FR_ID
FROM ACRM_F_CI_ASSET_BUSI_PROTO A --资产协议表
WHERE A.IS_FLAW = '1'
GROUP BY A.CUST_ID
,A.CUST_TYP
,A.FR_ID
,A.IS_FLAW """
sql = re.sub(r"\bV_DT\b", "'"+V_DT10+"'", sql)
ACRM_A_TARGET_D003025 = sqlContext.sql(sql)
ACRM_A_TARGET_D003025.registerTempTable("ACRM_A_TARGET_D003025")
dfn="ACRM_A_TARGET_D003025/" | +V_DT+".parquet"
ACRM_A_TARGET_D003025.cache()
nrows = ACRM_A_TARGET_D003025.count()
ACRM_A_TARGET_D003025.write.save(path=hdfs + '/' + dfn, mode='overwrite')
ACRM_A_TARGET_D003025.unpersist()
ACRM_F_CI_ASSET_BUSI_PROTO.unpersist()
ret = os.system("hdfs dfs -rm -r /"+dbname+"/ACRM_A_TARGET_D003025/"+V_DT_LD+".parquet")
et = datetime.now()
print("Step % | d start[%s] end[%s] use %d seconds, insert ACRM_A_TARGET_D003025 lines %d") % (V_STEP, st.strftime("%H:%M:%S"), et.strftime("%H:%M:%S"), (et-st).seconds, nrows)
|
peter8472/GSM_SMS | gsm7.py | Python | apache-2.0 | 1,043 | 0.009 | #! /usr/bin/python
# coding=utf-8
"""Another gsm lookup class. This one should actually work"""
b= [u"@∆ 0¡P¿p"]
b.append(u"£_!1AQaq")
b.append(u'$Φ"2BRbr')
b.append(u"¥Γ#3CScs")
b.append(u"èΛ¤4DTdt")
b.append(u"éΩ%5EUeu")
b.append(u"ùΠ&6FVfv")
b.append(u"ìΨ'7GWgw")
b.append(u"òΣ(8HXhx")
b.append(u"ÇΘ)9IYiy")
b.append(u"\nΞ*:JZjz")
b.append(u"Ø +;KÄkä") #no character here, it's an escape
b.append(u"øÆ,<LÖlö")
b.append(u"\ræ-=MÑmñ")
b.append(u"Åß.>NÜnü")
b.append(u"åÉ/?O§oà")
class Gsm7(object):
def __init__(self):
pass
def look(self, codepoint):
row = codepoint & 0b0001111
col = codepoint >> 4
#print "%d:%d"% (row, col)
if row == 11 and | col == 1:
raise Exception("gsm escape char found")
try:
return b[row][col]
except IndexError(e):
print e
exit(1)
if __name__ == "__main__":
mygsm = G | sm7()
for x in range(0,3):
print mygsm.look(x)
print b[7][6]
|
sjsucohort6/openstack | python/venv/lib/python2.7/site-packages/openstack/tests/unit/orchestration/v1/test_stack.py | Python | mit | 3,804 | 0 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
import testtools
from openstack.orchestration.v1 import stack
FAKE_ID = 'ce8ae86c-9810-4cb1-8888-7fb53bc523bf'
FAKE_NAME = 'test_stack'
FAKE = {
'capabilities': '1',
'creation_time': '2',
'description': '3',
'disable_rollback': True,
'id': FAKE_ID,
'links': '6',
'notification_topics': '7',
'outputs': '8',
'parameters': {'OS::stack_id': '9'},
'name': FAKE_NAME,
'status': '11',
'status_reason': '12',
'template_description': '13',
'template_url': 'http://www.example.com/wordpress.yaml',
'timeout_mins': '14',
'updated_time': '15',
}
FAKE_CREATE_RESPONSE = {
'stack': {
'id': FAKE_ID,
'links': [{
'href': 'stacks/%s/%s' % (FAKE_NAME, FAKE_ID),
'rel': 'self'}]}
}
class TestStack(testtools.TestCase):
def test_basic(self):
| sot = stack.Stack()
self.assertEqual('stack', sot.resource_key)
self.assertEqual('stacks', sot.resources_key)
self.assertEqual('/stacks', sot.base_path)
self.assertEqual('orchestration', sot.service.service_type)
self.assertTrue(sot.allow_create)
self.assertTrue(sot.allow_retrieve)
self.assertFalse(so | t.allow_update)
self.assertTrue(sot.allow_delete)
self.assertTrue(sot.allow_list)
def test_make_it(self):
sot = stack.Stack(FAKE)
self.assertEqual(FAKE['capabilities'], sot.capabilities)
self.assertEqual(FAKE['creation_time'], sot.created_at)
self.assertEqual(FAKE['description'], sot.description)
self.assertEqual(FAKE['disable_rollback'], sot.disable_rollback)
self.assertEqual(FAKE['id'], sot.id)
self.assertEqual(FAKE['links'], sot.links)
self.assertEqual(FAKE['notification_topics'],
sot.notification_topics)
self.assertEqual(FAKE['outputs'], sot.outputs)
self.assertEqual(FAKE['parameters'], sot.parameters)
self.assertEqual(FAKE['name'], sot.name)
self.assertEqual(FAKE['status'], sot.status)
self.assertEqual(FAKE['status_reason'],
sot.status_reason)
self.assertEqual(FAKE['template_description'],
sot.template_description)
self.assertEqual(FAKE['template_url'],
sot.template_url)
self.assertEqual(FAKE['timeout_mins'], sot.timeout_mins)
self.assertEqual(FAKE['updated_time'], sot.updated_at)
def test_create(self):
resp = mock.MagicMock()
resp.body = FAKE_CREATE_RESPONSE
sess = mock.Mock()
sess.post = mock.MagicMock()
sess.post.return_value = resp
sot = stack.Stack(FAKE)
sot.create(sess)
url = '/stacks'
body = FAKE.copy()
body.pop('id')
body.pop('name')
sess.post.assert_called_with(url, service=sot.service, json=body)
self.assertEqual(FAKE_ID, sot.id)
self.assertEqual(FAKE_NAME, sot.name)
def test_check(self):
session_mock = mock.MagicMock()
sot = stack.Stack(FAKE)
sot._action = mock.MagicMock()
body = {'check': ''}
sot.check(session_mock)
sot._action.assert_called_with(session_mock, body)
|
t-wissmann/qutebrowser | tests/unit/utils/test_qtutils.py | Python | gpl-3.0 | 33,751 | 0.000089 | # vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:
# Copyright 2014-2020 Florian Bruhin (The Compiler) <mail@qutebrowser.org>
#
# This file is part of qutebrowser.
#
# qutebrowser is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# qutebrowser is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with qutebrowser. If not, see <http://www.gnu.org/licenses/>.
"""Tests for qutebrowser.utils.qtutils."""
import io
import os
import os.path
import unittest
import unittest.mock
import pytest
from PyQt5.QtCore import (QDataStream, QPoint, QUrl, QByteArray, QIODevice,
QTimer, QBuffer, QFile, QProcess, QFileDevice)
from PyQt5.Q | tGui import QColor
from qutebrowser.utils import qtutils, utils, usertypes
import overflow_test_cases
if utils.is_linux:
# Those are not run on macOS because that seems to cause a hang sometimes.
# On Windows, we don't run them either because of
# https://github.com/pytest-dev/pytest/issues/3650
try:
# pylint: disable=no-name-in-module,useless-suppression
from test impo | rt test_file
# pylint: enable=no-name-in-module,useless-suppression
except ImportError:
# Debian patches Python to remove the tests...
test_file = None
else:
test_file = None
# pylint: disable=bad-continuation
@pytest.mark.parametrize(['qversion', 'compiled', 'pyqt', 'version', 'exact',
                          'expected'], [
    # equal versions
    ('5.4.0', None, None, '5.4.0', False, True),
    ('5.4.0', None, None, '5.4.0', True, True),  # exact=True
    ('5.4.0', None, None, '5.4', True, True),  # without trailing 0
    # newer version installed
    ('5.4.1', None, None, '5.4', False, True),
    ('5.4.1', None, None, '5.4', True, False),  # exact=True
    # older version installed
    ('5.3.2', None, None, '5.4', False, False),
    ('5.3.0', None, None, '5.3.2', False, False),
    ('5.3.0', None, None, '5.3.2', True, False),  # exact=True
    # compiled=True
    # new Qt runtime, but compiled against older version
    ('5.4.0', '5.3.0', '5.4.0', '5.4.0', False, False),
    # new Qt runtime, compiled against new version, but old PyQt
    ('5.4.0', '5.4.0', '5.3.0', '5.4.0', False, False),
    # all up-to-date
    ('5.4.0', '5.4.0', '5.4.0', '5.4.0', False, True),
])
# pylint: enable=bad-continuation
def test_version_check(monkeypatch, qversion, compiled, pyqt, version, exact,
                       expected):
    """Test for version_check().

    Args:
        monkeypatch: The pytest monkeypatch fixture.
        qversion: The version to set as fake qVersion().
        compiled: The value for QT_VERSION_STR (None: compiled=False).
        pyqt: The value for PYQT_VERSION_STR (None: compiled=False).
        version: The version to compare with.
        exact: Use exact comparing (==).
        expected: The expected result.
    """
    monkeypatch.setattr(qtutils, 'qVersion', lambda: qversion)
    # compiled/pyqt are either both given or both None.
    compiled_arg = compiled is not None
    if compiled_arg:
        monkeypatch.setattr(qtutils, 'QT_VERSION_STR', compiled)
        monkeypatch.setattr(qtutils, 'PYQT_VERSION_STR', pyqt)
    assert qtutils.version_check(version, exact,
                                 compiled=compiled_arg) == expected
def test_version_check_compiled_and_exact():
    """exact=True together with compiled=True is rejected as contradictory."""
    with pytest.raises(ValueError):
        qtutils.version_check('1.2.3', exact=True, compiled=True)
@pytest.mark.parametrize('version, is_new', [
    ('537.21', False),  # QtWebKit 5.1
    ('538.1', False),  # Qt 5.8
    ('602.1', True)  # new QtWebKit TP5, 5.212 Alpha
])
def test_is_new_qtwebkit(monkeypatch, version, is_new):
    """is_new_qtwebkit() distinguishes legacy QtWebKit from the reloaded fork."""
    monkeypatch.setattr(qtutils, 'qWebKitVersion', lambda: version)
    assert qtutils.is_new_qtwebkit() == is_new
@pytest.mark.parametrize('backend, arguments, single_process', [
    (usertypes.Backend.QtWebKit, ['--single-process'], False),
    (usertypes.Backend.QtWebEngine, ['--single-process'], True),
    (usertypes.Backend.QtWebEngine, [], False),
])
def test_is_single_process(monkeypatch, stubs, backend, arguments, single_process):
    """--single-process only counts when running with the QtWebEngine backend."""
    qapp = stubs.FakeQApplication(arguments=arguments)
    monkeypatch.setattr(qtutils, 'QApplication', qapp)
    monkeypatch.setattr(qtutils.objects, 'backend', backend)
    assert qtutils.is_single_process() == single_process
class TestCheckOverflow:

    """Test check_overflow."""

    @pytest.mark.parametrize('ctype, val',
                             overflow_test_cases.good_values())
    def test_good_values(self, ctype, val):
        """Test values which are inside bounds."""
        qtutils.check_overflow(val, ctype)

    # bad_values() yields (ctype, val, replacement); the replacement is
    # irrelevant for the fatal case, so it is stripped here.
    @pytest.mark.parametrize('ctype, val',
                             [(ctype, val) for (ctype, val, _) in
                              overflow_test_cases.bad_values()])
    def test_bad_values_fatal(self, ctype, val):
        """Test values which are outside bounds with fatal=True."""
        with pytest.raises(OverflowError):
            qtutils.check_overflow(val, ctype)

    @pytest.mark.parametrize('ctype, val, repl',
                             overflow_test_cases.bad_values())
    def test_bad_values_nonfatal(self, ctype, val, repl):
        """Test values which are outside bounds with fatal=False."""
        newval = qtutils.check_overflow(val, ctype, fatal=False)
        assert newval == repl
class QtObject:

    """Stand-in for a Qt object, used by the ensure_valid tests."""

    def __init__(self, valid=True, null=False, error=None):
        self._error = error
        self._null = null
        self._valid = valid

    def __repr__(self):
        return '<QtObject>'

    def errorString(self):
        """Return the configured fake error text.

        Raises AttributeError when no error was configured, mimicking Qt
        classes that have no errorString() method at all.
        """
        if self._error is not None:
            return self._error
        raise AttributeError

    def isNull(self):
        return self._null

    def isValid(self):
        return self._valid
# Note: the QtObject instances below are constructed once, at collection time.
@pytest.mark.parametrize('obj, raising, exc_reason, exc_str', [
    # good examples
    (QtObject(valid=True, null=True), False, None, None),
    (QtObject(valid=True, null=False), False, None, None),
    # bad examples
    (QtObject(valid=False, null=True), True, None, '<QtObject> is not valid'),
    (QtObject(valid=False, null=False), True, None, '<QtObject> is not valid'),
    (QtObject(valid=False, null=True, error='Test'), True, 'Test',
     '<QtObject> is not valid: Test'),
])
def test_ensure_valid(obj, raising, exc_reason, exc_str):
    """Test ensure_valid.

    Args:
        obj: The object to test with.
        raising: Whether QtValueError is expected to be raised.
        exc_reason: The expected .reason attribute of the exception.
        exc_str: The expected string of the exception.
    """
    if raising:
        with pytest.raises(qtutils.QtValueError) as excinfo:
            qtutils.ensure_valid(obj)
        assert excinfo.value.reason == exc_reason
        assert str(excinfo.value) == exc_str
    else:
        qtutils.ensure_valid(obj)
@pytest.mark.parametrize('status, raising, message', [
(QDataStream.Ok, False, None),
(QDataStream.ReadPastEnd, True, "The data stream has read past the end of "
"the data in the underlying device."),
(QDataStream.ReadCorruptData, True, "The data stream has read corrupt "
"data."),
(QDataStream.WriteFailed, True, "The data stream cannot write to the "
"underlying device."),
])
def test_check_qdatastream(status, raising, message):
"""Test check_qdatastream.
Args:
status: The status to set on the QDataStream we test with.
raising: Whether check_qdatastream is expected to raise OSError.
message: The expected exception string.
|
bronycub/sugarcub | sugarcub/settings.py | Python | gpl-3.0 | 7,848 | 0.002548 | '''
Django settings for sugarcub project.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.7/ref/settings/
'''
from django.conf.global_settings import AUTHENTICATION_BACKENDS, STATICFILES_FINDERS
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
IS_PROD = os.getenv('DEPLOY_TYPE', 'dev') == 'prod'
DEBUG = not IS_PROD
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/
# Application definition
INSTALLED_APPS = (
'sugarcub',
'users',
'bbbff',
'agenda',
'stdimage',
'bootstrap3',
'django_admin_bootstrapped',
'django.contrib.admindocs',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.humanize',
'multiform',
'registration',
'el_pagination',
'bootstrap3_datetime',
'captcha',
'ws4redis',
'pipeline',
'core',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'pipeline.middleware.MinifyHTMLMiddleware',
'django.middleware.locale.LocaleMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.contrib.admindocs.middleware.XViewMiddleware',
)
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [
os.path.join(BASE_DIR, 'core', 'templates'),
os.path.join(BASE_DIR, 'admin', 'templates'),
os.path.join(BASE_DIR, 'users', 'templates'),
],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.contrib.auth.context_processors.auth',
'django.template.context_processors.debug',
'django.template.context_processors.i18n',
'django.template.context_processors.media',
'django.template.context_processors.static',
'django.template.context_processors.tz',
'django.template.context_processors.request',
'django.contrib.messages.context_processors.messages',
'core.processors.custom_fields',
'core.processors.mailing_list',
],
'debug': False
}
},
]
# STATICFILES_DIRS = (
# os.path.join(BASE_DIR, 'admin', 'static'),
# )
ROOT_URLCONF = 'sugarcub.urls'
WSGI_APPLICATION = 'sugarcub.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.7/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': os.getenv('SQL_DB', 'postgres'),
'USER': os.getenv('SQL_USER', 'postgres'),
'HOST': os.getenv('SQL_HOST', 'postgres'),
'PORT': 5432,
}
}
if os.getenv('SQL_PASSWORD'):
DATABASES['default']['PASSWORD'] = os.getenv('SQL_PASSWORD')
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.7/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, '..', 'data', 'static')
STATICFILES_FINDERS = STATICFILES_FINDERS + [
'pipeline.finders.PipelineFinder',
'pipeline.finders.ManifestFinder',
]
STATICFILES_STORAGE = 'pipeline.storage.PipelineStorage'
PIPELINE = {
'PIPELINE_ENABLED': IS_PROD,
'COMPILERS': ('pipeline.compilers.sass.SASSCompiler',),
'STYLESHEETS': {
'css': {
'source_filenames': (
'vendor/bootstrap/dist/css/bootstrap.min.css',
'vendor/bootstrap-datepicker/dist/css/bootstrap-datepicker3.min.css',
'vendor/cookieconsent2/build/dark-floating.css',
'vendor/css-social-buttons/css/zocial.css',
'vendor/font-awesome/css/font-awesome.min.css',
'vendor/leaflet/dist/leaflet.css',
'vendor/eonasdan-bootstrap-datetimepicker/build/css/bootstrap-datetimepicker.min.css',
'css/sugarcub.css',
'css/sugarcub-admin.css',
'css/bronycub.css',
),
'output_filename': 'css/sugarcub.css',
'extra_context': {
'media': 'screen,projection',
},
},
},
'CSS_COMPRESSOR': 'pipeline.compressors.yuglify.YuglifyCompressor',
'JAVASCRIPT': {
'js': {
'source_filenames': (
'vendor/jquery/dist/jquery.min.js',
'vendor/moment/min/moment.min.js',
'vendor/jquery-expander/jquery.expander.min.js',
'vendor/bootstrap/dist/js/bootstrap.min.js',
'vendor/cookieconsent2/build/cookieconsent.min.js',
'vendor/leaflet/dist/leaflet.js',
'vendor/eonasdan-bootstrap-datetimepicker/build/js/bootstrap-datetimepicker.min.js',
'js/dj.js',
'js/expander.js',
'js/konami.js',
),
'output_filename': 'js/sugarcub.js',
}
},
'JS_COMPRESSOR': 'pipeline.compressors.uglifyjs.UglifyJSCompressor',
}
# Media
MEDIA_URL = '/media/'
MEDIA_ROOT = os.path.join(BASE_DIR, '..', 'data', 'media')
# Internationalization
# https://docs.djangoproject.com/en/1.7/topics/i18n/
LANGUAGE_CODE = 'fr-fr'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Auth configuration
LOGIN_REDIRECT_URL = '/'
LOGIN_URL = '/login'
LOGOUT_URL = '/logout'
ACCOUNT_ACTIVATION_DAYS = 7
AUTH_PROFILE_MODULE = 'users.profile'
AUTHENTICATION_BACKENDS += ('users.utils.EmailModelBackend', )
# Registration
REGISTRATION_AUTO_LOGIN = True
# Celery
REDIS_HOST = os.getenv('REDIS_HOST', 'redis')
BROKER_URL = 'redis://{}:6379/0'.format(REDIS_HOST)
CELERY_RESULT_BACKEND = 'redis://{}:6379/0'.format(REDIS_HOST)
CELERY_TASK_SERIALIZER = 'json'
CELERY_RESULT_SERIALIZER = 'json'
CELERY_ACCEPT_CONTENT = ['json']
CELERY_TIMEZONE = TIME_ZONE
CELERY_ENABLE_UTC = True
CELERY_IMPORTS = ('users.models', 'users.utils',)
# Cache
CACHES = {
    'default': {
        'BACKEND': 'django_redis.cache.RedisCache',
        # Same Redis instance/DB as Celery (see BROKER_URL above).
        # Fixed: the port was garbled ("637 | 9") in the previous revision.
        'LOCATION': 'redis://{}:6379/0'.format(REDIS_HOST),
        'OPTIONS': {
            'CLIENT_CLASS': 'django_redis.client.DefaultClient',
        }
    },
}
# Session
# Store sessions in the 'default' (Redis) cache backend.
# Fixed: the backend dotted path was garbled ("se | ssions") previously.
SESSION_ENGINE = 'django.contrib.sessions.backends.cache'
SESSION_CACHE_ALIAS = 'default'
# Admin
DAB_FIELD_RENDERER = 'django_admin_bootstrapped.renderers.BootstrapFieldRenderer'
# Bootstrap
BOOTSTRAP3 = {
'horizontal_label_class': 'col-md-2',
'horizontal_field_class': 'col-md-10'
}
# Logging
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'handlers': {
'file': {
'level': 'DEBUG',
'class': 'logging.FileHandler',
'filename': '/srv/app/data/debug.log',
},
},
'loggers': {
'django': {
'handlers': ['file'],
'level': 'DEBUG',
'propagate': True,
},
},
}
# Tests
# Per Collective Custom
from sugarcub.custom_settings import *
if IS_PROD:
from sugarcub.settings_prod import *
else:
from sugarcub.settings_dev import *
TEMPLATES[0]['OPTIONS']['debug'] = DEBUG
|
rhdedgar/openshift-tools | ansible/roles/lib_gcloud/library/gcloud_dm_resource_reconciler.py | Python | apache-2.0 | 25,410 | 0.002676 | #!/usr/bin/env python
# ___ ___ _ _ ___ ___ _ _____ ___ ___
# / __| __| \| | __| _ \ /_\_ _| __| \
# | (_ | _|| .` | _|| / / _ \| | | _|| |) |
# \___|___|_|\_|___|_|_\/_/_\_\_|_|___|___/_ _____
# | \ / _ \ | \| |/ _ \_ _| | __| \_ _|_ _|
# | |) | (_) | | .` | (_) || | | _|| |) | | | |
# |___/ \___/ |_|\_|\___/ |_| |___|___/___| |_|
'''
GcloudCLI class that wraps the oc commands in a subprocess
'''
import atexit
import json
import os
import random
# Not all genearated modules use this.
# pylint: disable=unused-import
import re
import shutil
import string
import subprocess
import tempfile
import yaml
# Not all genearated modules use this.
# pylint: disable=unused-import
import copy
# pylint: disable=import-error
from apiclient.discovery import build
# pylint: disable=import-error
from oauth2client.client import GoogleCredentials
from ansible.module_utils.basic import AnsibleModule
class GcloudCLIError(Exception):
    '''Raised when a gcloud CLI invocation fails.'''
# pylint: disable=too-few-public-methods
class GcloudCLI(object):
''' Class to wrap the command line tools '''
    def __init__(self, credentials=None, project=None, verbose=False):
        ''' Constructor for GcloudCLI

            credentials: optional dict of service-account credentials; when
                omitted, the application-default credentials are used.
            project: GCP project id exposed via the `project` property.
            verbose: stored flag; not otherwise used in this class chunk.
        '''
        self.scope = None
        self._project = project
        if not credentials:
            self.credentials = GoogleCredentials.get_application_default()
        else:
            # Credentials are round-tripped through a temp file because
            # GoogleCredentials.from_stream() only accepts a file path.
            # NOTE(review): json.dumps returns str while NamedTemporaryFile
            # defaults to binary mode on Python 3 — this path looks
            # Python-2-only; confirm the module's target interpreter.
            tmp = tempfile.NamedTemporaryFile()
            tmp.write(json.dumps(credentials))
            tmp.seek(0)
            self.credentials = GoogleCredentials.from_stream(tmp.name)
            tmp.close()
        self.scope = build('compute', 'beta', credentials=self.credentials)
        self.verbose = verbose
    @property
    def project(self):
        '''read-only GCP project id this client operates on'''
        return self._project
    def _create_image(self, image_name, image_info):
        '''create an image named image_name.

           image_info maps gcloud flag names to values; entries with a
           falsy value are skipped.
        '''
        cmd = ['compute', 'images', 'create', image_name]
        for key, val in image_info.items():
            if val:
                cmd.extend(['--%s' % key, val])
        return self.gcloud_cmd(cmd, output=True, output_type='raw')
def _delete_image(self, image_name):
'''delete image by name '''
cmd = ['compute', 'images', 'delete', image_name]
if image_name:
cmd.extend(['describe', image_name])
else:
cmd.append('list')
cmd.append('-q')
return self.gcloud_cmd(cmd, output=True, output_type='raw')
    def _list_images(self, image_name=None):
        '''list images.
        if a name is supplied perform a describe of that image instead
        '''
        cmd = ['compute', 'images']
        if image_name:
            cmd.extend(['describe', image_name])
        else:
            cmd.append('list')
        return self.gcloud_cmd(cmd, output=True, output_type='raw')
    def _list_deployments(self, simple=True):
        '''list deployments; simple=True returns names only (--simple-list)'''
        cmd = ['deployment-manager', 'deployments', 'list']
        if simple:
            cmd.append('--simple-list')
        return self.gcloud_cmd(cmd, output=True, output_type='raw')
    def _delete_deployment(self, dname):
        '''delete the deployment named dname (-q skips confirmation)'''
        cmd = ['deployment-manager', 'deployments', 'delete', dname, '-q']
        return self.gcloud_cmd(cmd, output=True, output_type='raw')
    def _create_deployment(self, dname, config=None, opts=None):
        ''' create a deployment

            config: dict (serialized to a temp file) or path to a config file.
            opts: extra --key=value flags.
        '''
        cmd = ['deployment-manager', 'deployments', 'create', dname]
        if config:
            if isinstance(config, dict):
                # NOTE(review): Utils.create_file is defined elsewhere in this
                # module; presumably it writes `config` to disk and returns
                # the path — confirm.
                config = Utils.create_file(dname, config)
            if isinstance(config, str) and os.path.exists(config):
                cmd.extend(['--config=%s' % config])
        if opts:
            for key, val in opts.items():
                cmd.append('--%s=%s' % (key, val))
        return self.gcloud_cmd(cmd, output=True, output_type='raw')
    def _update_deployment(self, dname, config=None, opts=None):
        ''' update a deployment (same argument handling as _create_deployment)'''
        cmd = ['deployment-manager', 'deployments', 'update', dname]
        if config:
            if isinstance(config, dict):
                config = Utils.create_file(dname, config)
            if isinstance(config, str) and os.path.exists(config):
                cmd.extend(['--config=%s' % config])
        if opts:
            for key, val in opts.items():
                cmd.append('--%s=%s' % (key, val))
        return self.gcloud_cmd(cmd, output=True, output_type='raw')
    def _list_manifests(self, deployment, mname=None):
        ''' list manifests of a deployment
            if a name is specified then perform a describe
        '''
        cmd = ['deployment-manager', 'manifests', '--deployment', deployment]
        if mname:
            cmd.extend(['describe', mname])
        else:
            cmd.append('list')
        cmd.extend(['--format', 'json'])
        return self.gcloud_cmd(cmd, output=True, output_type='json')
    def _delete_address(self, aname):
        ''' delete the static address named aname (-q skips confirmation)'''
        cmd = ['compute', 'addresses', 'delete', aname, '-q']
        return self.gcloud_cmd(cmd, output=True, output_type='raw')
    def _list_addresses(self, aname=None):
        ''' list addresses
            if a name is specified then perform a describe
        '''
        cmd = ['compute', 'addresses']
        if aname:
            cmd.extend(['describe', aname])
        else:
            cmd.append('list')
        return self.gcloud_cmd(cmd, output=True, output_type='raw')
    def _create_address(self, address_name, address_info, address=None, isglobal=False):
        ''' create a static address

            address_info maps gcloud flag names to values (falsy skipped);
            address optionally pins a specific IP; isglobal adds --global.
        '''
        cmd = ['compute', 'addresses', 'create', address_name]
        if address:
            cmd.append(address)
        if isglobal:
            cmd.append('--global')
        for key, val in address_info.items():
            if val:
                cmd.extend(['--%s' % key, val])
        return self.gcloud_cmd(cmd, output=True, output_type='raw')
    def _list_metadata(self, resource_type, name=None, zone=None):
        ''' list metadata by describing the given compute resource'''
        cmd = ['compute', resource_type, 'describe']
        if name:
            cmd.extend([name])
        if zone:
            cmd.extend(['--zone', zone])
        return self.gcloud_cmd(cmd, output=True, output_type='raw')
# pylint: disable=too-many-arguments
    def _delete_metadata(self, resource_type, keys, remove_all=False, name=None, zone=None):
        '''remove metadata keys from a compute resource
           (all keys when remove_all=True)'''
        cmd = ['compute', resource_type, 'remove-metadata']
        if name:
            cmd.extend([name])
        if zone:
            cmd.extend(['--zone', zone])
        if remove_all:
            cmd.append('--all')
        else:
            cmd.append('--keys')
            cmd.append(','.join(keys))
        # -q suppresses the interactive prompt.
        cmd.append('-q')
        return self.gcloud_cmd(cmd, output=True, output_type='raw')
# pylint: disable=too-many-arguments
def _create_metadata(self, resource_type, metadata=None, metadata_from_file=None, name=None, zone=None):
'''create metadata'''
cmd = ['compute', resource_type, 'add-metadata']
if name:
cmd | .extend([name])
if zone:
cmd.extend(['--zone', zone])
data = None
if metadata_from_file:
cmd.append('--metadata-from-file')
data = metadata_from_file
else:
cmd.append('--metadata')
data = metada | ta
cmd.append(','.join(['%s=%s' % (key, val) for key, val in data.items()]))
return self.gcloud_cmd(cmd, output=True, output_type='raw')
    def _list_service_accounts(self, sa_name=None):
        '''return service accounts as JSON;
           describe a single account when sa_name is given'''
        cmd = ['iam', 'service-accounts']
        if sa_name:
            cmd.extend(['describe', sa_name])
        else:
            cmd.append('list')
        cmd.extend(['--format', 'json'])
        return self.gcloud_cmd(cmd, output=True, output_type='json')
def _delete_service_account(self, sa_name):
'''delete service a |
pombredanne/https-git.fedorahosted.org-git-kobo | tests/test_http.py | Python | lgpl-2.1 | 1,375 | 0.002909 | #!/usr/bin/python
# -*- coding: utf-8 -*-
import unittest
import run_tests # set sys.path
import tempfile
import os
from kobo.http import *
class TestPOSTTransport(unittest.TestCase):
    """Tests for kobo.http.POSTTransport."""

    def setUp(self):
        self.postt = POSTTransport()

    def test_get_content_type(self):
        """MIME type is guessed from the file suffix, octet-stream otherwise."""
        tf0 = tempfile.mkstemp()[1]
        tf1 = tempfile.mkstemp(suffix=".txt")[1]
        tf2 = tempfile.mkstemp(suffix=".rtf")[1]
        # Fixed: this line was garbled ("s | uffix") and broke the module.
        tf3 = tempfile.mkstemp(suffix=".avi")[1]
        self.assertEqual(self.postt.get_content_type(tf0), "application/octet-stream")
        self.assertEqual(self.postt.get_content_type(tf1), "text/plain")
        # *.rtf: py2.7 returns 'application/rtf'; py2.4 returns 'text/rtf'
        self.assertEqual(self.postt.get_content_type(tf2).split("/")[1], "rtf")
        self.assertTrue(self.postt.get_content_type(tf2) in ("application/rtf", "text/rtf"))
        self.assertEqual(self.postt.get_content_type(tf3), "video/x-msvideo")

    def test_add_file(self):
        """add_file accepts a path, raises for missing files and file objects."""
        tf1 = tempfile.mkstemp()[1]
        tf2 = tempfile.mkstemp()[1]
        tf3 = open(tempfile.mkstemp()[1])
        self.addCleanup(tf3.close)  # don't leak the handle used for the TypeError case
        os.unlink(tf1)
        self.assertRaises(OSError, self.postt.add_file, "file", tf1)
        self.assertEqual(self.postt.add_file("file", tf2), None)
        self.assertRaises(TypeError, self.postt.add_file, "file", tf3)
if __name__ == '__main__':
unittest.main()
|
onecodex/onecodex | onecodex/vendored/potion_client/__init__.py | Python | mit | 6,097 | 0.002788 | # flake8: noqa
from functools import partial
from operator import getitem, delitem, setitem
from six.moves.urllib.parse import urlparse, urljoin
from weakref import WeakValueDictionary
import requests
try:
    from collections.abc import MutableMapping
except ImportError:
    from collections import MutableMapping  # alias removed from `collections` in Python 3.10
from .converter import PotionJSONDecoder, PotionJSONSchemaDecoder
from .resource import Reference, Resource, uri_for
from .links import Link
from .utils import upper_camel_case, snake_case
class Client(object):
    """Potion API client: fetches the API schema and synthesizes one
    Resource subclass per schema entry.  (This revision repairs two
    garbled lines in resource_factory that broke the syntax.)
    """

    # TODO optional HTTP/2 support: this makes multiple queries simultaneously.
    def __init__(self, api_root_url, schema_path='/schema', fetch_schema=True, **session_kwargs):
        self._instances = WeakValueDictionary()
        self._resources = {}
        self.session = session = requests.Session()
        for key, value in session_kwargs.items():
            setattr(session, key, value)
        parse_result = urlparse(api_root_url)
        self._root_url = '{}://{}'.format(parse_result.scheme, parse_result.netloc)
        self._api_root_url = api_root_url  # '{}://{}'.format(parse_result.scheme, parse_result.netloc)
        self._root_path = parse_result.path
        self._schema_url = api_root_url + schema_path
        if fetch_schema:
            self._fetch_schema()

    def _fetch_schema(self):
        """Download the root schema and register a Resource per property."""
        schema = self.session \
            .get(self._schema_url) \
            .json(cls=PotionJSONSchemaDecoder,
                  referrer=self._schema_url,
                  client=self)
        # NOTE these should perhaps be definitions in Flask-Potion
        for name, resource_schema in schema['properties'].items():
            resource = self.resource_factory(name, resource_schema)
            setattr(self, upper_camel_case(name), resource)

    def instance(self, uri, cls=None, default=None, **kwargs):
        """Return the (weakly cached) instance for *uri*, creating it if needed."""
        instance = self._instances.get(uri, None)
        if instance is None:
            if cls is None:
                try:
                    cls = self._resources[uri[:uri.rfind('/')]]
                except KeyError:
                    cls = Reference
            if isinstance(default, Resource) and default._uri is None:
                default._status = 200
                default._uri = uri
                instance = default
            else:
                instance = cls(uri=uri, **kwargs)
            self._instances[uri] = instance
        return instance

    def fetch(self, uri, cls=PotionJSONDecoder, **kwargs):
        # TODO handle URL fragments (#properties/id etc.)
        response = self.session \
            .get(urljoin(self._root_url, uri, True))
        response.raise_for_status()
        return response.json(cls=cls,
                             client=self,
                             referrer=uri,
                             **kwargs)

    def resource_factory(self, name, schema, resource_cls=None):
        """
        Registers a new resource with a given schema. The schema must not have any unresolved references
        (such as `{"$ref": "#"}` for self-references, or otherwise). A subclass of :class:`Resource`
        may be provided to add specific functionality to the resulting :class:`Resource`.

        :param str name:
        :param dict schema:
        :param Resource resource_cls: a subclass of :class:`Resource` or None
        :return: The new :class:`Resource`.
        """
        cls = type(str(upper_camel_case(name)), (resource_cls or Resource, MutableMapping), {
            '__doc__': schema.get('description', '')
        })
        cls._schema = schema
        cls._client = self
        cls._links = links = {}
        for link_schema in schema['links']:
            link = Link(self,
                        rel=link_schema['rel'],
                        href=link_schema['href'],
                        method=link_schema['method'],
                        schema=link_schema.get('schema', None),
                        target_schema=link_schema.get('targetSchema', None))
            # Set Resource._self, etc. for the special methods as they are managed by the Resource class
            if link.rel in ('self', 'instances', 'create', 'update', 'destroy'):
                setattr(cls, '_{}'.format(link.rel), link)
            links[link.rel] = link
            if link.rel != 'update':  # 'update' is a special case because of MutableMapping.update()
                setattr(cls, snake_case(link.rel), link)
        # TODO routes (instance & non-instance)
        for property_name, property_schema in schema.get('properties', {}).items():
            # skip $uri and $id as these are already implemented in Resource and overriding them causes unnecessary
            # fetches.
            if property_name.startswith('$'):
                continue
            if property_schema.get('readOnly', False):
                # TODO better error message. Raises AttributeError("can't set attribute")
                setattr(cls,
                        property_name,
                        property(fget=partial((lambda name, obj: getitem(obj, name)), property_name),
                                 doc=property_schema.get('description', None)))
            else:
                setattr(cls,
                        property_name,
                        property(fget=partial((lambda name, obj: getitem(obj, name)), property_name),
                                 fset=partial((lambda name, obj, value: setitem(obj, name, value)), property_name),
                                 fdel=partial((lambda name, obj: delitem(obj, name)), property_name),
                                 doc=property_schema.get('description', None)))
        root = None
        if 'instances' in links:
            root = cls._instances.href
        elif 'self' in links:
            root = cls._self.href[:cls._self.href.rfind('/')]
        else:
            root = self._root_path + '/' + name.replace('_', '-')
        self._resources[root] = cls
        return cls
# Sort-direction flags for query ordering helpers.
ASC = ASCENDING = False
DESC = DESCENDING = True
|
texit/texit | graph.py | Python | mit | 2,827 | 0.020516 | import os
import json
from subprocess import check_output, Calle | dProcessError
#Constants
_TREE_PATH="data/graph/"
def renderGraph(query):
    """
    Return the contents of the svg file that contains the graph render
    of the query, rendering and caching the svg first if necessary.

    Fixes over the previous revision: the error path now returns the
    fallback image's *contents* (it used to return an open file object,
    inconsistent with the success path), and all file handles are closed
    via context managers.
    """
    # Queries are bucketed by hash; each bucket has a lookup.json table
    # mapping query strings to rendered svg filenames.
    bucket = _TREE_PATH + str(hashFunc(query))
    if not os.path.exists(bucket):
        # Create the bucket and its empty lookup table.
        os.makedirs(bucket)
        with open(bucket + "/lookup.json", 'w') as bucketTableFile:
            bucketTableFile.write("{}")
    with open(bucket + "/lookup.json", 'r+') as bucketTableFile:
        bucketTable = json.loads(bucketTableFile.read())
        if query not in bucketTable:
            # Not cached: render the query into a new svg in the bucket.
            # (os.listdir includes lookup.json, so the first svg is "1.svg".)
            filename = str(len(os.listdir(bucket))) + ".svg"
            fn = query.split(",")[0]
            rest = query.split(",")[1:]
            myParams = {i[0]: i[1] for i in map(lambda x: x.split("="), rest)}
            if not TeXToGraph(fn, bucket, filename, myParams):
                # An error has occurred while rendering the LaTeX.
                with open(handleTeXRenderError(
                        "An error has occurred while rendering LaTeX.")) as err_img:
                    return err_img.read()
            # Update and persist the lookup table.
            bucketTable[query] = filename
            bucketTableFile.seek(0)
            bucketTableFile.write(json.dumps(bucketTable))
    with open(bucket + "/" + bucketTable[query]) as rendered:
        return rendered.read()
def hashFunc(s):
    """
    Return a stable, non-negative integer hash of s.

    The previous implementation used the built-in hash(), which is
    randomized per process on Python 3 (PYTHONHASHSEED), so cache bucket
    paths changed between runs and the on-disk cache was never reused.
    sha256 keeps buckets stable across restarts and interpreters.
    """
    import hashlib
    return int(hashlib.sha256(s.encode("utf-8")).hexdigest(), 16)
def TeXToGraph(fn, targetDir, name, paramsIn):
    """
    Render the graph of function *fn* to an svg named *name* in
    *targetDir*.  Returns True on success, False if the render script
    exited non-zero.
    """
    params = {
        'xmin': -10,
        'xmax': 10,
        'ymin': -10,
        'ymax': 10,
        'xlabel': "x",
        'ylabel': "y",
    }
    for i in paramsIn:
        # Axis bounds are integers; the labels stay as strings.
        if i != 'xlabel' and i != 'ylabel':
            params[i] = int(paramsIn[i])
        else:
            params[i] = paramsIn[i]
    print(params)
    print(fn)
    # Build the argument vector directly instead of str.format().split():
    # splitting on whitespace broke any argument containing a space and
    # let crafted query input smuggle extra arguments into the command.
    argv = ["./to_graph.sh", fn,
            str(params['xmin']), str(params['xmax']),
            str(params['ymin']), str(params['ymax']),
            params['xlabel'], params['ylabel'],
            targetDir, name]
    try:
        check_output(argv)
    except CalledProcessError:
        return False
    return True
def handleTeXRenderError(errorMsg):
    """
    Log *errorMsg* and return the path of the fallback error image.
    """
    # print() with a single argument works on both Python 2 and 3;
    # the old `print errorMsg` statement was Python-2-only.
    print(errorMsg)
    return "assets/img/error.png"
|
lolosk/microblog | app/__init__.py | Python | gpl-2.0 | 71 | 0.028169 |
from flask import Flask

app = Flask(__name__)

# Imported at the bottom on purpose: app.views itself imports `app`,
# so a top-of-file import would be circular.
from app import views
| |
endlessm/chromium-browser | third_party/depot_tools/recipes/recipe_modules/bot_update/__init__.py | Python | bsd-3-clause | 1,081 | 0.00185 | DEPS = [
# Recipe-module dependency list consumed by the recipe engine.
DEPS = [
    'depot_tools',
    'gclient',
    'gerrit',
    'gitiles',
    'recipe_engine/buildbucket',
    'recipe_engine/context',
    'recipe_engine/commit_position',
    'recipe_engine/cq',
    'recipe_engine/json',
    'recipe_engine/path',
    'recipe_engine/platform',
    'recipe_engine/properties',
    'recipe_engine/python',
    'recipe_engine/raw_io',
    'recipe_engine/runtime',
    'recipe_engine/source_manifest',
    'recipe_engine/step',
    'tryserver',
]

from recipe_engine.recipe_api import Property
from recipe_engine.config import ConfigGroup, Single

PROPERTIES = {
    # Gerrit patches will have all properties about them prefixed with patch_.
    'deps_revision_overrides': Property(default={}),
    'fail_patch': Property(default=None, kind=str),
    '$depot_tools/bot_update': Property(
        help='Properties specific to bot_update module.',
        param_name='properties',
        kind=ConfigGroup(
            # Whether we should do the patching in gclient instead of bot_update
            apply_patch_on_gclient=Single(bool),
        ),
        default={},
    ),
}
tbodt/v8py | tests/test_context.py | Python | lgpl-3.0 | 3,108 | 0.004505 | import pytest
import time
from v8py import JavaScriptTerminated, current_context, new
def test_glob(context):
    """Globals created from JS are readable through the context's glob object."""
    context.eval('foo = "bar"')
    assert context.glob.foo == 'bar'
def test_getattr(context):
    """Attributes set on the context appear as JS globals and vice versa."""
    context.foo = 'bar'
    assert context.foo == 'bar'
    assert context.glob.foo == 'bar'
    assert context.eval('foo') == 'bar'
def test_getitem(context):
    """Item access on the context mirrors attribute access and JS globals."""
    context['foo'] = 'bar'
    assert context['foo'] == 'bar'
    assert context.glob['foo'] == 'bar'
    assert context.eval('foo') == 'bar'
def test_timeout(context):
    """A per-call timeout terminates an infinite JS loop."""
    with pytest.raises(JavaScriptTerminated):
        context.eval('for(;;) {}', timeout=0.1)
def test_timeout_property(context_with_timeout):
    """The timeout property is readable, writable, and actually enforced."""
    assert context_with_timeout.timeout == 0.1
    start = time.time()
    with pytest.raises(JavaScriptTerminated):
        context_with_timeout.eval('for(;;) {}')
    diff = time.time() - start
    # NOTE(review): the upper bounds (0.2 / 0.3) assume a lightly loaded
    # machine and could be flaky under heavy load.
    assert diff >= 0.1 and diff < 0.2
    context_with_timeout.timeout = 0.25
    assert context_with_timeout.timeout == 0.25
    start = time.time()
    with pytest.raises(JavaScriptTerminated):
        context_with_timeout.eval('for(;;) {}')
    diff = time.time() - start
    assert diff >= 0.25 and diff < 0.3
def test_timeout_context_level(context_with_timeout):
    """A context-level timeout applies to eval() without a per-call timeout."""
    with pytest.raises(JavaScriptTerminated):
        context_with_timeout.eval('for(;;) {}')
def test_timeout_new(context_with_timeout):
    """The timeout also covers constructor invocation via new()."""
    context_with_timeout.eval('function Freeze() { while(true); }')
    with pytest.raises(JavaScriptTerminated):
        new(context_with_timeout.glob.Freeze)
def test_timeout_call(context_with_timeout):
    """The timeout also covers plain calls of JS functions from Python."""
    context_with_timeout.eval('function freeze() { while(true); }')
    with pytest.raises(JavaScriptTerminated):
        context_with_timeout.glob.freeze()
def test_timeout_proxy(context_with_timeout):
    """Proxy traps that loop forever are killed by the context timeout.

    (This revision repairs two garbled lines inside the JS source literal
    that made the snippet — and the module — syntactically invalid.)
    """
    context_with_timeout.eval("""
    user = {};
    user.testA = 0;
    user.testC = 10;
    proxy = new Proxy(user, {
        get(target, prop) {
            if (prop == "testA") while(true);
        },
        set(target, prop, value) {
            if (prop == "testB") while(true);
            return false;
        },
        deleteProperty(target, phrase) {
            if (phrase == "testC") while(true);
            return false;
        }
    });
    """)
    proxy = context_with_timeout.glob.proxy
    with pytest.raises(JavaScriptTerminated):
        testA = proxy.testA
    with pytest.raises(JavaScriptTerminated):
        proxy.testB = 5
    with pytest.raises(JavaScriptTerminated):
        del proxy.testC
def test_expose(context):
    """expose() publishes callables to JS, positionally and by keyword name."""
    def f(): return 'f'
    def g(): return 'g'
    context.expose(f, g, h=f)
    assert context.eval('f()') == 'f'
    assert context.eval('g()') == 'g'
    assert context.eval('h()') == 'f'
def f(): pass  # module-level function picked up by test_expose_module below
def test_expose_module(context):
    """expose_module() publishes a whole module's functions (module-level f)."""
    import test_context
    context.expose_module(test_context)
    assert context.eval('f()') is None
def test_current_context(context):
    """current_context() is None outside JS and set during a callback."""
    assert current_context() is None
    def f():
        # Runs while JS is executing, so the context must be active here.
        assert current_context() is context
    context.expose(f)
    context.eval('f()')
|
m-rossi/matplotlib2tikz | test/test_text_overlay.py | Python | mit | 2,420 | 0.00124 | import matplotlib.pyplot as plt
import numpy
from helpers import assert_equality
def plot():
    """Build the text-overlay demo figure: a parabola with eight styled
    text boxes.

    The eight near-identical plt.text calls are expressed as a data table
    and a single loop; this also repairs two garbled argument lines
    ("p | ad", "ha | =") that broke the module.
    """
    fig = plt.figure()
    xxx = numpy.linspace(0, 5)
    yyy = xxx ** 2
    # Shared pink edge/face colors used by every bbox.
    pink = dict(ec=(1.0, 0.5, 0.5), fc=(1.0, 0.8, 0.8))
    # (x, y, label, text kwargs, bbox kwargs) for each annotation.
    texts = [
        (1, 5, "test1",
         dict(size=50, rotation=30.0, ha="center", va="bottom", color="r",
              style="italic", weight="light"),
         dict(boxstyle="round, pad=0.2", ls="dashdot", **pink)),
        (3, 6, "test2",
         dict(size=50, rotation=-30.0, ha="center", va="center", color="b",
              weight="bold"),
         dict(boxstyle="square", **pink)),
        (4, 8, "test3",
         dict(size=20, rotation=90.0, ha="center", va="center", color="b",
              weight="demi"),
         dict(boxstyle="rarrow", ls="dashed", **pink)),
        (4, 16, "test4",
         dict(size=20, rotation=90.0, ha="center", va="center", color="b",
              weight="heavy"),
         dict(boxstyle="larrow", ls="dotted", **pink)),
        (2, 18, "test5",
         dict(size=20, ha="center", va="center", color="b"),
         dict(boxstyle="darrow", **pink)),
        (1, 20, "test6",
         dict(size=20, ha="center", va="center", color="b"),
         dict(boxstyle="circle", **pink)),
        (3, 23, "test7",
         dict(size=20, ha="center", va="center", color="b"),
         dict(boxstyle="roundtooth", **pink)),
        (3, 20, "test8",
         dict(size=20, ha="center", va="center", color="b"),
         dict(boxstyle="sawtooth", **pink)),
    ]
    for x, y, label, text_kwargs, bbox_kwargs in texts:
        plt.text(x, y, label, bbox=bbox_kwargs, **text_kwargs)
    plt.plot(xxx, yyy, label="a graph")
    plt.legend()
    return fig
def test():
    """Compare plot()'s TikZ output against the stored reference file."""
    reference = __file__[:-3] + "_reference.tex"
    assert_equality(plot, reference)
    return
|
luosch/leetcode | python/Find Minimum in Rotated Sorted Array.py | Python | mit | 428 | 0.002336 | class Solution(object):
def findMin(s | elf, nums):
left = 0
right = len(nums) - 1
mid = (left + right) / 2
while left <= right:
if nums[left] <= nums[ | right]:
return nums[left]
mid = (left + right) / 2
if (nums[mid] >= nums[left]):
left = mid + 1
else:
right = mid
return nums[mid]
|
Shiwin/LiteNote | noties/urls.py | Python | gpl-2.0 | 1,648 | 0.016383 | from django.conf.urls import patterns, include, url
from django.contrib import admin
from django.contrib.auth import views
admin.autodiscover()

# URL routes for the LiteNote app.
# NOTE: the app package really is spelled "regsiter" in this project, and
# "delet_note" is the actual function name in lite_note.tools.
urlpatterns = patterns('',
    url(r'^$', 'lite_note.views.home', name='home'),
    url(r'^test', 'lite_note.views.new_home', name='new_home'),
    url(r'^admin/', include(admin.site.urls)),
    url(r'^login/', views.login, name='login'),
    url(r'^logout/', views.logout, {'next_page': 'home'}, name='logout'),
    url(r'^register/', 'regsiter.views.registration', name='registration_register'),
    url(r'^create/', 'lite_note.views.create_note', name='create_note'),
    url(r'^unknown/', 'lite_note.views.enter_anonymous_user', name='enter_anonymous'),
    url(r'^note/(?P<id>[0-9]+)/', 'lite_note.views.note', name='note'),
    url(r'^delete/(?P<id>[0-9]+)', 'lite_note.tools.delet_note'),
    url(r'^private/(?P<id>[0-9]+)', 'lite_note.tools.make_private_note'),
    url(r'^public/(?P<id>[0-9]+)', 'lite_note.tools.make_public_note'),
    url(r'^favorite/(?P<id>[0-9]+)', 'lite_note.tools.make_favorite_note'),
    url(r'^unfavorite/(?P<id>[0-9]+)', 'lite_note.tools.make_usual_note'),
    url(r'^get_login', 'regsiter.views.request_login'),
    # A second, identical r'^test' -> new_home entry was removed: it
    # registered the same pattern and name twice and could never match.
    url(r'^get_notes', 'lite_note.views.new_note', name='new_note')
)
|
jdhenke/ally | core.py | Python | mit | 4,087 | 0.005872 | #!/bin/python
'''
Library for formulating and solving game trees as linear programs.
'''
class Node(object):
    '''Abstract class to represent a node in the game tree.'''

    def solve(self):
        '''
        Should populate the solutions dictionary.

        solutions: TerminalNode ==> list of lists of inequalities

        Basically, if you treat the inequalities as booleans, it stores
        the requirements to arrive at the terminal node in CDF form.
        '''
        # Implicit string concatenation instead of a backslash line
        # continuation: the old form embedded the next line's indentation
        # spaces inside the error message itself.
        raise NotImplementedError(
            "Solve is not defined for Node. "
            "Instantiate a subclass instead")
class TerminalNode(Node):
    '''A leaf in the game tree.'''

    def __init__(self, name, utilities):
        '''
        @name - A string which uniquely identifies this node
        @utilities - Anything.
            Must mesh with FolderNode utility functions though
        '''
        self.name = name
        self.utilities = utilities
        # A leaf is trivially reachable from itself: the CDF is just True.
        self.solutions = {self: singleton(True)}

    def solve(self):
        '''Leaves are their own solution; nothing to recurse into.'''
        pass

    def __str__(self):
        return self.name

    def __repr__(self):
        return str(self)

    def __hash__(self):
        # Hash on the unique name so equal-named leaves collide.
        return hash(self.name)
class FolderNode(Node):
    '''An internal decision node: the player picks the best child branch.'''

    def __init__(self, children, util_func):
        '''
        @children - list of Node subtrees under this decision point
        @util_func - maps a TerminalNode to the utility used for comparison
        '''
        self.children = children
        self.solutions = {}
        self.util = util_func

    def solve(self):
        '''Calls solve on entire subtree too'''
        # if empty folder, no children
        if len(self.children) == 0:
            return

        # at least 1 child, so solve all of them; start every leaf as
        # unreachable (False) and accumulate routes below
        for child in self.children:
            child.solve()
            for leaf in child.solutions:
                self.solutions[leaf] = singleton(False)

        # if only one child, there is no choice, so use the decision
        # making criteria of that child
        if len(self.children) == 1:
            # BUG FIX: this previously assigned to `self.solution` (no
            # trailing 's'), so self.solutions kept every leaf marked
            # unreachable for single-child folders.
            self.solutions = self.children[0].solutions
            return

        # more than 1 child: find all ways possible to arrive at each leaf
        for leftLeaf, rightLeaf, leftChild, rightChild in self._gen_children():
            # requirements to come down to a decision between these leaves
            req = And(leftChild.solutions[leftLeaf], rightChild.solutions[rightLeaf])

            # get utilities of each leaf
            leftUtil = self.util(leftLeaf)
            rightUtil = self.util(rightLeaf)

            # to choose left leaf, need req and that left is better than right
            reqLeftWins = And(req, singleton(leftUtil >= rightUtil))
            reqRightWins = And(req, singleton(rightUtil >= leftUtil))

            # add this to possible solutions
            self.solutions[leftLeaf] = Or(self.solutions[leftLeaf], reqLeftWins)
            self.solutions[rightLeaf] = Or(self.solutions[rightLeaf], reqRightWins)

    def _gen_children(self):
        '''
        Generator for all pairs of leaves l1, l2 st.
        l1 and l2 are in different immediate subtrees.
        Also includes these immediate subtrees roots i.e. this nodes
        direct children.
        So yields: (leftLeaf, rightLeaf, leftChild, rightChild, )
        '''
        # range() (not xrange) keeps this Python 2/3 compatible
        for i in range(len(self.children) - 1):
            for j in range(i + 1, len(self.children)):
                leftChild = self.children[i]
                rightChild = self.children[j]
                leftLeaves = leftChild.solutions.keys()
                rightLeaves = rightChild.solutions.keys()
                for leftLeaf in leftLeaves:
                    for rightLeaf in rightLeaves:
                        yield (leftLeaf, rightLeaf, leftChild, rightChild, )
# functions which maintain CDF form of inequalities
def And(a, b):
    """Conjunction of two CDF-form inequality sets.

    Distributes every clause of ``a`` over every clause of ``b``; the
    result must stay a tuple so Or() can concatenate results.
    """
    return tuple(x + y for x in a for y in b)
def Or(a, b):
    """Disjunction in CDF form: concatenate the alternative clause lists."""
    combined = a + b
    return combined
def singleton(x):
    """Wrap ``x`` as a minimal CDF: one alternative with one condition."""
    # must be a list of lists
    return ((x,),)
|
pawkoz/dyplom | blender/doc/python_api/examples/bpy.props.4.py | Python | gpl-2.0 | 495 | 0.00202 | """
Update Example
++++++++++++++
It can be useful to perform an action when a property is changed and | can be
used to update other properties or synchronize with exte | rnal data.
All properties define update functions except for CollectionProperty.
"""
import bpy
def update_func(self, context):
print("my test function", self)
bpy.types.Scene.testprop = bpy.props.FloatProperty(update=update_func)
bpy.context.scene.testprop = 11.0
# >>> my test function <bpy_struct, Scene("Scene")>
|
androomerrill/scikit-nano | sknano/apps/nanogen_gui/_pyqt4_ui_mwnt_Ch_list_item_dialog.py | Python | bsd-2-clause | 5,271 | 0.001897 | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'mwnt_Ch_list_item_dialog.ui'
#
# Created by: PyQt4 UI code generator 4.11.4
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
try:
    _fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
    # PyQt API v2 has no QString; strings are already unicode, pass through.
    def _fromUtf8(s):
        return s
try:
    _encoding = QtGui.QApplication.UnicodeUTF8
    def _translate(context, text, disambig):
        return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
    # Newer Qt drops the encoding argument from translate().
    def _translate(context, text, disambig):
        return QtGui.QApplication.translate(context, text, disambig)
class Ui_MWNTChListItemDialog(object):
    """Auto-generated (pyuic4) layout for the MWNT (n, m) chirality dialog.

    Do not edit by hand -- regenerate from mwnt_Ch_list_item_dialog.ui.
    """

    def setupUi(self, MWNTChListItemDialog):
        """Create widgets, fonts and layouts on the given dialog."""
        MWNTChListItemDialog.setObjectName(_fromUtf8("MWNTChListItemDialog"))
        MWNTChListItemDialog.resize(302, 103)
        font = QtGui.QFont()
        font.setFamily(_fromUtf8("Arial"))
        MWNTChListItemDialog.setFont(font)
        self.verticalLayout = QtGui.QVBoxLayout(MWNTChListItemDialog)
        self.verticalLayout.setObjectName(_fromUtf8("verticalLayout"))
        self.horizontalLayout_5 = QtGui.QHBoxLayout()
        self.horizontalLayout_5.setObjectName(_fromUtf8("horizontalLayout_5"))
        self.horizontalLayout_3 = QtGui.QHBoxLayout()
        self.horizontalLayout_3.setObjectName(_fromUtf8("horizontalLayout_3"))
        # --- "n =" label + spin box ---
        self.horizontalLayout_2 = QtGui.QHBoxLayout()
        self.horizontalLayout_2.setObjectName(_fromUtf8("horizontalLayout_2"))
        self.label_7 = QtGui.QLabel(MWNTChListItemDialog)
        font = QtGui.QFont()
        font.setFamily(_fromUtf8("Arial"))
        font.setPointSize(16)
        self.label_7.setFont(font)
        self.label_7.setObjectName(_fromUtf8("label_7"))
        self.horizontalLayout_2.addWidget(self.label_7)
        self.n_spin_box = QtGui.QSpinBox(MWNTChListItemDialog)
        self.n_spin_box.setMinimumSize(QtCore.QSize(90, 36))
        font = QtGui.QFont()
        font.setFamily(_fromUtf8("Arial"))
        font.setPointSize(16)
        self.n_spin_box.setFont(font)
        self.n_spin_box.setMaximum(100)
        self.n_spin_box.setProperty("value", 10)
        self.n_spin_box.setObjectName(_fromUtf8("n_spin_box"))
        self.horizontalLayout_2.addWidget(self.n_spin_box)
        self.horizontalLayout_3.addLayout(self.horizontalLayout_2)
        # --- "m =" label + spin box ---
        self.horizontalLayout_4 = QtGui.QHBoxLayout()
        self.horizontalLayout_4.setObjectName(_fromUtf8("horizontalLayout_4"))
        self.label_14 = QtGui.QLabel(MWNTChListItemDialog)
        font = QtGui.QFont()
        font.setFamily(_fromUtf8("Arial"))
        font.setPointSize(16)
        self.label_14.setFont(font)
        self.label_14.setObjectName(_fromUtf8("label_14"))
        self.horizontalLayout_4.addWidget(self.label_14)
        self.m_spin_box = QtGui.QSpinBox(MWNTChListItemDialog)
        self.m_spin_box.setMinimumSize(QtCore.QSize(90, 36))
        font = QtGui.QFont()
        font.setFamily(_fromUtf8("Arial"))
        font.setPointSize(16)
        self.m_spin_box.setFont(font)
        self.m_spin_box.setMaximum(100)
        self.m_spin_box.setProperty("value", 10)
        self.m_spin_box.setObjectName(_fromUtf8("m_spin_box"))
        self.horizontalLayout_4.addWidget(self.m_spin_box)
        self.horizontalLayout_3.addLayout(self.horizontalLayout_4)
        self.horizontalLayout_5.addLayout(self.horizontalLayout_3)
        self.verticalLayout.addLayout(self.horizontalLayout_5)
        # --- OK / Cancel buttons ---
        self.horizontalLayout = QtGui.QHBoxLayout()
        self.horizontalLayout.setObjectName(_fromUtf8("horizontalLayout"))
        self.ok_push_button = QtGui.QPushButton(MWNTChListItemDialog)
        font = QtGui.QFont()
        font.setFamily(_fromUtf8("Arial"))
        font.setPointSize(12)
        self.ok_push_button.setFont(font)
        self.ok_push_button.setObjectName(_fromUtf8("ok_push_button"))
        self.horizontalLayout.addWidget(self.ok_push_button)
        self.cancel_push_button = QtGui.QPushButton(MWNTChListItemDialog)
        font = QtGui.QFont()
        font.setFamily(_fromUtf8("Arial"))
        font.setPointSize(12)
        self.cancel_push_button.setFont(font)
        self.cancel_push_button.setObjectName(_fromUtf8("cancel_push_button"))
        self.horizontalLayout.addWidget(self.cancel_push_button)
        self.verticalLayout.addLayout(self.horizontalLayout)
        spacerItem = QtGui.QSpacerItem(20, 2, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
        self.verticalLayout.addItem(spacerItem)

        self.retranslateUi(MWNTChListItemDialog)
        QtCore.QMetaObject.connectSlotsByName(MWNTChListItemDialog)

    def retranslateUi(self, MWNTChListItemDialog):
        """Install (re-)translated user-visible strings."""
        MWNTChListItemDialog.setWindowTitle(_translate("MWNTChListItemDialog", "(n, m) Dialog", None))
        self.label_7.setText(_translate("MWNTChListItemDialog", "<html><head/><body><p align=\"right\">n = </p></body></html>", None))
        self.label_14.setText(_translate("MWNTChListItemDialog", "<html><head/><body><p align=\"right\">m = </p></body></html>", None))
        self.ok_push_button.setText(_translate("MWNTChListItemDialog", "OK", None))
        self.cancel_push_button.setText(_translate("MWNTChListItemDialog", "Cancel", None))
|
google/lasr | third_party/chamfer3D/dist_chamfer_3D.py | Python | apache-2.0 | 2,341 | 0.002136 | from torch import nn
from torch.autograd import Function
import torch
import importlib
import os
# Prefer a pre-built chamfer_3D extension; fall back to JIT compilation.
# importlib.find_loader() was deprecated in 3.4 and removed in Python 3.12,
# so probe with importlib.util.find_spec() instead.
import importlib.util

chamfer_found = importlib.util.find_spec("chamfer_3D") is not None
if not chamfer_found:
    ## Cool trick from https://github.com/chrdiller
    print("Jitting Chamfer 3D")
    from torch.utils.cpp_extension import load
    # Compile the CUDA extension from sources that sit next to this file.
    chamfer_3D = load(name="chamfer_3D",
          sources=[
              "/".join(os.path.abspath(__file__).split('/')[:-1] + ["chamfer_cuda.cpp"]),
              "/".join(os.path.abspath(__file__).split('/')[:-1] + ["chamfer3D.cu"]),
              ])
    print("Loaded JIT 3D CUDA chamfer distance")
else:
    import chamfer_3D
    print("Loaded compiled 3D CUDA chamfer distance")
# Chamfer's distanc | e module @thibaultgroueix
# GPU tensors only
class chamfer_3DFunction(Function):
    """Autograd Function wrapping the CUDA chamfer kernels (GPU tensors only)."""

    @staticmethod
    def forward(ctx, xyz1, xyz2):
        # xyz1: (batch, n, 3), xyz2: (batch, m, 3) -- 3D point clouds.
        batchsize, n, _ = xyz1.size()
        _, m, _ = xyz2.size()
        device = xyz1.device

        # Output buffers: per-point squared distance to the nearest neighbor
        # in the other cloud, plus the index of that neighbor.
        dist1 = torch.zeros(batchsize, n)
        dist2 = torch.zeros(batchsize, m)

        idx1 = torch.zeros(batchsize, n).type(torch.IntTensor)
        idx2 = torch.zeros(batchsize, m).type(torch.IntTensor)

        dist1 = dist1.to(device)
        dist2 = dist2.to(device)
        idx1 = idx1.to(device)
        idx2 = idx2.to(device)
        torch.cuda.set_device(device)

        # CUDA kernel fills dist/idx in place.
        chamfer_3D.forward(xyz1, xyz2, dist1, dist2, idx1, idx2)
        # Save inputs and correspondence indices for the backward pass.
        ctx.save_for_backward(xyz1, xyz2, idx1, idx2)
        return dist1, dist2, idx1, idx2

    @staticmethod
    def backward(ctx, graddist1, graddist2, gradidx1, gradidx2):
        # gradidx1/gradidx2 are ignored: indices are not differentiable.
        xyz1, xyz2, idx1, idx2 = ctx.saved_tensors
        graddist1 = graddist1.contiguous()
        graddist2 = graddist2.contiguous()
        device = graddist1.device

        gradxyz1 = torch.zeros(xyz1.size())
        gradxyz2 = torch.zeros(xyz2.size())

        gradxyz1 = gradxyz1.to(device)
        gradxyz2 = gradxyz2.to(device)
        # CUDA kernel scatters distance gradients back to both point clouds.
        chamfer_3D.backward(
            xyz1, xyz2, gradxyz1, gradxyz2, graddist1, graddist2, idx1, idx2
        )
        return gradxyz1, gradxyz2
class chamfer_3DDist(nn.Module):
    """nn.Module wrapper around the CUDA chamfer-distance autograd Function."""

    def __init__(self):
        super(chamfer_3DDist, self).__init__()

    def forward(self, input1, input2):
        # The CUDA kernels require contiguous memory layouts.
        points1 = input1.contiguous()
        points2 = input2.contiguous()
        return chamfer_3DFunction.apply(points1, points2)
|
geotagx/geotagx-pybossa-archive | pybossa/util.py | Python | agpl-3.0 | 11,505 | 0.000782 | # -*- coding: utf8 -*-
# This file is part of PyBossa.
#
# Copyright (C) 2013 SF Isle of Man Limited
#
# PyBossa is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PyBossa is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with PyBossa. If not, see <http://www.gnu.org/licenses/>.
from datetime import timedelta
from functools import update_wrapper
import csv
import codecs
import cStringIO
from flask import abort, request, make_response, current_app
from functools import wraps
from flask_oauth import OAuth
from flask.ext.login import current_user
from math import ceil
import json
def jsonpify(f):
    """Wraps JSONified output for JSONP"""
    @wraps(f)
    def decorated_function(*args, **kwargs):
        callback = request.args.get('callback', False)
        if not callback:
            # No callback requested: plain JSON response.
            return f(*args, **kwargs)
        # Wrap the JSON payload in the requested callback invocation.
        content = str(callback) + '(' + str(f(*args, **kwargs).data) + ')'
        return current_app.response_class(content,
                                          mimetype='application/javascript')
    return decorated_function
def admin_required(f):
    """Abort with 403 Forbidden unless the current user is an admin."""
    @wraps(f)
    def decorated_function(*args, **kwargs):
        if not current_user.admin:
            return abort(403)
        return f(*args, **kwargs)
    return decorated_function
# from http://flask.pocoo.org/snippets/56/
def crossdomain(origin=None, methods=None, headers=None,
                max_age=21600, attach_to_all=True,
                automatic_options=True):
    """Decorator factory that adds CORS headers to a Flask view.

    :param origin: allowed origin(s); string or iterable of strings
    :param methods: allowed HTTP methods (defaults to Flask's Allow header)
    :param headers: allowed request headers; string or iterable
    :param max_age: preflight cache lifetime (seconds or timedelta)
    :param attach_to_all: add headers to all responses, not just OPTIONS
    :param automatic_options: answer OPTIONS preflights automatically
    """
    # Normalize the configuration into header-ready strings once, up front.
    if methods is not None:
        methods = ', '.join(sorted(x.upper() for x in methods))
    if headers is not None and not isinstance(headers, basestring):
        headers = ', '.join(x.upper() for x in headers)
    if not isinstance(origin, basestring):
        origin = ', '.join(origin)
    if isinstance(max_age, timedelta):
        max_age = max_age.total_seconds()

    def get_methods():
        # Honor an explicit whitelist; otherwise mirror the default
        # OPTIONS response's Allow header.
        if methods is not None:
            return methods

        options_resp = current_app.make_default_options_response()
        return options_resp.headers['allow']

    def decorator(f):
        def wrapped_function(*args, **kwargs):
            if automatic_options and request.method == 'OPTIONS':
                resp = current_app.make_default_options_response()
            else:
                resp = make_response(f(*args, **kwargs))
            if not attach_to_all and request.method != 'OPTIONS':
                return resp

            h = resp.headers

            h['Access-Control-Allow-Origin'] = origin
            h['Access-Control-Allow-Methods'] = get_methods()
            h['Access-Control-Max-Age'] = str(max_age)
            if headers is not None:
                h['Access-Control-Allow-Headers'] = headers
            return resp

        # Let the wrapper (not Flask) answer OPTIONS requests.
        f.provide_automatic_options = False
        return update_wrapper(wrapped_function, f)
    return decorator
# From http://stackoverflow.com/q/1551382
def pretty_date(time=False):
    """
    Get a datetime object, an int() Epoch timestamp or a parseable date
    string and return a pretty string like 'an hour ago', 'Yesterday',
    '3 months ago', 'just now', etc
    """
    from datetime import datetime
    now = datetime.now()
    # Only parse values that are not already ints/datetimes/False.
    # (Previously every value was fed to dateutil.parser, which crashed on
    # the documented int and datetime inputs and made the branches below
    # unreachable for them.)
    if time and not isinstance(time, (int, datetime)):
        import dateutil.parser
        time = dateutil.parser.parse(time)
    if type(time) is int:
        diff = now - datetime.fromtimestamp(time)
    elif isinstance(time, datetime):
        diff = now - time
    elif not time:
        diff = now - now
    second_diff = diff.seconds
    day_diff = diff.days

    if day_diff < 0:
        return ''

    # Floor division (//) keeps integer results on both Python 2 and 3.
    if day_diff == 0:
        if second_diff < 10:
            return "just now"
        if second_diff < 60:
            return str(second_diff) + " seconds ago"
        if second_diff < 120:
            return "a minute ago"
        if second_diff < 3600:
            return ' '.join([str(second_diff // 60), "minutes ago"])
        if second_diff < 7200:
            return "an hour ago"
        if second_diff < 86400:
            return ' '.join([str(second_diff // 3600), "hours ago"])
    if day_diff == 1:
        return "Yesterday"
    if day_diff < 7:
        return ' '.join([str(day_diff), "days ago"])
    if day_diff < 31:
        return ' '.join([str(day_diff // 7), "weeks ago"])
    if day_diff < 60:
        return ' '.join([str(day_diff // 30), "month ago"])
    if day_diff < 365:
        return ' '.join([str(day_diff // 30), "months ago"])
    if day_diff < (365 * 2):
        return ' '.join([str(day_diff // 365), "year ago"])
    return ' '.join([str(day_diff // 365), "years ago"])
class Pagination(object):
    """Compute page counts and page-number windows for paginated listings."""

    def __init__(self, page, per_page, total_count):
        self.page = page                # current page, 1-based
        self.per_page = per_page        # items shown per page
        self.total_count = total_count  # total number of items

    @property
    def pages(self):
        """Total number of pages (0 when there are no items)."""
        return int(ceil(self.total_count / float(self.per_page)))

    @property
    def has_prev(self):
        """True when a previous page exists."""
        return self.page > 1

    @property
    def has_next(self):
        """True when a further page exists."""
        return self.page < self.pages

    def iter_pages(self, left_edge=0, left_current=2, right_current=3,
                   right_edge=0):
        """Yield page numbers for a pagination widget.

        Emits the first ``left_edge`` pages, a window around the current
        page, and the last ``right_edge`` pages; ``None`` marks each gap.
        """
        last = 0
        # range() (not xrange) keeps this working on Python 2 and 3 alike.
        for num in range(1, self.pages + 1):
            if (num <= left_edge or
                (num > self.page - left_current - 1 and
                 num < self.page + right_current) or
                num > self.pages - right_edge):
                if last + 1 != num:
                    yield None
                yield num
                last = num
class Twitter:
    """Holds a flask-oauth remote app configured for Twitter sign-in."""

    oauth = OAuth()

    def __init__(self, c_k, c_s):
        # :param c_k: Twitter consumer key
        # :param c_s: Twitter consumer secret
        #oauth = OAuth()
        # Use Twitter as example remote application
        self.oauth = self.oauth.remote_app(
            'twitter',
            # unless absolute urls are used to make requests,
            # this will be added before all URLs. This is also true for
            # request_token_url and others.
            base_url='https://api.twitter.com/1/',
            # where flask should look for new request tokens
            request_token_url='https://api.twitter.com/oauth/request_token',
            # where flask should exchange the token with the remote application
            access_token_url='https://api.twitter.com/oauth/access_token',
            # twitter knows two authorizatiom URLs. /authorize and
            # /authenticate. They mostly work the same, but for sign
            # on /authenticate is expected because this will give
            # the user a slightly different
            # user interface on the twitter side.
            authorize_url='https://api.twitter.com/oauth/authenticate',
            # the consumer keys from the twitter application registry.
            consumer_key=c_k,  # app.config['TWITTER_CONSUMER_KEY'],
            consumer_secret=c_s)  # app.config['TWITTER_CONSUMER_KEY']
class Facebook:
    """Holds a flask-oauth remote app configured for Facebook login."""

    oauth = OAuth()

    def __init__(self, c_k, c_s):
        # :param c_k: Facebook application id
        # :param c_s: Facebook application secret
        self.oauth = self.oauth.remote_app(
            'facebook',
            base_url='https://graph.facebook.com/',
            request_token_url=None,
            access_token_url='/oauth/access_token',
            authorize_url='https://www.facebook.com/dialog/oauth',
            consumer_key=c_k,  # app.config['FACEBOOK_APP_ID'],
            consumer_secret=c_s,  # app.config['FACEBOOK_APP_SECRET']
            # request the user's e-mail address during login
            request_token_params={'scope': 'email'})
def unicode_csv_reader(unicode_csv_data, dialect=csv.excel, **kwargs):
# This code is taken from http://docs.python.org/library/csv.html#examples
# csv.py doesn't do Unicode; encode temporarily as UTF-8:
csv_reader = csv.reader(utf_8_encoder(unicode_csv_data),
dialect=dialect, **kwargs)
for row in csv_reader:
|
alexandreferreira/groovedowndl | djangogroovedown/servicos/views.py | Python | apache-2.0 | 2,206 | 0.002267 | # Create your views here.
import json
import os
from django.http.response import HttpResponse, Http404
from django.shortcuts import render_to_response
from django.template import RequestContext
from django.views.decorators.csrf import csrf_exempt
from djangogroovedown.utils import download_music_temp_file, search_for_musics
from grooveshark import Client
from grooveshark.classes.song import Song
from django.core.servers.basehttp import Fi | leWrapper
def index(request):
    """Render the landing page."""
    return render_to_response('index.html', {}, context_instance=RequestContext(request))
def get_list_popular_music(request):
    """Return Grooveshark's popular songs as JSON.

    The ``period`` GET parameter selects the chart: '1' for the daily
    ranking, anything else for the monthly one.
    """
    period = request.GET.get('period')
    client = Client()
    client.init()
    if period == '1':
        popular_music = client.popular(period=Client.DAILY)
    else:
        popular_music = client.popular(period=Client.MONTHLY)
    musicas = [musica.export() for musica in popular_music]
    return HttpResponse(json.dumps(musicas), mimetype="application/json")
def search_musics(request):
    """Search songs matching the ``busca`` GET parameter, filtered by ``tipo``.

    Responds with ``{'success': False, 'data': []}`` when no query is given.
    """
    query = request.GET.get('busca')
    if not query:
        body = json.dumps({'success': False, 'data': []}, encoding="utf-8")
    else:
        tipo = request.GET.get('tipo')
        musicas = search_for_musics(query, tipo)
        body = json.dumps({'data': musicas, 'success': True}, encoding="utf-8")
    return HttpResponse(body, mimetype="application/json")
@csrf_exempt
def download_musics(request):
    """Bundle the requested Grooveshark songs into a zip and stream it back.

    Expects ``musicas`` (a JSON-encoded list of exported song dicts) in the
    query string; raises Http404 when the list is empty or missing.
    """
    musicas = json.loads(request.GET.get('musicas', '[]'))
    if musicas:
        client = Client()
        client.init()
        musics_list = []
        for musica in musicas:
            # Rebuild Song objects from their exported dict representation.
            song = Song.from_export(musica, client.connection)
            musics_list.append(song)
        zip_path = download_music_temp_file(musics_list)
        # NOTE(review): the file object is never closed and the temp zip is
        # not removed after the response -- presumably cleaned up elsewhere;
        # confirm.
        response = HttpResponse(FileWrapper(file(zip_path)), content_type='application/zip',
                                mimetype="application/zip")
        response['Content-Disposition'] = 'attachment; filename=myfile.zip'
        response['Content-Length'] = os.path.getsize(zip_path)
        return response
    else:
        raise Http404
|
Danielhiversen/home-assistant | tests/components/directv/test_init.py | Python | apache-2.0 | 1,186 | 0 | """Tests for the DirecTV integration."""
from homeassistant.components.directv.const import DOMAIN
from homeassistant.config_entries import ConfigEntryState
from homeassistant.core import HomeAssistant
from tests.components.directv import setup_integration
from tests.test_util.aiohttp import AiohttpClientMocker
# pylint: disable=redefined-outer-name
async def test_config_entry_not_ready(
    hass: HomeAssistant, aioclient_mock: AiohttpClientMocker
) -> None:
    """Test the DirecTV configuration entry not ready."""
    # setup_error=True makes the mocked receiver unreachable, so the entry
    # must be scheduled for retry rather than failing permanently.
    entry = await setup_integration(hass, aioclient_mock, setup_error=True)
    assert entry.state is ConfigEntryState.SETUP_RETRY
async def test_unload_config_entry(
    hass: HomeAssistant, aioclient_mock: AiohttpClientMocker
) -> None:
    """Test the DirecTV configuration entry unloading."""
    entry = await setup_integration(hass, aioclient_mock)

    # While loaded, the integration keeps per-entry data in hass.data.
    assert entry.entry_id in hass.data[DOMAIN]
    assert entry.state is ConfigEntryState.LOADED

    await hass.config_entries.async_unload(entry.entry_id)
    await hass.async_block_till_done()

    # Unloading must remove the stored data and flip the entry state.
    assert entry.entry_id not in hass.data[DOMAIN]
    assert entry.state is ConfigEntryState.NOT_LOADED
|
orientechnologies/pyorient | pyorient/messages/commands.py | Python | apache-2.0 | 19,879 | 0.000755 | # -*- coding: utf-8 -*-
from .database import BaseMessage
from .records import RecordUpdateMessage, RecordDeleteMessage, RecordCreateMessage
from ..exceptions import PyOrientBadMethodCallException
from ..constants import COMMAND_OP, FIELD_BOOLEAN, FIELD_BYTE, FIELD_CHAR, \
FIELD_INT, FIELD_LONG, FIELD_SHORT, FIELD_STRING, QUERY_SYNC, FIELD_BYTES, \
TX_COMMIT_OP, QUERY_GREMLIN, QUERY_ASYNC, QUERY_CMD, QUERY_TYPES, \
QUERY_SCRIPT
from ..utils import need_connected, need_db_opened, dlog
__author__ = 'Ostico <ostico@gmail.com>'
#
# COMMAND_OP
#
# Executes remote commands:
#
# Request: (mode:byte)(class-name:string)(command-payload-length:int)(command-payload)
# Response:
# - synchronous commands: [(synch-result-type:byte)[(synch-result-content:?)]]+
# - asynchronous commands: [(asynch-result-type:byte)[(asynch-result-content:?)]*]
# (pre-fetched-record-size.md)[(pre-fetched-record)]*+
#
# Where the request:
#
# mode can be 'a' for asynchronous mode and 's' for synchronous mode
# class-name is the class name of the command implementation.
# There are short form for the most common commands:
# q stands for query as idempotent command. It's like passing
# com.orientechnologies.orient.core.sql.query.OSQLSynchQuery
# c stands for command as non-idempotent command (insert, update, etc).
# It's like passing com.orientechnologies.orient.core.sql.OCommandSQL
# s stands for script. It's like passing
# com.orientechnologies.orient.core.command.script.OCommandScript.
# Script commands by using any supported server-side scripting like Javascript command. Since v1.0.
# any other values is the class name. The command will be created via
# reflection using the default constructor and invoking the fromStream() method against it
# command-payload is the command's serialized payload (see Network-Binary-Protocol-Commands)
# Response is different for synchronous and asynchronous request:
# synchronous:
# synch-result-type can be:
# 'n', means null result
# 'r', means single record returned
# 'l', collection of records. The format is:
# an integer to indicate the collection size
# all the records one by one
# 'a', serialized result, a byte[] is sent
# synch-result-content, can only be a record
# pre-fetched-record-size, as the number of pre-fetched records not
# directly part of the result set but joined to it by fetching
# pre-fetched-record as the pre-fetched record content
# asynchronous:
# asynch-result-type can be:
# 0: no records remain to be fetched
# 1: a record is returned as a resultset
# 2: a record is returned as pre-fetched to be loaded in client's cache only.
# It's not part of the result set but the client knows that it's available for later access
# asynch-result-content, can only be a record
#
class CommandMessage(BaseMessage):
    def __init__(self, _orient_socket):
        """Prepare a COMMAND_OP request bound to the given connection."""
        super(CommandMessage, self).__init__(_orient_socket)

        # Defaults: synchronous SQL query, limit 20 rows, full fetch plan.
        self._query = ''
        self._limit = 20
        self._fetch_plan = '*:0'
        self._command_type = QUERY_SYNC
        self._mod_byte = 's'  # 's' = synchronous mode, 'a' = asynchronous

        # Operation header byte for the remote-command op.
        self._append((FIELD_BYTE, COMMAND_OP))
    @need_db_opened
    def prepare(self, params=None):
        """Serialize the COMMAND_OP request payload.

        :param params: optional sequence of
            (command_type, query, limit, fetch_plan, callback); trailing
            entries may be omitted and keep their defaults.
        """
        if isinstance(params, tuple) or isinstance(params, list):
            try:
                self.set_command_type(params[0])

                self._query = params[1]
                self._limit = params[2]
                self._fetch_plan = params[3]

                # callback function use to operate
                # over the async fetched records
                self.set_callback(params[4])

            except IndexError:
                # Use default for non existent indexes
                pass

        # 's' marks synchronous execution; anything else is async and
        # requires a callback to consume records as they stream in.
        if self._command_type == QUERY_CMD \
                or self._command_type == QUERY_SYNC  \
                or self._command_type == QUERY_SCRIPT \
                or self._command_type == QUERY_GREMLIN:
            self._mod_byte = 's'
        else:
            if self._callback is None:
                raise PyOrientBadMethodCallException("No callback was provided.", [])
            self._mod_byte = 'a'

        _payload_definition = [
            (FIELD_STRING, self._command_type),
            (FIELD_STRING, self._query)
        ]

        if self._command_type == QUERY_ASYNC \
                or self._command_type == QUERY_SYNC \
                or self._command_type == QUERY_GREMLIN:
            # a limit specified in a sql string should always override a
            # limit parameter pass to prepare()
            if ' LIMIT ' not in self._query.upper() or self._command_type == QUERY_GREMLIN:
                _payload_definition.append((FIELD_INT, self._limit))
            else:
                _payload_definition.append((FIELD_INT, -1))

            _payload_definition.append((FIELD_STRING, self._fetch_plan))

        if self._command_type == QUERY_SCRIPT:
            # Scripts carry their language name ('sql') after the class name.
            _payload_definition.insert(1, (FIELD_STRING, 'sql'))

        # Trailing zero-length "simple parameters" section.
        _payload_definition.append((FIELD_INT, 0))

        payload = b''.join(
            self._encode_field(x) for x in _payload_definition
        )

        self._append((FIELD_BYTE, self._mod_byte))
        self._append((FIELD_STRING, payload))

        return super(CommandMessage, self).prepare()
    def fetch_response(self):
        """Read the server response for a previously prepared command.

        Returns the decoded result for synchronous commands; asynchronous
        results are delivered through the registered callback instead.
        """
        # skip execution in case of transaction
        if self._orientSocket.in_transaction is True:
            return self

        # decode header only
        super(CommandMessage, self).fetch_response()

        if self._command_type == QUERY_ASYNC:
            self._read_async_records()
        else:
            return self._read_sync()
def set_command_type(self, _command_type):
if _command_type in QUERY_TYPES:
# user choice if present
self._command_type = _command_type
else:
raise PyOrientBadMethodCallException(
_command_type + ' is not a valid command type', []
)
return self
    def set_fetch_plan(self, _fetch_plan):
        """Set the fetch-plan string (e.g. '*:0'); returns self for chaining."""
        self._fetch_plan = _fetch_plan
        return self

    def set_query(self, _query):
        """Set the SQL/script text to execute; returns self for chaining."""
        self._query = _query
        return self

    def set_limit(self, _limit):
        """Set the maximum number of records returned; returns self for chaining."""
        self._limit = _limit
        return self
| def _read_sync(self):
# type of response
# decode body char with flag continue ( Header already read )
response_type = self._decode_field(FIELD_CHAR)
if not isinstance(response_type, str):
response_type = response_type.decode()
res = []
if response_type == 'n':
self._append(FIELD_CHAR)
super(Comma | ndMessage, self).fetch_response(True)
# end Line \x00
return None
elif response_type == 'r' or response_type == 'w':
res = [self._read_record()]
self._append(FIELD_CHAR)
# end Line \x00
_res = super(CommandMessage, self).fetch_response(True)
if response_type == 'w':
res = [res[0].oRecordData['result']]
elif response_type == 'a':
self._append(FIELD_STRING)
self._append(FIELD_CHAR)
res = [super(CommandMessage, self).fetch_response(True)[0]]
elif response_type == 'l':
self._append(FIELD_INT)
list_len = super(CommandMessage, self).fetch_response(True)[0]
for n in range(0, list_len):
res.append(self._read_record())
# async-result-type can be:
# 0: no records remain to be fetched
# 1: a record is returned as a result set
# 2: a record is returned as pre-fetched to be loaded in client's
# cache only. It's not part of the result set but the client
# knows that it's available for later access
cached_results = self._read_async_records()
# cache = cached_results['cached']
else:
# this should be never happen, used only to debug the protocol
msg = b''
self._orientSocket._socket.setblocking(0)
m = self._orientSocket.read(1)
while m != "":
msg += m
|
CaliOpen/CaliOpen | src/backend/components/py.pgp/caliopen_pgp/keys/__init__.py | Python | gpl-3.0 | 552 | 0 | # -*- coding: utf-8 -*-
"""
PGP public keys management
"""
from __future__ import absolute_import, unicode_literals
from .rfc7929 import DNSDiscovery
from .hkp import HKPDiscovery
from .keybase import KeybaseDiscovery
from .base import PGPPublicKey, PG | PUserId, DiscoveryResult
from .discoverer import PublicKeyDi | scoverer
from .contact import ContactPublicKeyManager
__all__ = ['DNSDiscovery', 'HKPDiscovery', 'KeybaseDiscovery',
'PGPPublicKey', 'PGPUserId', 'DiscoveryResult',
'PublicKeyDiscoverer', 'ContactPublicKeyManager']
|
xpharry/Udacity-DLFoudation | tutorials/reinforcement/gym/gym/envs/classic_control/mountain_car.py | Python | mit | 4,262 | 0.0061 | """
https://webdocs.cs.ualberta.ca/~sutton/MountainCar/MountainCar1.cp
"""
import math
import gym
from gym import spaces
from gym.utils import seeding
import numpy as np
class MountainCarEnv(gym.Env):
    """Classic mountain-car control task.

    State is (position, velocity); the three discrete actions push left,
    coast, or push right. The episode ends when the car reaches the goal
    position on the right hill; every step yields reward -1.
    """
    metadata = {
        'render.modes': ['human', 'rgb_array'],
        'video.frames_per_second': 30
    }

    def __init__(self):
        # Physics/task constants (standard mountain-car parameterization).
        self.min_position = -1.2
        self.max_position = 0.6
        self.max_speed = 0.07
        self.goal_position = 0.5

        self.low = np.array([self.min_position, -self.max_speed])
        self.high = np.array([self.max_position, self.max_speed])

        self.viewer = None

        # Actions: 0 = push left, 1 = no push, 2 = push right.
        self.action_space = spaces.Discrete(3)
        self.observation_space = spaces.Box(self.low, self.high)

        self._seed()
        self.reset()

    def _seed(self, seed=None):
        """Seed the environment's RNG; returns the list of seeds used."""
        self.np_random, seed = seeding.np_random(seed)
        return [seed]

    def _step(self, action):
        """Advance one timestep; returns (observation, reward, done, info)."""
        assert self.action_space.contains(action), "%r (%s) invalid" % (action, type(action))

        position, velocity = self.state
        # Engine force from the action plus gravity along the slope.
        velocity += (action-1)*0.001 + math.cos(3*position)*(-0.0025)
        velocity = np.clip(velocity, -self.max_speed, self.max_speed)
        position += velocity
        position = np.clip(position, self.min_position, self.max_position)
        # Inelastic collision with the left wall.
        if (position==self.min_position and velocity<0): velocity = 0

        done = bool(position >= self.goal_position)
        reward = -1.0

        self.state = (position, velocity)
        return np.array(self.state), reward, done, {}

    def _reset(self):
        """Start a new episode with a random position in [-0.6, -0.4], v=0."""
        self.state = np.array([self.np_random.uniform(low=-0.6, high=-0.4), 0])
        return np.array(self.state)

    def _height(self, xs):
        """Terrain height at position(s) xs (the sin(3x) valley profile)."""
        return np.sin(3 * xs)*.45+.55

    def _render(self, mode='human', close=False):
        """Draw the track, car and goal flag; returns an RGB array when asked."""
        if close:
            if self.viewer is not None:
                self.viewer.close()
                self.viewer = None
            return

        screen_width = 600
        screen_height = 400

        world_width = self.max_position - self.min_position
        scale = screen_width/world_width
        carwidth=40
        carheight=20

        if self.viewer is None:
            # One-time construction of the static scene geometry.
            from gym.envs.classic_control import rendering
            self.viewer = rendering.Viewer(screen_width, screen_height)
            xs = np.linspace(self.min_position, self.max_position, 100)
            ys = self._height(xs)
            xys = list(zip((xs-self.min_position)*scale, ys*scale))

            self.track = rendering.make_polyline(xys)
            self.track.set_linewidth(4)
            self.viewer.add_geom(self.track)

            clearance = 10

            l,r,t,b = -carwidth/2, carwidth/2, carheight, 0
            car = rendering.FilledPolygon([(l,b), (l,t), (r,t), (r,b)])
            car.add_attr(rendering.Transform(translation=(0, clearance)))
            self.cartrans = rendering.Transform()
            car.add_attr(self.cartrans)
            self.viewer.add_geom(car)
            frontwheel = rendering.make_circle(carheight/2.5)
            frontwheel.set_color(.5, .5, .5)
            frontwheel.add_attr(rendering.Transform(translation=(carwidth/4,clearance)))
            frontwheel.add_attr(self.cartrans)
            self.viewer.add_geom(frontwheel)
            backwheel = rendering.make_circle(carheight/2.5)
            backwheel.add_attr(rendering.Transform(translation=(-carwidth/4,clearance)))
            backwheel.add_attr(self.cartrans)
            backwheel.set_color(.5, .5, .5)
            self.viewer.add_geom(backwheel)
            flagx = (self.goal_position-self.min_position)*scale
            flagy1 = self._height(self.goal_position)*scale
            flagy2 = flagy1 + 50
            flagpole = rendering.Line((flagx, flagy1), (flagx, flagy2))
            self.viewer.add_geom(flagpole)
            flag = rendering.FilledPolygon([(flagx, flagy2), (flagx, flagy2-10), (flagx+25, flagy2-5)])
            flag.set_color(.8,.8,0)
            self.viewer.add_geom(flag)

        # Per-frame: move/tilt the car to match the current state.
        pos = self.state[0]
        self.cartrans.set_translation((pos-self.min_position)*scale, self._height(pos)*scale)
        self.cartrans.set_rotation(math.cos(3 * pos))

        return self.viewer.render(return_rgb_array = mode=='rgb_array')
|
benoitsteiner/tensorflow-opencl | tensorflow/contrib/kfac/python/ops/utils_lib.py | Python | apache-2.0 | 1,520 | 0 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Utility functions."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# pylint: disable=unused-import,line-too-long,wildcard-import
from tensorflow.contrib.kfac.python.ops.utils import *
from tensorflow.python.util.all_util import remove_undocumented
# pylint: enable=unused-import,line-too-long,wildcard-import

# Public names re-exported from kfac utils; everything else brought in by the
# wildcard import above is stripped by remove_undocumented() below.
# Two extraction artifacts repaired: "i | mport" in the wildcard import and
# "gene | rate_random_signs" in the symbol list.
_allowed_symbols = [
    "SequenceDict",
    "setdefault",
    "tensors_to_column",
    "column_to_tensors",
    "kronecker_product",
    "layer_params_to_mat2d",
    "mat2d_to_layer_params",
    "compute_pi",
    "posdef_inv",
    "posdef_inv_matrix_inverse",
    "posdef_inv_cholesky",
    "posdef_inv_funcs",
    "SubGraph",
    "generate_random_signs",
    "fwd_gradients",
]

remove_undocumented(__name__, allowed_exception_list=_allowed_symbols)
|
alienlike/courier | courier/scripts/create_tables.py | Python | gpl-3.0 | 816 | 0.003676 | import logging
from sqlalchemy import engine_from_config

from courier.scripts import settings
from courier.models import DeclarativeBase, DBSession, db_views, populate_lookups

# Toggle to echo every SQL statement while rebuilding the schema.
LOG = False


def main(DBSession, engine):
    """Drop and recreate all tables and views, then repopulate lookup rows.

    Order matters: views must be dropped before the tables they select from,
    and rebuilt only after the tables exist again.  Two extraction artifacts
    were repaired ("Declarativ | eBase" in the import, a stray pipe before
    the getLogger call).
    """
    # set up logging
    if LOG:
        logging.basicConfig()
        logging.getLogger('sqlalchemy.engine').setLevel(logging.INFO)
    # build tables & views
    db_views.drop_views(engine)
    DeclarativeBase.metadata.bind = engine
    DeclarativeBase.metadata.drop_all()
    DeclarativeBase.metadata.create_all(engine)
    db_views.build_views(engine)
    # populate lookups
    populate_lookups(DBSession)


if __name__ == '__main__':
    # configure session
    engine = engine_from_config(settings, prefix='sqlalchemy.')
    DBSession.configure(bind=engine)
    main(DBSession, engine)
|
isobelsv/nyu-python | while_loop.py | Python | mit | 249 | 0.012048 | #!/usr/bin/env python3
import sys


def main(argv):
    """Count from int(argv[1]) up to int(argv[2]) inclusive, printing each value.

    Restructured from inline top-level code so the loop is importable and
    testable; the unused a_str/b_str locals were dropped, and two extraction
    artifacts ("__name | __" and a stray pipe inside the print expression)
    were repaired.
    """
    a = int(argv[1])
    b = int(argv[2])
    while (a <= b):
        # NOTE(review): output reads like "a =13)" -- the format looks garbled
        # but is preserved byte-for-byte; confirm the intended message.
        print("a =" + str(a) + str(b) + ")")
        a = a+1


if __name__ == '__main__':
    main(sys.argv)
|
kpech21/Greek-Stemmer | tests/tools/test_text_tools.py | Python | lgpl-3.0 | 2,368 | 0 | # -*- coding: utf-8 -*-
import pytest
from greek_stemmer.tools.text_tools import *
class TestParseText:
    """Tests for parse_word(): upper-casing, accent stripping and trimming."""
    def test_parse_word_receives_no_string(self):
        # A None argument degrades to the empty string instead of raising.
        assert parse_word(None) == ''
    # check accents removal and uppercase letters
    parse_word_testdata = [
        (' $', '$'),
        (' foo ', 'FOO'),
        ('(25%)', '(25%)'),
        (u'\u2167 ί', 'Ⅷ Ι'),
        ("Greek's", "GREEK'S"),
        ('κ', 'Κ'),
        ('ιστορικός', 'ΙΣΤΟΡΙΚΟΣ'),
        ('Ιστορικός ', 'ΙΣΤΟΡΙΚΟΣ'),
        ('ΙΣΤΟΡΙΚΌΣ', 'ΙΣΤΟΡΙΚΟΣ'),
        ('Λαϊκός', 'ΛΑΙΚΟΣ'),
        (' ΛΑΪΚΌΣ', 'ΛΑΙΚΟΣ')
    ]
    @pytest.mark.parametrize('word, output', parse_word_testdata)
    def test_parse_word_with_various_inputs(self, word, output):
        assert parse_word(word) == output
class TestParsePos:
    """Tests for parse_pos(): part-of-speech tags are upper-cased and trimmed."""
    def test_parse_pos_receives_no_string(self):
        # Unlike parse_word, a None argument raises rather than degrading.
        with pytest.raises(TypeError):
            parse_pos(None)
    # check accents removal and uppercase letters
    parse_pos_testdata = [
        (' $', '$'),
        (' foo ', 'FOO'),
        ('(25%)', '(25%)'),
        (u'\u2167 ι', 'Ⅷ Ι'),
        ("Greek's", "GREEK'S"),
        ('κ', 'Κ'),
        ('nnsf', 'NNSF'),
        ('vbfs ', 'VBFS'),
        (' prp', 'PRP'),
        ('Inp', 'INP'),
        (' date ', 'DATE')
    ]
    @pytest.mark.parametrize('word, output', parse_pos_testdata)
    def test_parse_pos_with_various_inputs(self, word, output):
        assert parse_pos(word) == output
class TestIsGreek:
    """Tests for is_greek(): True only for words written in the Greek alphabet."""
    def test_is_greek_receives_no_string(self):
        # Extraction artifact repaired: the call was split as "is_gre | ek".
        assert is_greek(None) is False
    # check accents removal and uppercase letters
    parse_is_greek_testdata = [
        (' $', False),
        ('0.5', False),
        ('foo', False),
        ('(25%)', False),
        (u'\u2167 ί', False),
        ("Greek's", False),
        ('κ', True),
        ('eλληνικά', False),
        ('EΛΛΗΝΙΚΆ', False),
        ('ΕΛΛΗΝΙΚΑ', True),
        (' ελληνικά', True),
        ('NOT', False),
        ('ΝΟΤ', True),
        (' Λαϊκός ', True),
        ('ΛΑΪΚΌΣ', True)
    ]
    # Decorator name was corrupted to "@pytes | t..." in the dump; restored.
    @pytest.mark.parametrize('word, output', parse_is_greek_testdata)
    def test_is_greek_with_various_inputs(self, word, output):
        assert is_greek(word) == output
|
PeterWangIntel/crosswalk-webdriver-python | third_party/atoms.py | Python | bsd-3-clause | 417,522 | 0.000091 | __all__ = ["GET_FIRST_CLIENT_RECT", \
"GET_LOCATION_IN_VIEW", \
"GET_PAGE_ZOOM", \
"IS_ELEMENT_CLICKABLE", \
"TOUCH_SINGLE_TAP", \
"CLEAR", \
"CLEAR_LOCAL_STORAGE", \
"CLEAR_SESSION_STORAGE", \
"CLICK", \
"EXECUTE_ASYNC_SCRIPT", \
"EXECUTE_SCRIPT", \
"EXECUTE_SQL", \
"FIND_ELEMENT", \
"FIND_ELEMENTS", \
"GET_APPCACHE_STATUS", \
"GET_ATTRIBUTE", \
"GET_EFFECTIVE_STYLE", \
"GET_IN_VIEW_LOCATION", \
"GET_LOCAL_STORAGE_ITEM", \
"GET_LOCAL_STORAGE_KEY", \
"GET_LOCAL_STORAGE_KEYS", \
"GET_LOCAL_STORAGE_SIZE", \
"GET_SESSION_STORAGE_ITEM", \
"GET_SESSION_STORAGE_KEY", \
"GET_SESSION_STORAGE_KEYS", \
"GET_SESSION_STORAGE_SIZE", \
"GET_LOCATION", \
"GET_SIZE", \
"GET_TEXT", \
"IS_DISPLAYED", \
"IS_ENABLED", \
"IS_ONLINE", \
"IS_SELECTED", \
"REMOVE_LOCAL_STORAGE_ITEM", \
"REMOVE_SESSION_STORAGE_ITEM", \
"SET_LOCAL_STORAGE_ITEM", \
"SET_SESSION_STORAGE_ITEM", \
"SUBMIT"]
# Minified, machine-generated JavaScript "atom" (Selenium/WebDriver style):
# returns the first client rect of a DOM element.  Do not hand-edit the JS;
# only two extraction artifacts (" | " split inside the string) were removed.
GET_FIRST_CLIENT_RECT = \
    "function(){return function(){var g=this;\nfunction h(a){var b=typeof a;"\
    "if(\"object\"==b)if(a){if(a instanceof Array)return\"array\";if(a insta"\
    "nceof Object)return b;var e=Object.prototype.toString.call(a);if(\"[obj"\
    "ect Window]\"==e)return\"object\";if(\"[object Array]\"==e||\"number\"="\
    "=typeof a.length&&\"undefined\"!=typeof a.splice&&\"undefined\"!=typeof"\
    " a.propertyIsEnumerable&&!a.propertyIsEnumerable(\"splice\"))return\"ar"\
    "ray\";if(\"[object Function]\"==e||\"undefined\"!=typeof a.call&&\"unde"\
    "fined\"!=typeof a.propertyIsEnumerable&&!a.propertyIsEnumerable(\"call"\
    "\"))return\"function\"}else return\"null\";else if(\"function\"==\nb&&"\
    "\"undefined\"==typeof a.call)return\"object\";return b};var k;function "\
    "l(a,b){this.x=void 0!==a?a:0;this.y=void 0!==b?b:0}l.prototype.toString"\
    "=function(){return\"(\"+this.x+\", \"+this.y+\")\"};function m(a){retur"\
    "n 9==a.nodeType?a:a.ownerDocument||a.document}function n(a){this.b=a||g"\
    ".document||document}function p(a){var b=a.b;a=b.body;b=b.parentWindow||"\
    "b.defaultView;return new l(b.pageXOffset||a.scrollLeft,b.pageYOffset||a"\
    ".scrollTop)};function q(a,b,e,d){this.left=a;this.top=b;this.width=e;th"\
    "is.height=d}q.prototype.toString=function(){return\"(\"+this.left+\", "\
    "\"+this.top+\" - \"+this.width+\"w x \"+this.height+\"h)\"};function s("\
    "a){var b;a:{b=m(a);if(b.defaultView&&b.defaultView.getComputedStyle&&(b"\
    "=b.defaultView.getComputedStyle(a,null))){b=b.position||b.getPropertyVa"\
    "lue(\"position\")||\"\";break a}b=\"\"}return b||(a.currentStyle?a.curr"\
    "entStyle.position:null)||a.style&&a.style.position}function t(a){var b;"\
    "try{b=a.getBoundingClientRect()}catch(e){return{left:0,top:0,right:0,bo"\
    "ttom:0}}return b}\nfunction u(a){var b=m(a),e=s(a),d=\"fixed\"==e||\"ab"\
    "solute\"==e;for(a=a.parentNode;a&&a!=b;a=a.parentNode)if(e=s(a),d=d&&\""\
    "static\"==e&&a!=b.documentElement&&a!=b.body,!d&&(a.scrollWidth>a.clien"\
    "tWidth||a.scrollHeight>a.clientHeight||\"fixed\"==e||\"absolute\"==e||"\
    "\"relative\"==e))return a;return null};function v(a){var b=a.getClientR"\
    "ects();if(0==b.length)throw Error(\"Element does not have any client re"\
    "cts\");b=b[0];if(1==a.nodeType)if(a.getBoundingClientRect)a=t(a),a=new "\
    "l(a.left,a.top);else{var e=p(a?new n(m(a)):k||(k=new n));var d=m(a),z=s"\
    "(a),c=new l(0,0),r=(d?m(d):document).documentElement;if(a!=r)if(a.getBo"\
    "undingClientRect)a=t(a),d=p(d?new n(m(d)):k||(k=new n)),c.x=a.left+d.x,"\
    "c.y=a.top+d.y;else if(d.getBoxObjectFor)a=d.getBoxObjectFor(a),d=d.getB"\
    "oxObjectFor(r),c.x=a.screenX-d.screenX,c.y=a.screenY-\nd.screenY;else{v"\
    "ar f=a;do{c.x+=f.offsetLeft;c.y+=f.offsetTop;f!=a&&(c.x+=f.clientLeft||"\
    "0,c.y+=f.clientTop||0);if(\"fixed\"==s(f)){c.x+=d.body.scrollLeft;c.y+="\
    "d.body.scrollTop;break}f=f.offsetParent}while(f&&f!=a);\"absolute\"==z&"\
    "&(c.y-=d.body.offsetTop);for(f=a;(f=u(f))&&f!=d.body&&f!=r;)c.x-=f.scro"\
    "llLeft,c.y-=f.scrollTop}a=new l(c.x-e.x,c.y-e.y)}else e=\"function\"==h"\
    "(a.a),c=a,a.targetTouches?c=a.targetTouches[0]:e&&a.a().targetTouches&&"\
    "(c=a.a().targetTouches[0]),a=new l(c.clientX,c.clientY);return new q(b."\
    "left-\na.x,b.top-a.y,b.right-b.left,b.bottom-b.top)}var w=[\"_\"],x=g;w"\
    "[0]in x||!x.execScript||x.execScript(\"var \"+w[0]);for(var y;w.length&"\
    "&(y=w.shift());)w.length||void 0===v?x=x[y]?x[y]:x[y]={}:x[y]=v;; retur"\
    "n this._.apply(null,arguments);}.apply({navigator:typeof window!=undefi"\
    "ned?window.navigator:null,document:typeof window!=undefined?window.docu"\
    "ment:null}, arguments);}"
GET_LOCATION_IN_VIEW = \
"function(){return function(){var k=this;\nfunction l(a){var b=typeof a;"\
"if(\"object\"==b)if(a){if(a instanceof Array)return\"array\";if(a insta"\
"nceof Object)return b;var c=Object.prototype.toString.call(a);if(\"[obj"\
"ect Window]\"==c)return\"object\";if(\"[object Array]\"==c||\"number\"="\
"=typeof a.length&&\"undefined\"!=typeof a.splice&&\"undefined\"!=typeof"\
" a.propertyIsEnumerable&&!a.propertyIsEnumerable(\"splice\"))return\"ar"\
"ray\";if(\"[object Function]\"==c||\"undefined\"!=typeof a.call&&\"unde"\
"fined\"!=typeof a.propertyIsEnumerable&&!a.propertyIsEnumerable(\"call"\
"\"))return\"function\"}else return\"null\";else if(\"function\"==\nb&&"\
"\"undefined\"==typeof a.call)return\"object\";return b};var m;function "\
"n(a,b){this.x=void 0!==a?a:0;this.y=void 0!==b?b:0}n.prototype.toString"\
"=function(){return\"(\"+this.x+\", \"+this.y+\")\"};function p(a,b){thi"\
"s.width=a;this.height=b}p.prototype.toString=function(){return\"(\"+thi"\
"s.width+\" x \"+this.height+\")\"};function q(a){return a?new r(s(a)):m"\
"||(m=new r)}function s(a){return 9==a.nodeType?a:a.ownerDocument||a.doc"\
"ument}function r(a){this.a=a||k.document||document}function t(a){a=(a.a"\
".parentWindow||a.a.defaultView||window).document;a=\"CSS1Compat\"==a.co"\
"mpatMode?a.documentElement:a.body;return new p(a.clientWidth,a.clientHe"\
"ight)}function u(a){var b=a.a;a=b.body;b=b.parentWindow||b.defaultView;"\
"return new n(b.pageXOffset||a.scrollLeft,b.pageYOffset||a.scrollTop)};f"\
"unction v(a,b,c,d){this.top=a;this.right=b;this.bottom=c;this.left=d}v."\
"prototype.toString=function(){return\"(\"+this.top+\"t, \"+this.right+"\
"\"r, \"+this.bottom+\"b, \"+this.left+\"l)\"};function w(a,b,c,d){this."\
"left=a;this.top=b;this.width=c;this.height=d}w.prototype.toString=funct"\
"ion(){return\"(\"+this.left+\", \"+this.top+\" - \"+this.width+\"w x \""\
"+this.height+\"h)\"};function x(a,b){var c=s(a);return c.defaultView&&c"\
".defaultView.getComputedStyle&&(c=c.defaultView.getComputedStyle(a,null"\
"))?c[b]||c.getPropertyValue(b)||\"\":\"\"}function y(a){return x(a,\"po"\
"sition\")||(a.currentStyle?a.currentStyle.position:null)||a.style&&a.st"\
"yle.position}function z(a){var b;try{b=a.getBoundingClientRect()}catch("\
"c){return{left:0,top:0,right:0,bottom:0}}return b}\nfunction A(a){var b"\
"=s(a),c=y(a),d=\"fixed\"==c||\"absolute\"==c;for(a=a.parentNode;a&&a!=b"\
";a=a.parentNode)if(c=y(a),d=d&&\"static\"==c&&a!=b.documentElement&&a!="\
"b.body,!d&&(a.scrollWidth>a.clientWidth||a.scrollHeight>a.clientHeight|"\
"|\"fixed\"==c||\"absolute\"==c||\"relative\"==c))return a;return null}"\
"\nfunction B(a){var b=s(a),c=y(a),d=new n(0,0),f=(b?s(b):document).docu"\
"mentElement;if(a==f)return d;if(a.getBoundingClientRect)a=z(a),b=u(q(b)"\
"),d.x=a.left+b.x,d.y=a.top+b.y;else if(b.getBoxObjectFor)a=b.getBoxObje"\
"ctFor(a),b=b.getBoxObjectFor(f),d.x=a.screenX-b.screenX,d.y=a.screenY-b"\
".screenY;else{var e=a;do |
dhermes/google-cloud-python | dns/tests/unit/test__http.py | Python | apache-2.0 | 2,719 | 0.000736 | # Copyright 2015 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import mock
class TestConnection(unittest.TestCase):
    """Unit tests for google.cloud.dns._http.Connection.

    Two extraction artifacts were repaired: a stray pipe before the
    "Accept-Encoding" dict entry and one inside the assert_called_once_with
    keyword list.  The test logic is unchanged.
    """
    @staticmethod
    def _get_target_class():
        # Imported lazily so collecting this module does not require the client.
        from google.cloud.dns._http import Connection
        return Connection
    def _make_one(self, *args, **kw):
        return self._get_target_class()(*args, **kw)
    def test_build_api_url_no_extra_query_params(self):
        conn = self._make_one(object())
        URI = "/".join([conn.API_BASE_URL, "dns", conn.API_VERSION, "foo"])
        self.assertEqual(conn.build_api_url("/foo"), URI)
    def test_build_api_url_w_extra_query_params(self):
        from six.moves.urllib.parse import parse_qsl
        from six.moves.urllib.parse import urlsplit
        conn = self._make_one(object())
        uri = conn.build_api_url("/foo", {"bar": "baz"})
        scheme, netloc, path, qs, _ = urlsplit(uri)
        self.assertEqual("%s://%s" % (scheme, netloc), conn.API_BASE_URL)
        self.assertEqual(path, "/".join(["", "dns", conn.API_VERSION, "foo"]))
        parms = dict(parse_qsl(qs))
        self.assertEqual(parms["bar"], "baz")
    def test_extra_headers(self):
        import requests
        from google.cloud import _http as base_http
        from google.cloud.dns import _http as MUT
        http = mock.create_autospec(requests.Session, instance=True)
        response = requests.Response()
        response.status_code = 200
        response_data = b"brent-spiner"
        response._content = response_data
        http.request.return_value = response
        client = mock.Mock(_http=http, spec=["_http"])
        conn = self._make_one(client)
        req_data = "req-data-boring"
        result = conn.api_request("GET", "/rainbow", data=req_data, expect_json=False)
        self.assertEqual(result, response_data)
        expected_headers = {
            "Accept-Encoding": "gzip",
            base_http.CLIENT_INFO_HEADER: MUT._CLIENT_INFO,
            "User-Agent": conn.USER_AGENT,
        }
        expected_uri = conn.build_api_url("/rainbow")
        http.request.assert_called_once_with(
            data=req_data, headers=expected_headers, method="GET", url=expected_uri
        )
|
chgans/django-google-dork | django_google_dork/migrations/0001_initial.py | Python | bsd-2-clause | 4,143 | 0.003621 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import django_google_dork.models
import model_utils.fields
import django.utils.timezone
class Migration(migrations.Migration):
    """Squashed initial schema for the django_google_dork app.

    Auto-generated by Django (squash of migrations 0001-0003).  Only two
    dotted paths corrupted by extraction artifacts ("dj | ango...") were
    repaired; the schema operations themselves are untouched.
    """
    replaces = [('django_google_dork', '0001_initial'), ('django_google_dork', '0002_auto_20141116_1551'), ('django_google_dork', '0003_run_engine')]
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='Campaign',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created', model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, verbose_name='created', editable=False)),
                ('modified', model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, verbose_name='modified', editable=False)),
                ('name', django_google_dork.models.CampaignNameField(unique=True, max_length=32)),
            ],
            options={
                'abstract': False,
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='Dork',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created', model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, verbose_name='created', editable=False)),
                ('modified', model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, verbose_name='modified', editable=False)),
                ('query', django_google_dork.models.DorkQueryField(max_length=256)),
                ('campaign', models.ForeignKey(to='django_google_dork.Campaign')),
            ],
            options={
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='Result',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=1024)),
                ('summary', models.TextField()),
                ('url', models.URLField(max_length=1024)),
            ],
            options={
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='Run',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('dork', models.ForeignKey(to='django_google_dork.Dork')),
                ('result_set', models.ManyToManyField(to='django_google_dork.Result')),
            ],
            options={
            },
            bases=(models.Model,),
        ),
        migrations.AlterUniqueTogether(
            name='result',
            unique_together=set([('title', 'summary', 'url')]),
        ),
        migrations.AlterUniqueTogether(
            name='dork',
            unique_together=set([('campaign', 'query')]),
        ),
        migrations.CreateModel(
            name='SearchEngine',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('hostname', models.CharField(unique=True, max_length=32)),
            ],
            options={
            },
            bases=(models.Model,),
        ),
        migrations.AddField(
            model_name='campaign',
            name='enabled',
            field=models.BooleanField(default=True),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='dork',
            name='enabled',
            field=models.BooleanField(default=True),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='run',
            name='engine',
            field=models.ForeignKey(default=None, to='django_google_dork.SearchEngine'),
            preserve_default=False,
        ),
    ]
|
noamraph/dreampie | dreampielib/data/subp_main.py | Python | gpl-3.0 | 1,566 | 0.005109 | # Copyright 2009 Noam Yorav-Raphael
#
# This file is part of DreamPie.
#
# DreamPie is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# DreamPie is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with DreamPie. If not, see <http://www.gnu.org/licenses/>.
# This file is a script (not a module) run by the DreamPie GUI.
# It expects one argument: the port to connect to.
# It creates a package called dreampielib from subp-py2.zip or subp-py3.zip
# (which are expected to be in the directory of __file__),
# and runs dreampielib.subprocess.main(port).
import sys
from os.path import abspath, join, dirname
def main():
    """Bootstrap the DreamPie subprocess and connect it to the GUI.

    sys.argv[1] is the port to connect to (passed by the DreamPie GUI).
    The version-appropriate subp-py{2,3} directory is pushed onto sys.path
    just long enough to import dreampielib, then removed again so the
    subprocess's import path is left untouched.
    """
    port = int(sys.argv[1])
    py_ver = sys.version_info[0]
    lib_name = abspath(join(dirname(__file__), 'subp-py%d' % py_ver))
    sys.path.insert(0, lib_name)
    from dreampielib.subprocess import main as subprocess_main
    del sys.path[0]
    if sys.version_info[:2] == (3, 0):
        sys.stderr.write("Warning: DreamPie doesn't support Python 3.0. \n"
                         "Please upgrade to Python 3.1.\n")
    subprocess_main(port)
if __name__ == '__main__':
    main()
|
D4wN/brickv | src/build_data/windows/OpenGL/GL/ARB/texture_border_clamp.py | Python | gpl-2.0 | 1,249 | 0.015212 | '''OpenGL extension ARB.texture_border_clamp
This module customises the behaviour of the
OpenGL.raw.GL.ARB.texture_border_clamp to provide a more
Python-friendly API
Overview (from the spec)
The base OpenGL provides clamping such that the texture coordinates are
limited to exactly the range [0,1]. When a texture coordinate is clamped
using this algorithm, the texture sampling filter straddles the edge of
the texture image, taking 1/2 its sample values from within the texture
image, and the other 1/2 from the texture border. It is sometimes
desirable for a texture to be clamped to the border color, rather than to
an average of the border and edge colors.
This extension defines an additional texture clamping algorithm.
CLAMP_TO_BORDER_ARB clamps texture coordinates at all mipmap levels such
that NEAREST and LINEAR filters return only the color of the border
texels.
The official definition of this extension is available here:
http://www.opengl.org/registry/specs/ARB/texture_border_clamp.txt
'''
from OpenGL import platform, constants, constant, arrays
from OpenGL import extensions, wrapper
from OpenGL.GL import glget
import ctypes
from OpenGL.raw.GL.ARB.texture_border_clamp import *
### END AUTOGENERATED SECTION |
scott-s-douglas/SWAPR | sqlite1.py | Python | gpl-2.0 | 12,933 | 0.03232 | import sqlite3
import os.path
import sys
import random
def makeDatabase(databaseName):
    """Create an empty SQLite database file.

    A ``.db`` suffix is appended to *databaseName* unless it already ends
    with one; connecting creates the file on disk if it does not exist.
    """
    target = databaseName if databaseName.endswith(".db") else databaseName + ".db"
    connection = sqlite3.connect(target)
    connection.commit()
    connection.close()
def listToString(list):
    """Join the items of *list* into a single tab-separated string.

    Returns '' for an empty list.  Replaces the old quadratic ``+=`` loop
    (which also had to slice off a trailing tab) with str.join.
    """
    # NOTE: the parameter shadows the built-in ``list``; the name is kept so
    # keyword callers are not broken.
    return "\t".join(str(item) for item in list)
def stringToList(string):
    """Split a tab-separated *string* back into its list of fields.

    Inverse of listToString for items without embedded tabs.  Note that
    stringToList("") returns [''] (str.split semantics), not [].
    """
    # str.split already yields str objects, so the old per-item str() call was
    # redundant; the local that shadowed the built-in ``list`` is gone too.
    return string.split('\t')
#class for connecting, inserting, and retrieving information from a sqlite3 database
class SqliteDB:
    #connects to the database, alters its name if named incorrectly
    def __init__(self, databaseName):
        """Open an existing SQLite database, appending ".db" if missing.

        Exits the whole process via sys.exit when the file does not exist;
        use the module-level makeDatabase() helper to create one first.
        """
        if databaseName[-3:] != ".db":
            databaseName = databaseName + ".db"
        if os.path.isfile(databaseName):
            self.databaseName = databaseName;
            self.conn = sqlite3.connect(self.databaseName)
            self.cursor = self.conn.cursor()
        else:
            #file is missing: bail out rather than silently creating a new, empty database
            sys.exit("This database does not exist, use the makeDatabase(databaseName) to create it")
    def createTables(self):
        """Create every table the app uses (no-ops if they already exist).

        The weightsBIBI table gets six generated weightN columns plus a
        weightSum column, built up in ``weightString`` below.
        """
        #creates tables if they do not exist
        self.cursor.execute("CREATE TABLE IF NOT EXISTS students (wID text, email text, UNIQUE(wID, email) ON CONFLICT ABORT)")
        self.cursor.execute("CREATE TABLE IF NOT EXISTS submissions (labNumber int, wID text, URL text, metadata text, URLsToGrade text)")
        self.cursor.execute("CREATE TABLE IF NOT EXISTS uniqueStudentURL (labNumber int, wID text, URL text, UNIQUE(URL) ON CONFLICT ABORT)")
        self.cursor.execute("CREATE TABLE IF NOT EXISTS experts (labNumber int, URL text, grade text, hidden int, PRIMARY KEY(labNumber, URL, hidden))")
        self.cursor.execute("CREATE TABLE IF NOT EXISTS responses (labNumber int, URL text, wID text, response text, practice boolean, PRIMARY KEY(labNumber, URL, response))")
        self.cursor.execute("CREATE TABLE IF NOT EXISTS questions (labNumber int, questionNumber int, questionWebassignNumber int, practice boolean)")
        weightString = ''
        for i in range(6):
            weightString += ', weight'+str(i+1)+' num'
        self.cursor.execute("CREATE TABLE IF NOT EXISTS weightsBIBI (labNumber int, wID text"+weightString+", weightSum num)")
        self.cursor.execute("CREATE TABLE IF NOT EXISTS rubrics (labNumber int, itemIndex int, itemType text, itemValues text, graded boolean, itemPrompt text)")
        self.cursor.execute("CREATE TABLE IF NOT EXISTS grades(labNumber int, wID text, URL text, finalGrade number, finalGradeVector text, rawGrade number, rawGradeVector text)")
        ##check to see if the tables have already been created
        #creates columns in tables for each lab specified
        self.conn.commit()
    #adds a person into the database, works for both new users and existing ones
    def addEntry(self, wID, URL, labNumber, metadata = None):
        """Record a student's submission; duplicate URLs become "DUPLICATEURL".

        The uniqueStudentURL table (URL is UNIQUE) acts as a guard: when the
        insert there fails, the URL was already submitted by another student
        and the submissions row is stored with the "DUPLICATEURL" sentinel.
        """
        if self.databaseName != None and self.conn != None and self.cursor !=None:
            #If the student did not submit a URL (aka the inputted URL is '')
            if URL == '':
                self.cursor.execute("INSERT INTO submissions VALUES(?,?,?,?,?)", [labNumber, wID, URL,metadata,''])
            #try putting the student and its URL into the uniqueStudentURL database to check if the URL is unique
            else:
                try:
                    self.cursor.execute("INSERT INTO uniqueStudentURL VALUES (?,?,?)", [labNumber, wID, URL])
                    #if there is no error in inserting to a table where URL has to be unique, put it in the actual student database
                    self.cursor.execute("INSERT INTO submissions VALUES(?,?,?,?,?)", [labNumber, wID, URL,metadata,''])
                #if the try fails, that means that the URL is already in the db, duplicate URL found!
                except:
                    self.cursor.execute("SELECT wID FROM uniqueStudentURL WHERE URL=?", [URL])
                    print "URL: " + URL + " was initially submitted by: " + self.cursor.fetchall()[0][0]
                    URL = "DUPLICATEURL"
                    self.cursor.execute("INSERT INTO submissions VALUES(?,?,?,?,?)", [labNumber, wID, URL,metadata,''])
            self.conn.commit()
def addEmail(self, wID, email):
try:
self.cursor.execute("INSERT INTO students VALUES (?,?,?)", [wID, email])
except:
print "wID: " + wID + " or email: " + email + " already in database."
#retrieves URL for a specific student and specific lab number
def getURL(self, wID, labNumber):
self.cursor.execute("SELECT URL FROM submissions WHERE labNumber=? AND wID=?", [labNumber, wID])
URL = self.cursor.fetchone();
if URL is not None:
return (URL[0])
else:
return None
def addExpertURL(self, labNumber, URL, grade, hidden):
self.cursor.execute("SELECT * FROM experts WHERE URL = ?", [URL])
#adds in a user if not in database already
presentURL = self.cursor.fetchone()
if presentURL == None:
self.cursor.execute("INSERT INTO experts VALUES (?, ?, ?, ?)", [labNumber, URL, listToString(grade), hidden])
self.conn.commit()
elif presentURL == URL:
print "The URL " + URL + " is already in the expert database"
else:
sys.exit("Trying to overrite")
##find a way to make seperate expert tables for each lab, and then join them together to prevent the staggaring of grades in the excel sheet
#self.cursor.execute("SELECT * FROM expert WHERE Lab1Grade")
#print self.cursor.fetchall()
#query = ("SELECT {0} FROM expert WHERE wID
    def getExpertURLs(self, labNumber):
        """Return {URL: grade-list} for every expert entry of *labNumber*.

        Grades are stored tab-separated (see listToString) and are expanded
        back into lists with stringToList.
        """
        # "SElECT" is a harmless typo -- SQL keywords are case-insensitive.
        self.cursor.execute("SElECT URL, grade FROM experts where labNumber=?", [labNumber])
        URLsAndGrades = {}
        for d in self.cursor.fetchall():
            URLsAndGrades[str(d[0])] = stringToList(str(d[1]))
        return URLsAndGrades
def finalize(self, labNumber, seed, N, MOOC=False):
##randomize the youtube URLs
#for each wID
#put that into the databse under the student ID
self.cursor.execute("SELECT URL FROM experts WHERE labNumber=? and hidden=0", [labNumber])
expertURL = [str(d[0]) for d in self.cursor.fetchall()]
# find all the hidden expert videos
self.cursor.execute("SELECT URL FROM experts WHERE labNumber=? and hidden=1", [labNumber])
hiddenURL = [str(d[0]) for d in self.cursor.fetchall()]
#get all the studnet URLs
self.cursor.execute("SELECT URL from submissions WHERE labNumber=?", [labNumber])
data = [str(d[0]) for d in self.cursor.fetchall()]
#assign the students whos videos are designated expert graded URLs to grade, and remove them from the URL pool retrieved above
if len(expertURL) + N + 1 <= len(data):
pseudoURL = {}
for d in expertURL:
#if the expertURL is not in the data list, then it is a video that is not submitted by a student this sem
#semester, in which case, we skip it
if d in data:
self.cursor.execute("SELECT wID FROM submissions WHERE URL=?", [d])
indice = (data.index(d) + 1) % len(data)
while data[indice] in expertURL or data[indice] in hiddenURL:
indice = (indice + 1) % len(data)
pseudoURL[d] = data[indice]
data.remove(d)
for d in hiddenURL:
if d in data:
indice = (data.index(d) + 1) % len(data)
while data[indice] in expertURL or data[indice] in hiddenURL:
indice = (indice + 1) % len(data)
pseudoURL[d] = data[indice]
data.remove(d)
self.cursor.execute("SELECT wID FROM submissions WHERE labNumber=? and URL is ''", [labNumber])
noURLSubmitted = [str(d[0]) for d in self.cursor.fetchall()]
wIDPseudoURL = {}
if(data.count('') > 0) and not MOOC:
for d in noURLSubmitted:
indice = (data.index('') + 1) % len(data)
while data[indice] == '':
indice = (indice + 1) % len(data)
wIDPseudoURL[d] = data[indice]
data.remove('')
else:
while '' in data:
data.remove('')
self.cursor.execute("SELECT wID FROM submissions WHERE labNumber=? AND URL=?", [labNumber, "DUPLICATEURL"])
noURLSubmitted = [str(d[0]) for d in self.cursor.fetchall()]
if(data.count("DUPLICATEURL") > 0) and not MOOC:
for d in noURLSubmitted:
indice = (data.index("DUPLICATEURL") + 1) % len(data)
while data[indice] == "DUPLICATEURL":
indice = (indice + 1) % len(data)
wIDPseudoURL[d] = data[indice]
data.remove("DUPLICATEURL")
else:
while '' in data:
data.remove('')
#self.cursor.execute(query)
random.shuffle(data)
selectFrom = data + data[:N + len(expertURL) + 1]
if len(pseudoURL.keys()) > 0:
# params = ("Lab" + str(labNumber) + "URLS |
datawire/quark | quarkc/test/ffi/expected/py/package/package_md/__init__.py | Python | apache-2.0 | 3,603 | 0.006939 | from __future__ import unicode_literals
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
from builtins import str as unicode
from quark_runtime import *
_lazyImport.plug("package_md.test_Test_go_Method")
import quark.reflect
class test_Test_go_Method(quark.reflect.Method):
    """Reflection stub for test.Test.go() (machine-generated by Quark).

    Only an extraction artifact ("p | ass" in _setField) was repaired; do
    not hand-edit the generated structure otherwise.
    """
    def _init(self):
        quark.reflect.Method._init(self)
    def __init__(self):
        super(test_Test_go_Method, self).__init__(u"quark.void", u"go", _List([]));
    def invoke(self, object, args):
        obj = _cast(object, lambda: test.Test);
        (obj).go();
        return None
    def _getClass(self):
        return _cast(None, lambda: unicode)
    def _getField(self, name):
        return None
    def _setField(self, name, value):
        pass
class test_Test | (quark.reflect.Class):
def _init(self):
quark.reflect.Class._init(self)
def __init__(self):
super(test_Test, self).__init__(u"test.Test");
(self).name = u"test.Test"
(self).parameters = _List([])
(self).fields = _List([quark.reflect.Field(u"quark.String", u"name")])
(self).methods = _List([test_Test_go_Method()])
(self).parents = _List([u"quark.Object"])
def construct(self, args):
return test.Test()
def isAbstract(self):
return False
def _getClass(self):
return _cast(None, lambda: unicode)
def _getField(self, name):
return None
def _setField(self, name, value):
pass
test_Test.singleton = test_Test()
class test_subtest_Test_go_Method(quark.reflect.Method):
    """Reflection stub for test.subtest.Test.go() (machine-generated by Quark)."""
    def _init(self):
        quark.reflect.Method._init(self)
    def __init__(self):
        super(test_subtest_Test_go_Method, self).__init__(u"quark.void", u"go", _List([]));
    def invoke(self, object, args):
        obj = _cast(object, lambda: test.subtest.Test);
        (obj).go();
        return None
    def _getClass(self):
        return _cast(None, lambda: unicode)
    def _getField(self, name):
        return None
    def _setField(self, name, value):
        pass
class test_subtest_Test(quark.reflect.Class):
    # Generated Quark reflection metadata for the class test.subtest.Test.
    def _init(self):
        quark.reflect.Class._init(self)
    def __init__(self):
        super(test_subtest_Test, self).__init__(u"test.subtest.Test");
        (self).name = u"test.subtest.Test"
        (self).parameters = _List([])
        (self).fields = _List([quark.reflect.Field(u"quark.int", u"size")])
        (self).methods = _List([test_subtest_Test_go_Method()])
        (self).parents = _List([u"quark.Object"])
    def construct(self, args):
        # Instantiate the described class; args unused (no-arg constructor).
        return test.subtest.Test()
    def isAbstract(self):
        return False
    def _getClass(self):
        return _cast(None, lambda: unicode)
    def _getField(self, name):
        return None
    def _setField(self, name, value):
        pass
test_subtest_Test.singleton = test_subtest_Test()
class Root(_QObject):
    # Generated holder object for this package's reflection singletons
    # (populated below as Root.test_Test_md / Root.test_subtest_Test_md).
    def _init(self):
        pass
    def __init__(self): self._init()
    def _getClass(self):
        return _cast(None, lambda: unicode)
    def _getField(self, name):
        return None
    def _setField(self, name, value):
        pass
Root.test_Test_md = test_Test.singleton
Root.test_subtest_Test_md = test_subtest_Test.singleton
def _lazy_import_test():
    # Deferred import: breaks the circular dependency with the test package.
    import test
    globals().update(locals())
_lazyImport("import test", _lazy_import_test)
def _lazy_import_test_subtest():
    # Deferred import of the nested test.subtest package.
    import test.subtest
    globals().update(locals())
_lazyImport("import test.subtest", _lazy_import_test_subtest)
_lazyImport.pump("package_md.test_Test_go_Method")
|
ric2b/Vivaldi-browser | chromium/build/skia_gold_common/skia_gold_properties.py | Python | bsd-3-clause | 4,742 | 0.011177 | # Copyright 2020 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Class for storing Skia Gold comparison properties.
Examples:
* git revision being tested
* Whether the test is being run locally or on a bot
* What the continuous integration system is
"""
import logging
import os
class SkiaGoldProperties(object):
  """Validates and stores properties related to Skia Gold comparisons.

  Holds metadata such as the git revision under test, tryjob
  issue/patchset/job identifiers, and whether the test is running locally
  or on a bot.
  """

  def __init__(self, args):
    """Initializes the properties from parsed command-line arguments.

    Args:
      args: The parsed arguments from an argparse.ArgumentParser.
    """
    self._git_revision = None
    self._issue = None
    self._patchset = None
    self._job_id = None
    self._local_pixel_tests = None
    self._no_luci_auth = None
    self._bypass_skia_gold_functionality = None
    self._code_review_system = None
    self._continuous_integration_system = None
    self._local_png_directory = None

    self._InitializeProperties(args)

  def IsTryjobRun(self):
    # A tryjob run is identified by the presence of a Gerrit issue.
    return self.issue is not None

  @property
  def continuous_integration_system(self):
    return self._continuous_integration_system or 'buildbucket'

  @property
  def code_review_system(self):
    return self._code_review_system or 'gerrit'

  @property
  def git_revision(self):
    return self._GetGitRevision()

  @property
  def issue(self):
    return self._issue

  @property
  def job_id(self):
    return self._job_id

  @property
  def local_pixel_tests(self):
    return self._IsLocalRun()

  @property
  def local_png_directory(self):
    return self._local_png_directory

  @property
  def no_luci_auth(self):
    return self._no_luci_auth

  @property
  def patchset(self):
    return self._patchset

  @property
  def bypass_skia_gold_functionality(self):
    return self._bypass_skia_gold_functionality

  @staticmethod
  def _GetGitOriginMainHeadSha1():
    # Implemented by platform-specific subclasses.
    raise NotImplementedError()

  def _GetGitRevision(self):
    """Returns the git revision, determining it from git when run locally.

    Raises:
      RuntimeError: if no revision was passed on a bot, or it cannot be
          determined from the local checkout.
    """
    if not self._git_revision:
      # Automated tests should always pass the revision, so assume we're on
      # a workstation and try to get the local origin's main branch HEAD.
      if not self._IsLocalRun():
        raise RuntimeError(
            '--git-revision was not passed when running on a bot')
      revision = self._GetGitOriginMainHeadSha1()
      # A valid SHA-1 is exactly 40 hex characters.
      if not revision or len(revision) != 40:
        raise RuntimeError(
            '--git-revision not passed and unable to determine from git')
      self._git_revision = revision
    return self._git_revision

  def _IsLocalRun(self):
    if self._local_pixel_tests is None:
      # Look for the presence of the SWARMING_SERVER environment variable as a
      # heuristic to determine whether we're running on a workstation or a bot.
      # This should always be set on swarming, but would be strange to be set
      # on a workstation.
      self._local_pixel_tests = 'SWARMING_SERVER' not in os.environ
      if self._local_pixel_tests:
        logging.warning(
            'Automatically determined that test is running on a workstation')
      else:
        logging.warning(
            'Automatically determined that test is running on a bot')
    return self._local_pixel_tests

  def _InitializeProperties(self, args):
    """Copies recognized attributes from |args| onto this instance."""
    if hasattr(args, 'local_pixel_tests'):
      # If not set, will be automatically determined later if needed.
      self._local_pixel_tests = args.local_pixel_tests

    if hasattr(args, 'skia_gold_local_png_write_directory'):
      self._local_png_directory = args.skia_gold_local_png_write_directory

    if hasattr(args, 'no_luci_auth'):
      self._no_luci_auth = args.no_luci_auth

    if hasattr(args, 'bypass_skia_gold_functionality'):
      self._bypass_skia_gold_functionality = args.bypass_skia_gold_functionality

    if hasattr(args, 'code_review_system'):
      self._code_review_system = args.code_review_system

    if hasattr(args, 'continuous_integration_system'):
      self._continuous_integration_system = args.continuous_integration_system

    # Will be automatically determined later if needed.
    if not hasattr(args, 'git_revision') or not args.git_revision:
      return
    self._git_revision = args.git_revision

    # Only expected on tryjob runs: issue requires patchset and job id too.
    if not hasattr(args, 'gerrit_issue') or not args.gerrit_issue:
      return
    self._issue = args.gerrit_issue
    if not hasattr(args, 'gerrit_patchset') or not args.gerrit_patchset:
      raise RuntimeError(
          '--gerrit-issue passed, but --gerrit-patchset not passed.')
    self._patchset = args.gerrit_patchset
    if not hasattr(args, 'buildbucket_id') or not args.buildbucket_id:
      raise RuntimeError(
          '--gerrit-issue passed, but --buildbucket-id not passed.')
    self._job_id = args.buildbucket_id
|
codedstructure/rpcpdb | rpcpdb/cli_pyro.py | Python | mit | 250 | 0.004 | #!/ | usr/bin/env python -u
import Pyro.core
from rpcpdb import cli
def get_api_connection():
    """Return a Pyro proxy for the rpcpdb RPC object served on localhost:7766."""
    return Pyro.core.getProxyForURI("PYROLOC://localhost:7766/rpc")

# Monkey-patch the generic CLI so it obtains its API connection via Pyro.
cli.get_api_connection = get_api_connection

if __name__ == '__main__':
    cli.main()
|
brian-rose/climlab | climlab/radiation/rrtm/_rrtmg_lw/setup.py | Python | mit | 3,492 | 0.0063 | from __future__ import print_function
from os.path import join, abspath
modules = ['parkind.f90',
'parrrtm.f90',
'rrlw_cld.f90',
'rrlw_con.f90',
'rrlw_kg01.f90',
'rrlw_kg02.f90',
'rrlw_kg03.f90',
'rrlw_kg04.f90',
'rrlw_kg05.f90',
'rrlw_kg06.f90',
'rrlw_kg07.f90',
'rrlw_kg08.f90',
'rrlw_kg09.f90',
'rrlw_kg10.f90',
'rrlw_kg11.f90',
'rrlw_kg12.f90',
'rrlw_kg13.f90',
'rrlw_kg14.f90',
'rrlw_kg15.f90',
'rrlw_kg16.f90',
'rrlw_ncpar.f90',
'rrlw_ref.f90',
'rrlw_tbl.f90',
'rrlw_vsn.f90',
'rrlw_wvn.f90',]
src = ['rrtmg_lw_k_g.f90',
'rrtmg_lw_taumol.f90',
'rrtmg_lw_setcoef.f90',
'rrtmg_lw_rtrnmc.f90',
'rrtmg_lw_cldprmc.f90',
'mcica_random_numbers.f90',
'mcica_subcol_gen_lw.f90',
'rrtmg_lw_init.f90',
'rrtmg_lw_rad.f90',]
unoptimized_src = ['rrtmg_lw_k_g.f90']
mod_src = ['rrtmg_lw_setcoef.f90',
'rrtmg_lw_rad.f90',]
def configuration(parent_package='', top_path=None):
    """Build the numpy.distutils Configuration for the _rrtmg_lw extension.

    The Configuration is stored in the module-level ``config`` global so that
    ``rrtmg_lw_gen_source`` can query its local path and Fortran compiler
    later, at build time.
    """
    global config
    from numpy.distutils.misc_util import Configuration
    from numpy.distutils.fcompiler import get_default_fcompiler, CompilerNotFound
    build = True
    try:
        # figure out which compiler we're going to use
        compiler = get_default_fcompiler()
        # set some fortran compiler-dependent flags
        f90flags = []
        if compiler == 'gnu95':
            f90flags.append('-fno-range-check')
            f90flags.append('-ffree-form')
            f90flags.append('-fPIC')
        elif compiler == 'intel' or compiler == 'intelem':
            f90flags.append('-132')
        # Need zero-level optimization to avoid build problems with rrtmg_lw_k_g.f90
        #f90flags.append('-O2')
        # Suppress all compiler warnings (avoid huge CI log files)
        f90flags.append('-w')
    except CompilerNotFound:
        print('No Fortran compiler found, not building the RRTMG_LW radiation module!')
        build = False
    config = Configuration(package_name='_rrtmg_lw', parent_name=parent_package, top_path=top_path)
    if build:
        # Sources are resolved lazily by the rrtmg_lw_gen_source callable.
        config.add_extension(
            name='_rrtmg_lw',
            sources=[rrtmg_lw_gen_source],
            extra_f90_compile_args=f90flags,
            f2py_options=['--quiet'],
        )
    return config
def rrtmg_lw_gen_source(ext, build_dir):
    '''Return the list of RRTMG_LW Fortran source files to compile.

    Used as a lazy "source generator" by numpy.distutils: if a Fortran 90
    compiler is available the full source list is returned, otherwise None
    so the extension build is skipped.
    '''
    thispath = config.local_path
    module_src = []
    for item in modules:
        fullname = join(thispath, 'rrtmg_lw_v4.85', 'gcm_model', 'modules', item)
        module_src.append(fullname)
    for item in src:
        if item in mod_src:
            # Locally patched copies take precedence over upstream sources.
            fullname = join(thispath, 'sourcemods', item)
        else:
            fullname = join(thispath, 'rrtmg_lw_v4.85', 'gcm_model', 'src', item)
        module_src.append(fullname)
    sourcelist = [join(thispath, '_rrtmg_lw.pyf'),
                  join(thispath, 'Driver.f90')]
    try:
        config.have_f90c()
        return module_src + sourcelist
    except Exception:
        print('No Fortran 90 compiler found, not building RRTMG_LW extension!')
        return None
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(configuration=configuration)
|
dslab-epfl/bugbase | lib/installer/dependency_installer.py | Python | bsd-3-clause | 4,885 | 0.002252 | #!/usr/bin/env python3
# -*- coding: UTF-8 -*-
"""
This scripts is a script to use to install dependencies of programs
"""
from abc import abstractmethod, ABCMeta
import logging
import platform
import subprocess
from lib.helper import launch_and_log_as_root
from lib.exceptions import DistributionNotSupportedException
from lib.installer.context_managers import FileLock
__author__ = "Benjamin Schubert, benjamin.schubert@epfl.ch"
class DependenciesInstaller(metaclass=ABCMeta):
    """
    A base installer for dependencies required by bugbase or its programs. One subclass per supported distribution
    should be made
    """
    def __init__(self, packages):
        # Packages this installer is responsible for making available.
        self.packages = packages
    @staticmethod # pragma nocover
    @abstractmethod
    def get_missing_packages(packages: list) -> list:
        """
        Checks for packages if they are already installed or not. Returns all packages to install.
        :param packages: the packages required
        :return: List of packages not installed
        """
    @staticmethod # pragma nocover
    @abstractmethod
    def install(packages: list) -> None:
        """
        Install the given packages using the package manager of the host os
        :param packages: list of packages to install
        """
    @staticmethod # pragma nocover
    @abstractmethod
    def update_sources() -> None:
        """
        Updates the sources of the distribution. This might help if we cannot download some packages, especially on
        apt-based distributions
        :raise subprocess.CalledProcessError
        """
    @staticmethod
    def factory(packages: list):
        """
        Factory to get the correct dependency installer for the distribution
        :param packages: the packages to install
        :return: subclass of DependenciesInstaller
        :raise DistributionNotSupportedException
        """
        distribution = platform.dist()[0]
        if distribution in ["Ubuntu", "debian"]:
            return AptBasedInstaller(packages)
        else:
            raise DistributionNotSupportedException(distribution)
    def run(self) -> None:
        """
        Gets the correct installer, and install the missing packages
        """
        missing = self.get_missing_packages(self.packages)
        if not len(missing):
            return
        # Serialize package-manager access across concurrent bugbase runs.
        with FileLock("/tmp/.bugbase_lock_deps"):
            try:
                self.install(missing)
            except subprocess.CalledProcessError:
                # First attempt failed; refresh the package sources and retry
                # once before giving up.
                logging.warning(
                    "An error occurred while installing dependencies. Trying to update sources and reinstall"
                )
                try:
                    self.update_sources()
                    self.install(missing)
                except subprocess.CalledProcessError:
                    logging.fatal(
                        "An error occurred while installing packages. Please install manually :\n\t%(packages)s\n",
                        dict(packages="\n\t".join(missing))
                    )
                    raise
class AptBasedInstaller(DependenciesInstaller):
    """
    An installer for Apt-based distributions.
    Does not support Debian for now, as some packages are not in the repositories
    """
    @staticmethod
    def get_missing_packages(packages: list) -> list:
        """
        Checks the system for all packages not installed from the given list
        :param packages: the packages for which to search
        :return: all non installed packages from the packages list
        """
        output = subprocess.check_output(["apt-cache", "policy"] + packages, stderr=subprocess.STDOUT)
        packages_info = output.decode("UTF-8")
        missing_dependencies = []
        for package in packages:
            index = packages_info.find(package)
            if index == -1:
                # Unknown to apt: report it, and don't try to slice with -1.
                logging.error("Could not find %(package)s in the repository", dict(package=package))
                continue
            # In `apt-cache policy` output, "Installed: (none)" follows the
            # package name when the package is not installed.
            if packages_info[index:index + len(package) + 21].endswith("Installed: (none)"):
                missing_dependencies.append(package)
        return missing_dependencies
    @staticmethod
    def install(packages: list) -> None:
        """
        Installs the packages with an apt-enabled system
        :param packages: the packages to install
        """
        logging.info("Will now install missing packages : %(packages)s", dict(packages=" ".join(packages)))
        cmd = ["apt-get", "install", "-y"] + packages
        launch_and_log_as_root(cmd)
    @staticmethod
    def update_sources() -> None:
        """
        Updates the repository sources, as on some OS, having it out of sync may lead to errors on installation
        :raise subprocess.CalledProcessError if the process fails
        """
        logging.info("Updating apt repositories")
        launch_and_log_as_root(["apt-get", "update"])
|
desavera/bicraft | dockerbuild/superset_config.py | Python | gpl-3.0 | 1,331 | 0.012772 | #---------------------------------------------------------
# Superset specific config
#---------------------------------------------------------
ROW_LIMIT = 5000
SUPERSET_WORKERS = 4
SUPERSET_WEBSERVER_PORT = 8080
# Filesystem-backed cache with a ~14-hour default timeout.
CACHE_CONFIG={'CACHE_TYPE':'filesystem', 'CACHE_DEFAULT_TIMEOUT': 50000, 'CACHE_DIR': '/opt/superset/fscache'}
#---------------------------------------------------------
#---------------------------------------------------------
# Flask App Builder configuration
#---------------------------------------------------------
# Your App secret key
# SECURITY NOTE(review): this is a hard-coded placeholder secret (it also
# contains invalid escape sequences such as \e); replace it with a strong
# random value before deploying.
SECRET_KEY = '\2\1thisismyscretkey\1\2\e\y\y\h'
# The SQLAlchemy connection string to your database backend
# This connection defines the path to the database that stores your
# superset metadata (slices, connections, tables, dashboards, ...).
# Note that the connection information to connect to the datasources
# you want to explore are managed directly in the web UI
#SQLALCHEMY_DATABASE_URI = 'sqlite:////root/.superset/superset.db'
SQLALCHEMY_DATABASE_URI = 'sqlite:////opt/superset/superset.db'
#SQLALCHEMY_DATABASE_URI = 'sqlite:////docked/superset.db'
#SQLALCHEMY_DATABASE_URI = 'impala://'
# Flask-WTF flag for CSRF
CSRF_ENABLED = True
# Set this API key to enable Mapbox visualizations
MAPBOX_API_KEY = ''
# Allow long-running SQL Lab queries and web requests (seconds).
SQLLAB_TIMEOUT=900
SUPERSET_WEBSERVER_TIMEOUT=900
|
111pontes/ydk-py | cisco-ios-xe/ydk/models/cisco_ios_xe/CISCO_ETHERLIKE_EXT_MIB.py | Python | apache-2.0 | 17,254 | 0.013272 | """ CISCO_ETHERLIKE_EXT_MIB
The MIB module to describe generic objects for
ethernet\-like network interfaces.
This MIB provides ethernet\-like network interfaces
information that are either excluded by EtherLike\-MIB
or specific to Cisco products.
"""
import re
import collections
from enum import Enum
from ydk.types import Empty, YList, YLeafList, DELETE, Decimal64, FixedBitsDict
from ydk.errors import YPYError, YPYModelError
class CiscoEtherlikeExtMib(object):
"""
.. attribute:: ceedot3pauseexttable
A list of additional descriptive and status information about the MAC Control PAUSE function on the ethernet\-like interfaces attached to a particular system, in extension to dot3PauseTable in EtherLike\-MIB. There will be one row in this table for each ethernet\-like interface in the system which supports the MAC Control PAUSE function
**type**\: :py:class:`Ceedot3Pauseexttable <ydk.models.cisco_ios_xe.CISCO_ETHERLIKE_EXT_MIB.CiscoEtherlikeExtMib.Ceedot3Pauseexttable>`
.. attribute:: ceesubinterfacetable
This table provides the subinterface related information associated to the Ethernet\-like interfaces. The subinterface is a division of one physical interface into multiple logical interfaces. As an example of what a typical subinterface setup might look like on a device, a single Ethernet port such as GigabitEthernet0/0 would be subdivided into Gi0/0.1, Gi0/0.2, Gi0/0.3 and so on, each one performing as if it were a separate interface
**type**\: :py:class:`Ceesubinterfacetable <ydk.models.cisco_ios_xe.CISCO_ETHERLIKE_EXT_MIB.CiscoEtherlikeExtMib.Ceesubinterfacetable>`
"""
_prefix = 'CISCO-ETHERLIKE-EXT-MIB'
_revision = '2010-06-04'
def __init__(self):
self.ceedot3pauseexttable = CiscoEtherlikeExtMib.Ceedot3Pauseexttable()
self.ceedot3pauseexttable.parent = self
self.ceesubinterfacetable = CiscoEtherlikeExtMib.Ceesubinterfacetable()
self.ceesubinterfacetable.parent = self
class Ceedot3Pauseexttable(object):
"""
A list of additional descriptive and status
information about the MAC Control PAUSE
function on the ethernet\-like interfaces
attached to a particular system, in extension to
dot3PauseTable in EtherLike\-MIB. There will be
one row in this table for each ethernet\-like
interface in the system which supports the MAC
Control PAUSE function.
.. attribute:: ceedot3pauseextentry
An entry in the table, containing additional information about the MAC Control PAUSE function on a single ethernet\-like interface, in extension to dot3PauseEntry in Etherlike\-MIB
**type**\: list of :py:class:`Ceedot3Pauseextentry <ydk.models.cisco_ios_xe.CISCO_ETHERLIKE_EXT_MIB.CiscoEtherlikeExtMib.Ceedot3Pauseexttable.Ceedot3Pauseextentry>`
"""
_prefix = 'CISCO-ETHERLIKE-EXT-MIB'
_revision = '2010-06-04'
def __init__(self):
self.parent = None
self.ceedot3pauseextentry = YList()
self.ceedot3pauseextentry.parent = self
self.ceedot3pauseextentry.name = 'ceedot3pauseextentry'
class Ceedot3Pauseextentry(object):
"""
An entry in the table, containing additional
information about the MAC Control PAUSE function
on a single ethernet\-like interface, in extension
to dot3PauseEntry in Etherlike\-MIB.
.. attribute:: dot3statsindex <key>
**type**\: int
**range:** 1..2147483647
**refers to**\: :py:class:`dot3statsindex <ydk.models.cisco_ios_xe.EtherLike_MIB.EtherlikeMib.Dot3Statstable.Dot3Statsentry>`
.. attribute:: ceedot3pauseextadminmode
Indicates preference to send or process pause frames on this interface. txDesired(0) \- indicates preference to send pause frames, but autonegotiates flow control. This bit can only be turned on when the corresponding instance of dot3PauseAdminMode has the value of 'enabledXmit' or 'enabledXmitAndRcv'. rxDesired(1) \- indicates preference to process pause frames, but autonegotiates flow control. This bit can only be turned on when the corresponding instance of dot3PauseAdminMode has the value of 'enabledRcv' or 'enabledXmitAndRcv'
**type**\: :py:class:`Ceedot3Pauseextadminmode <ydk.models.cisco_ios_xe.CISCO_ETHERLIKE_EXT_MIB.CiscoEtherlikeExtMib.Ceedot3Pauseexttable.Ceedot3Pauseextentry.Ceed | ot3Pauseextadminmode>`
.. attribute:: ceedot3pauseextopermode
Provides additional information about the flow control operational status on this interface. txDisagree(0) \- the transmit pause function on this interface is disabled due to disagreement from the far end on negotiation. rxD | isagree(1) \- the receive pause function on this interface is disabled due to disagreement from the far end on negotiation. txDesired(2) \- the transmit pause function on this interface is desired. rxDesired(3) \- the receive pause function on this interface is desired
**type**\: :py:class:`Ceedot3Pauseextopermode <ydk.models.cisco_ios_xe.CISCO_ETHERLIKE_EXT_MIB.CiscoEtherlikeExtMib.Ceedot3Pauseexttable.Ceedot3Pauseextentry.Ceedot3Pauseextopermode>`
"""
_prefix = 'CISCO-ETHERLIKE-EXT-MIB'
_revision = '2010-06-04'
def __init__(self):
self.parent = None
self.dot3statsindex = None
self.ceedot3pauseextadminmode = CiscoEtherlikeExtMib.Ceedot3Pauseexttable.Ceedot3Pauseextentry.Ceedot3Pauseextadminmode()
self.ceedot3pauseextopermode = CiscoEtherlikeExtMib.Ceedot3Pauseexttable.Ceedot3Pauseextentry.Ceedot3Pauseextopermode()
class Ceedot3Pauseextadminmode(FixedBitsDict):
"""
Ceedot3Pauseextadminmode
Indicates preference to send or process pause
frames on this interface.
txDesired(0) \- indicates preference to send pause
frames, but autonegotiates flow
control. This bit can only be
turned on when the corresponding
instance of dot3PauseAdminMode
has the value of 'enabledXmit' or
'enabledXmitAndRcv'.
rxDesired(1) \- indicates preference to process
pause frames, but autonegotiates
flow control. This bit can only be
turned on when the corresponding
instance of dot3PauseAdminMode
has the value of 'enabledRcv' or
'enabledXmitAndRcv'.
Keys are:- rxDesired , txDesired
"""
def __init__(self):
self._dictionary = {
'rxDesired':False,
'txDesired':False,
}
self._pos_map = {
'rxDesired':1,
'txDesired':0,
}
class Ceedot3Pauseextopermode(FixedBitsDict):
"""
Ceedot3Pauseextopermode
Provides additional information about the flow
control operational status on this interface.
txDisagr |
tellesnobrega/storm_plugin | sahara/tests/unit/utils/test_edp.py | Python | apache-2.0 | 1,793 | 0 | # Copyright (c) 2014 Red Hat Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import testtools
from sahara.utils import edp
class SplitJobTypeTest(testtools.TestCase):
    """Unit tests for the job-type helpers in sahara.utils.edp."""

    def test_split_job_type(self):
        # A plain job type splits into (type, no subtype).
        jtype, stype = edp.split_job_type(edp.JOB_TYPE_MAPREDUCE)
        self.assertEqual(jtype, edp.JOB_TYPE_MAPREDUCE)
        self.assertEqual(stype, edp.JOB_SUBTYPE_NONE)
        # A dotted job type splits into (type, subtype).
        jtype, stype = edp.split_job_type(edp.JOB_TYPE_MAPREDUCE_STREAMING)
        self.assertEqual(jtype, edp.JOB_TYPE_MAPREDUCE)
        self.assertEqual(stype, edp.JOB_SUBTYPE_STREAMING)

    def test_compare_job_type(self):
        # strict=True compares the full type, including subtype.
        self.assertTrue(edp.compare_job_type(
            edp.JOB_TYPE_JAVA,
            edp.JOB_TYPE_JAVA,
            edp.JOB_TYPE_MAPREDUCE,
            strict=True))
        self.assertFalse(edp.compare_job_type(
            edp.JOB_TYPE_MAPREDUCE_STREAMING,
            edp.JOB_TYPE_JAVA,
            edp.JOB_TYPE_MAPREDUCE,
            strict=True))
        # Non-strict comparison matches on the base type only.
        self.assertTrue(edp.compare_job_type(
            edp.JOB_TYPE_MAPREDUCE_STREAMING,
            edp.JOB_TYPE_JAVA,
            edp.JOB_TYPE_MAPREDUCE))
        self.assertFalse(edp.compare_job_type(
            edp.JOB_TYPE_MAPREDUCE,
            edp.JOB_TYPE_JAVA,
            edp.JOB_TYPE_MAPREDUCE_STREAMING))
|
aaronkurtz/gourmand | gourmand/subscriptions/migrations/0002_make_unique_per_user.py | Python | gpl-2.0 | 516 | 0 | # -*- coding: utf-8 -*-
from __future__ import un | icode_literals
from django.db import migrations
class Migration(migrations.Migration):
    # Enforce per-user uniqueness: one PersonalArticle per (sub, article)
    # and one Subscription per (owner, feed).

    dependencies = [
        ('subscriptions', '0001_initial'),
    ]

    operations = [
        migrations.AlterUniqueTogether(
            name='personalarticle',
            unique_together=set([('sub', 'article')]),
        ),
        migrations.AlterUniqueTogether(
            name='subscription',
            unique_together=set([('owner', 'feed')]),
        ),
    ]
|
ytsapras/robonet_site | events/management/commands/remove_old_events.py | Python | gpl-2.0 | 985 | 0.012183 | # -*- coding: utf-8 -*-
"""
Created on Wed Jul 12 11:31:48 2017
@author: rstreet
"""
from django.core.management.base import BaseCommand
from django.contrib.auth.models import User
from events.models import EventName, Event
from sys import exit
class Command(BaseCommand):
    """Management command that purges obsolete 2008-season events."""
    help = ''

    def _remove_old_events(self, *args, **options):
        # Old 2008-season MOA and OGLE events are obsolete; delete the Event
        # rows referenced by the matching EventName entries.
        moa_events = EventName.objects.all().filter(name__contains='MOA-2008')
        ogle_events = EventName.objects.all().filter(name__contains='OGLE-2008')
        for event in moa_events:
            Event.objects.filter(id=event.event_id).delete()
            print('Removed event ' + event.name + ', ID=' + str(event.event_id))
        for event in ogle_events:
            Event.objects.filter(id=event.event_id).delete()
            print('Removed event ' + event.name + ', ID=' + str(event.event_id))

    def handle(self, *args, **options):
        self._remove_old_events(*args, **options)
|
e-gob/plataforma-kioscos-autoatencion | scripts/ansible-play/.venv/lib/python2.7/site-packages/ansible/modules/network/ovs/openvswitch_bridge.py | Python | bsd-3-clause | 8,968 | 0.001227 | #!/usr/bin/python
#coding: utf-8 -*-
# (c) 2013, David Stygstra <david.stygstra@gmail.com>
# Portions copyright @ 2015 VMware, Inc.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'network'}
DOCUMENTATION = '''
---
module: openvswitch_bridge
version_added: 1.4
author: "David Stygstra (@stygstra)"
short_description: Manage Open vSwitch bridges
requirements: [ ovs-vsctl ]
description:
- Manage Open vSwitch bridges
options:
bridge:
required: true
description:
- Name of bridge or fake bridge to manage
parent:
version_added: "2.3"
required: false
default: None
description:
- Bridge parent of the fake bridge to manage
vlan:
version_added: "2.3"
required: false
default: None
description:
- The VLAN id of the fake bridge to manage (must be between 0 and
4095). This parameter is required if I(parent) parameter is set.
state:
required: false
default: "present"
choices: [ present, absent ]
description:
- Whether the bridge should exist
timeout:
required: false
default: 5
description:
- How long to wait for ovs-vswitchd to respond
external_ids:
version_added: 2.0
required: false
default: None
description:
- A dictionary of external-ids. Omitting this parameter is a No-op.
To clear all external-ids pass an empty value.
fail_mode:
version_added: 2.0
default: None
required: false
choices : [secure, standalone]
description:
- Set bridge fail-mode. The default value (None) is a No-op.
set:
version_added: 2.3
required: false
default: None
description:
- Run set command after bridge configuration. This parameter is
non-idempotent, play will always return I(changed) state if
present
'''
EXAMPLES = '''
# Create a bridge named br-int
- openvswitch_bridge:
bridge: br-int
state: present
# Create a fake bridge named br-int within br-parent on the VLAN 405
- openvswitch_bridge:
bridge: br-int
parent: br-parent
vlan: 405
state: present
# Create an integration bridge
- openvswitch_bridge:
bridge: br-int
state: present
fail_mode: secure
args:
external_ids:
bridge-id: br-int
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.six import iteritems
def _fail_mode_to_str(text):
if not text:
return None
else:
return text.strip()
def _external_ids_to_dict(text):
if not text:
return None
else:
d = {}
for l in text.splitlines():
if l:
k, v = l.split('=')
d[k] = v
return d
def map_obj_to_commands(want, have, module):
    """Compute the ovs-vsctl commands that turn state `have` into state `want`.

    ``want``/``have`` are dicts as produced by map_params_to_obj /
    map_config_to_obj (``have`` is falsy when the bridge does not exist);
    ``module.params`` supplies the values interpolated into each command.
    """
    commands = list()

    if module.params['state'] == 'absent':
        # Only need a delete when the bridge actually exists.
        if have:
            templatized_command = ("%(ovs-vsctl)s -t %(timeout)s del-br"
                                   " %(bridge)s")
            commands.append(templatized_command % module.params)
    else:
        if have:
            # Bridge exists: emit only commands for settings that differ.
            if want['fail_mode'] != have['fail_mode']:
                templatized_command = ("%(ovs-vsctl)s -t %(timeout)s"
                                       " set-fail-mode %(bridge)s"
                                       " %(fail_mode)s")
                commands.append(templatized_command % module.params)

            if want['external_ids'] != have['external_ids']:
                templatized_command = ("%(ovs-vsctl)s -t %(timeout)s"
                                       " br-set-external-id %(bridge)s")
                command = templatized_command % module.params
                if want['external_ids']:
                    changed = False
                    for k, v in iteritems(want['external_ids']):
                        if (k not in have['external_ids']
                                or want['external_ids'][k] != have['external_ids'][k]):
                            command += " " + k + " " + v
                            changed = True
                    # Append once after accumulating all changed ids (the
                    # original appended inside the loop, emitting duplicate
                    # partial commands).
                    if changed:
                        commands.append(command)
        else:
            # Bridge does not exist yet: build the add-br command.
            templatized_command = ("%(ovs-vsctl)s -t %(timeout)s add-br"
                                   " %(bridge)s")
            command = templatized_command % module.params

            if want['parent']:
                # Fake bridge: append the parent bridge and VLAN id.
                templatized_command = "%(parent)s %(vlan)s"
                command += " " + templatized_command % module.params

            if want['set']:
                # Optional trailing `-- set ...` clause.
                templatized_command = " -- set %(set)s"
                command += templatized_command % module.params

            commands.append(command)

            if want['fail_mode']:
                templatized_command = ("%(ovs-vsctl)s -t %(timeout)s"
                                       " set-fail-mode %(bridge)s"
                                       " %(fail_mode)s")
                commands.append(templatized_command % module.params)

            if want['external_ids']:
                for k, v in iteritems(want['external_ids']):
                    templatized_command = ("%(ovs-vsctl)s -t %(timeout)s"
                                           " br-set-external-id %(bridge)s")
                    command = templatized_command % module.params
                    command += " " + k + " " + v
                    commands.append(command)

    return commands
def map_config_to_obj(module):
    """Query ovs-vsctl for the bridge's current state and return it as a dict.

    Returns an empty dict when the bridge does not exist; otherwise the dict
    holds the bridge name, parent, vlan, fail_mode and external_ids.
    """
    templatized_command = "%(ovs-vsctl)s -t %(timeout)s list-br"
    command = templatized_command % module.params
    rc, out, err = module.run_command(command, check_rc=True)
    # check_rc=True already aborts on failure; this guard is defensive.
    if rc != 0:
        module.fail_json(msg=err)
    obj = {}
    if module.params['bridge'] in out.splitlines():
        obj['bridge'] = module.params['bridge']
        templatized_command = ("%(ovs-vsctl)s -t %(timeout)s br-to-parent"
                               " %(bridge)s")
        command = templatized_command % module.params
        rc, out, err = module.run_command(command, check_rc=True)
        obj['parent'] = out.strip()
        templatized_command = ("%(ovs-vsctl)s -t %(timeout)s br-to-vlan"
                               " %(bridge)s")
        command = templatized_command % module.params
        rc, out, err = module.run_command(command, check_rc=True)
        obj['vlan'] = out.strip()
        templatized_command = ("%(ovs-vsctl)s -t %(timeout)s get-fail-mode"
                               " %(bridge)s")
        command = templatized_command % module.params
        rc, out, err = module.run_command(command, check_rc=True)
        obj['fail_mode'] = _fail_mode_to_str(out)
        templatized_command = ("%(ovs-vsctl)s -t %(timeout)s br-get-external-id"
                               " %(bridge)s")
        command = templatized_command % module.params
        rc, out, err = module.run_command(command, check_rc=True)
        obj['external_ids'] = _external_ids_to_dict(out)
    return obj
def map_params_to_obj(module):
    """Extract the bridge-related module parameters into a plain dict."""
    wanted_keys = ('bridge', 'parent', 'vlan', 'fail_mode', 'external_ids', 'set')
    return {key: module.params[key] for key in wanted_keys}
def main():
""" Entry point. """
argument_spec = {
'bridge': {'required': True},
'parent': {'default': None},
'vlan': {'default': None, 'type': 'int'},
'state': {'default': 'present', 'choices': ['present', 'absent']},
'timeout': {'default': 5, 'type': 'int'},
'external_ids': {'default': None, 'type': 'dict'},
'fail_mode': {'default': None},
'set': {'required': False, 'default': None}
}
required_if = |
PoplarYang/oneinstack-odm | include/get_public_ipaddr.py | Python | apache-2.0 | 536 | 0.044776 | #!/usr/bin/env python
import re,urllib2
class Get_public_ip:
def getip(self):
try:
myip = self.visit("http://ip.chinaz.com/getip.aspx")
except:
try:
myip = self.visit("http://ipv4.icanhazip.com/")
except: |
myip = "So sorry!!!"
return myip
def visit(self,url):
opener = urllib2.urlopen(url)
| if url == opener.geturl():
str = opener.read()
return re.search('\d+\.\d+\.\d+\.\d+',str).group(0)
if __name__ == "__main__":
getmyip = Get_public_ip()
print getmyip.getip()
|
adsworth/ldp3 | ldp/trip/models.py | Python | mit | 2,583 | 0.007356 | from django.conf import settings
from django.core.exceptions import ValidationError
from django.core.urlresolvers import reverse
from django.db import models
from django.db.models.signals import post_save
from django.utils.translation import ugettext as _
from django.utils.timezone import localtime
from actstream import action
from timedelta.fields import TimedeltaField
from timedelta.helpers import nice_repr
from utils.choices import Choices
from utils.human import seconds_to_human
class Trip(models.Model):
    """A single skating trip (push, pump or paddle) recorded by a skater."""

    TYPE_CHOICES = Choices((
        ('push', 'PUSH', _('pushed')),
        ('pump', 'PUMP', _('pumped')),
        ('paddle', 'PADDLE', _('paddled')),
    ))

    skater = models.ForeignKey(settings.AUTH_USER_MODEL, related_name='trips')
    type = models.CharField(verbose_name=_('Type'), max_length=6, choices=TYPE_CHOICES, default=TYPE_CHOICES.PUMP)
    start_utc = models.DateTimeField(verbose_name=_('Start time'))
    # Derived from start_utc + duration in save(); not user-editable.
    end_utc = models.DateTimeField(verbose_name=_('End time'), editable=False)
    distance = models.DecimalField(verbose_name=_('Distance'), max_digits=5, decimal_places=2)
    notes = models.TextField(verbose_name=_('Notes'), default='', blank=True)
    duration = TimedeltaField(verbose_name=_('Duration'))
    # Derived in save(); distance units per hour.
    avg_speed = models.DecimalField(_('Avg. Speed'), max_digits=5, decimal_places=2, editable=False)

    class Meta:
        ordering = ['-start_utc']

    def save(self, force_insert=False, force_update=False, using=None,
             update_fields=None):
        # Recompute the derived fields from start/duration/distance.
        self.end_utc = self.start_utc + self.duration
        # NOTE(review): duration.seconds ignores any day component, so trips
        # are assumed shorter than 24h. total_seconds() would return a float,
        # which cannot be mixed with the Decimal distance.
        tick = self.distance / self.duration.seconds
        self.avg_speed = tick * 60 * 60
        # Fixed: pass the caller's arguments through instead of literals.
        super(Trip, self).save(force_insert=force_insert,
                               force_update=force_update, using=using,
                               update_fields=update_fields)

    def __unicode__(self):
        return "trip"

    @property
    def start(self):
        return localtime(self.start_utc)

    @property
    def end(self):
        return localtime(self.end_utc)

    @property
    def start_formatted(self):
        # Fixed copy-paste bug: this property was defined twice and returned
        # end_utc instead of start_utc.
        return localtime(self.start_utc)

    def get_absolute_url(self):
        return reverse('trip_detail', kwargs={'pk': self.id})

    def duration_human(self):
        return nice_repr(self.duration)
def my_handler(sender, instance, created, **kwargs):
    # post_save receiver: publish a "created" activity-stream action
    # attributed to the trip's skater.
    # NOTE(review): this fires on *every* save, not only creation — the
    # ``created`` flag is ignored; confirm that is intended.
    action.send(instance.skater, verb='created', action_object=instance)
# Wire the receiver to Trip saves.
post_save.connect(my_handler, sender=Trip)
jiobert/python | Velez_Felipe/Asignments/draw_stars.py | Python | mit | 300 | 0.066667 | x = [4,6,1,3,5,7,25]
y = [4, "Tom", 1, "Michael", 5, 7, "Jimmy Smith"]
def draw_stars(x):
    """Print one line per element of *x*.

    An integer n is drawn as n asterisks; a string is drawn as its first
    letter (lower-cased) repeated once per character, e.g. "Tom" -> "ttt".
    """
    for item in x:
        if isinstance(item, str):
            print(item[0].lower() * len(item))
        else:
            print("*" * item)
# Demo: render the mixed int/string list defined above.
draw_stars(y)
|
Kupoman/yggdrasil | docs/ext/breathe/renderer/rst/doxygen/domain.py | Python | apache-2.0 | 6,928 | 0.003897 |
class DomainHelper(object):
    """Base class for the per-language helpers used by the domain handlers."""
    pass
class NullDomainHelper(DomainHelper):
    """No-op helper used when a file's domain has no specific support."""
    pass
class CppDomainHelper(DomainHelper):
    """C++ helper: parses definitions and tracks already-seen entity ids."""

    def __init__(self, definition_parser, substitute):
        self.definition_parser = definition_parser
        self.substitute = substitute
        # Maps entity id -> project info of its first occurrence.
        self.duplicates = {}

    def check_cache(self, _id):
        """Return ``(seen, project_info)`` for *_id*; info is "" when unseen."""
        if _id in self.duplicates:
            return True, self.duplicates[_id]
        return False, ""

    def cache(self, _id, project_info):
        """Record *_id* as first seen in *project_info*."""
        self.duplicates[_id] = project_info

    def remove_word(self, word, definition):
        """Strip keyword *word* (plus surrounding whitespace) from
        *definition* via the injected substitute function."""
        pattern = r"(\s*\b|^)%s\b\s*" % word
        return self.substitute(pattern, "", definition)
class CDomainHelper(DomainHelper):
    """C helper: remembers names already emitted (C has no overloading,
    so a repeated name is a duplicate)."""

    def __init__(self):
        self.duplicates = set()

    def is_duplicate(self, name):
        """True when *name* has already been remembered."""
        return name in self.duplicates

    def remember(self, name):
        """Mark *name* as seen."""
        self.duplicates.add(name)
class DomainHandler(object):
    """Common state bundle shared by the language-specific domain handlers."""

    def __init__(self, node_factory, document, env, helper, project_info, target_handler):
        # Stored verbatim for use by subclasses.
        self.project_info = project_info
        self.target_handler = target_handler
        self.node_factory = node_factory
        self.document = document
        self.env = env
        self.helper = helper
class NullDomainHandler(DomainHandler):
    """Fallback handler: yields empty ids and no target nodes."""

    def __init__(self):
        # Deliberately skips DomainHandler.__init__; no state is needed.
        pass

    def create_function_id(self, data_object):
        """Functions get no domain id."""
        return ""

    def create_class_id(self, data_object):
        """Classes get no domain id."""
        return ""

    def create_function_target(self, data_object):
        """No target nodes are produced."""
        return []

    def create_class_target(self, data_object):
        """No target nodes are produced."""
        return []
class CDomainHandler(DomainHandler):
    """Registers C entities with Sphinx's c domain."""

    def create_function_id(self, data_object):
        """C's domain id is simply the bare function name (the last token
        of the definition)."""
        name = data_object.definition.split()[-1]
        return name

    def create_function_target(self, data_object):
        """Create a link target node for a function."""
        name = data_object.definition.split()[-1]
        return self._create_target(name, "function")

    def _create_target(self, name, type_):
        """Create a target node for *name* and register it with the c
        domain, warning and skipping names already seen (no overloading
        in C)."""
        if self.helper.is_duplicate(name):
            print ( "Warning: Ignoring duplicate '%s'. As C does not support overloaded "
                    "functions. Perhaps you should be using the cpp domain?" % name )
            return

        self.helper.remember(name)

        # Create target node. This is required for LaTeX output as target
        # nodes are converted to the appropriate \phantomsection & \label
        # for in document LaTeX links
        (target,) = self.target_handler.create_target(name)

        inv = self.env.domaindata['c']['objects']
        if name in inv:
            # NOTE(review): self.lineno is never assigned in this class or
            # in DomainHandler as far as this file shows — this warn() call
            # may raise AttributeError when a duplicate is hit; confirm.
            self.env.warn(
                self.env.docname,
                'duplicate C object description of %s, ' % name +
                'other instance in ' + self.env.doc2path(inv[name][0]),
                self.lineno)
        inv[name] = (self.env.docname, "function")

        return [target]
class CppDomainHandler(DomainHandler):
    """Registers C++ entities with Sphinx's cpp domain."""

    def create_class_id(self, data_object):
        """Parse the class definition and return its cpp-domain id."""
        def_ = data_object.name
        parser = self.helper.definition_parser(def_)
        sigobj = parser.parse_class()
        return sigobj.get_id()

    def create_class_target(self, data_object):
        """Create a link target node for a class."""
        id_ = self.create_class_id(data_object)
        name = data_object.name
        return self._create_target(name, "class", id_)

    def create_function_id(self, data_object):
        """Rebuild a normalized signature (dropping 'virtual', keeping
        'explicit') and return the function's cpp-domain id."""
        definition = self.helper.remove_word("virtual", data_object.definition)
        argstring = data_object.argsstring
        explicit = "explicit " if data_object.explicit == "yes" else ""
        def_ = "%(explicit)s%(definition)s%(argstring)s" % {
            "explicit": explicit,
            "definition": definition,
            "argstring": argstring,
        }
        parser = self.helper.definition_parser(def_)
        sigobj = parser.parse_function()
        return sigobj.get_id()

    def create_function_target(self, data_object):
        """Create a link target node for a function."""
        id_ = self.create_function_id(data_object)
        name = data_object.definition.split()[-1]
        return self._create_target(name, "function", id_)

    def _create_target(self, name, type_, id_):
        """Creates a target node and registers it with the appropriate domain
        object list in a style which matches Sphinx's behaviour for the domain
        directives like cpp:function"""
        # Check if we've already got this id
        in_cache, project = self.helper.check_cache(id_)
        if in_cache:
            # print() call form: valid under Python 3 too, and consistent
            # with the parenthesized print used by CDomainHandler.
            print("Warning: Ignoring duplicate domain reference '%s'. "
                  "First found in project '%s'" % (id_, project.reference()))
            return []

        self.helper.cache(id_, self.project_info)

        # Create target node. This is required for LaTeX output as target
        # nodes are converted to the appropriate \phantomsection & \label
        # for in document LaTeX links
        (target,) = self.target_handler.create_target(id_)

        # Register object with the sphinx objects registry (first
        # registration wins, by way of setdefault).
        self.document.settings.env.domaindata['cpp']['objects'].setdefault(name,
                (self.document.settings.env.docname, type_, id_))

        return [target]
class DomainHandlerFactory(object):
    """Creates a domain handler matched to a file's programming language."""
    def __init__(self, project_info, node_factory, document, env, target_handler, helpers):
        self.project_info = project_info
        self.node_factory = node_factory
        self.document = document
        self.env = env
        self.target_handler = target_handler
        # Maps domain name (e.g. "cpp") -> helper instance.
        self.domain_helpers = helpers
    def create_null_domain_handler(self):
        """Return a handler that produces no ids and no targets."""
        return NullDomainHandler()
    def create_domain_handler(self, file_):
        """Return the C/C++ handler for *file_*'s domain, falling back to a
        null handler (or null helper) when the domain is unsupported."""
        domains_handlers = {
            "c" : CDomainHandler,
            "cpp" : CppDomainHandler,
        }
        domain = self.project_info.domain_for_file(file_)
        try:
            helper = self.domain_helpers[domain]
        except KeyError:
            helper = NullDomainHelper()
        try:
            return domains_handlers[domain](self.node_factory, self.document, self.env, helper,
                    self.project_info, self.target_handler)
        except KeyError:
            return NullDomainHandler()
class NullDomainHandlerFactory(object):
    """Factory used when domain linking is disabled: every request yields a
    NullDomainHandler."""
    def create_null_domain_handler(self):
        return NullDomainHandler()
    def create_domain_handler(self, file_):
        return NullDomainHandler()
class DomainHandlerFactoryCreator(object):
    """Builds DomainHandlerFactory objects, honouring the "no-link" option
    by substituting the null factory."""
    def __init__(self, node_factory, helpers):
        self.node_factory = node_factory
        self.helpers = helpers
    def create_domain_handler_factory(self, project_info, document, env, options, target_handler):
        # "no-link" disables domain registration entirely.
        if "no-link" in options:
            return NullDomainHandlerFactory()
        return DomainHandlerFactory(
            project_info,
            self.node_factory,
            document,
            env,
            target_handler,
            self.helpers
            )
|
reviewboard/reviewboard | reviewboard/hostingsvcs/tests/test_client.py | Python | mit | 29,598 | 0 | """Test cases for the hosting service client support."""
from kgb import SpyAgency
from reviewboard.hostingsvcs.models import HostingServiceAccount
from reviewboard.hostingsvcs.service import (HostingService,
HostingServiceClient,
HostingServiceHTTPRequest,
HostingServiceHTTPResponse)
from reviewboard.testing.testcase import TestCase
class DummyHTTPRequest(HostingServiceHTTPRequest):
    """Test double that fabricates HTTP responses without any network I/O.

    DELETE and HEAD produce an empty body; all other methods return a small
    JSON payload.  Status codes follow REST conventions: 204 for DELETE,
    201 for POST, 200 otherwise.
    """
    def open(self):
        method = self.method
        if method in ('DELETE', 'HEAD'):
            data = None
        else:
            data = b'{"key": "test response"}'
        if method == 'DELETE':
            status_code = 204
        elif method == 'POST':
            status_code = 201
        else:
            status_code = 200
        return HostingServiceHTTPResponse(
            request=self,
            url=self.url,
            data=data,
            headers={
                str('Test-header'): str('Value'),
            },
            status_code=status_code)
class HostingServiceHTTPRequestTests(TestCase):
    """Unit tests for HostingServiceHTTPRequest."""

    def test_init_with_query(self):
        """Testing HostingServiceHTTPRequest construction with query="""
        request = HostingServiceHTTPRequest(
            url='http://example.com?z=1&z=2&baz=true',
            query={
                'foo': 'bar',
                'a': 10,
                'list': ['a', 'b', 'c'],
            })

        self.assertEqual(
            request.url,
            'http://example.com?a=10&baz=true&foo=bar&list=a&list=b&list=c'
            '&z=1&z=2')

    def test_init_with_body_not_bytes(self):
        """Testing HostingServiceHTTPRequest construction with non-bytes body
        """
        account = HostingServiceAccount()
        service = HostingService(account)

        expected_message = (
            'Received non-bytes body for the HTTP request for %r. This is '
            'likely an implementation problem. Please make sure only byte '
            'strings are sent for the request body.'
            % HostingService
        )

        with self.assertRaisesMessage(TypeError, expected_message):
            HostingServiceHTTPRequest(
                url='http://example.com?z=1&z=2&baz=true',
                method='POST',
                body=123,
                hosting_service=service)

    def test_init_with_header_key_not_unicode(self):
        """Testing HostingServiceHTTPRequest construction with non-Unicode
        header key
        """
        account = HostingServiceAccount()
        service = HostingService(account)

        expected_message = (
            'Received non-Unicode header %r (value=%r) for the HTTP request '
            'for %r. This is likely an implementation problem. Please make '
            'sure only Unicode strings are sent in request headers.'
            % (b'My-Header', 'abc', HostingService)
        )

        with self.assertRaisesMessage(TypeError, expected_message):
            HostingServiceHTTPRequest(
                url='http://example.com?z=1&z=2&baz=true',
                method='POST',
                headers={
                    b'My-Header': 'abc',
                },
                hosting_service=service)

    def test_init_with_header_value_not_unicode(self):
        """Testing HostingServiceHTTPRequest construction with non-Unicode
        header value
        """
        account = HostingServiceAccount()
        service = HostingService(account)

        expected_message = (
            'Received non-Unicode header %r (value=%r) for the HTTP request '
            'for %r. This is likely an implementation problem. Please make '
            'sure only Unicode strings are sent in request headers.'
            % ('My-Header', b'abc', HostingService)
        )

        with self.assertRaisesMessage(TypeError, expected_message):
            HostingServiceHTTPRequest(
                url='http://example.com?z=1&z=2&baz=true',
                method='POST',
                headers={
                    'My-Header': b'abc',
                },
                hosting_service=service)

    def test_add_basic_auth(self):
        """Testing HostingServiceHTTPRequest.add_basic_auth"""
        request = HostingServiceHTTPRequest('http://example.com')
        request.add_basic_auth(b'username', b'password')

        self.assertEqual(
            request.headers,
            {
                'Authorization': 'Basic dXNlcm5hbWU6cGFzc3dvcmQ=',
            })

    def test_get_header(self):
        """Testing HostingServiceHTTPRequest.get_header"""
        # Header lookup is case-insensitive.
        request = HostingServiceHTTPRequest(
            'http://example.com',
            headers={
                'Authorization': 'Basic abc123',
                'Content-Length': '123',
            })

        self.assertEqual(request.get_header('Authorization'), 'Basic abc123')
        self.assertEqual(request.get_header('AUTHORIZATION'), 'Basic abc123')
        self.assertEqual(request.get_header('authorization'), 'Basic abc123')
        self.assertEqual(request.get_header('Content-Length'), '123')
        self.assertEqual(request.get_header('CONTENT-LENGTH'), '123')
        self.assertEqual(request.get_header('content-length'), '123')
class HostingServiceHTTPResponseTests(TestCase):
    """Unit tests for HostingServiceHTTPResponse."""
    def test_json(self):
        """Testing HostingServiceHTTPResponse.json"""
        request = HostingServiceHTTPRequest('http://example.com')
        response = HostingServiceHTTPResponse(request=request,
                                              url='http://example.com',
                                              data=b'{"a": 1, "b": 2}',
                                              headers={},
                                              status_code=200)
        self.assertEqual(
            response.json,
            {
                'a': 1,
                'b': 2,
            })
    def test_json_with_non_json_response(self):
        """Testing HostingServiceHTTPResponse.json with non-JSON response"""
        request = HostingServiceHTTPRequest('http://example.com')
        response = HostingServiceHTTPResponse(request=request,
                                              url='http://example.com',
                                              data=b'{[(',
                                              headers={},
                                              status_code=200)
        with self.assertRaises(ValueError):
            response.json
    def test_get_header(self):
        """Testing HostingServiceHTTPResponse.get_header"""
        # Header lookup is case-insensitive.
        request = HostingServiceHTTPRequest('http://example.com')
        response = HostingServiceHTTPResponse(
            request=request,
            url=request.url,
            status_code=200,
            data=b'',
            headers={
                str('Authorization'): str('Basic abc123'),
                str('Content-Length'): str('123'),
            })
        self.assertEqual(response.get_header('Authorization'), 'Basic abc123')
        self.assertEqual(response.get_header('AUTHORIZATION'), 'Basic abc123')
        self.assertEqual(response.get_header('authorization'), 'Basic abc123')
        self.assertEqual(response.get_header('Content-Length'), '123')
        self.assertEqual(response.get_header('CONTENT-LENGTH'), '123')
        self.assertEqual(response.get_header('content-length'), '123')
class HostingServiceClientTests(SpyAgency, TestCase):
"""Unit tests for HostingServiceClient"""
def setUp(self):
super(HostingServiceClientTests, self).setUp()
account = HostingServiceAccount()
service = HostingService(account)
self.client = HostingServiceClient(service)
self.client.http_request_cls = DummyHTTPRequest
def test_http_delete(self):
"""Testing HostingServiceClient.http_delete"""
self.spy_on(self.client.build_http_request)
response = self.client.http_delete(
url='http://example.com',
headers={
|
live4thee/zstack-utility | cephbackupstorage/cephbackupstorage/cephagent.py | Python | apache-2.0 | 33,834 | 0.00269 | __author__ = 'frank'
import os
import os.path
import pprint
import re
import traceback
import urllib2
import zstacklib.utils.daemon as daemon
import zstacklib.utils.http as http
import zstacklib.utils.jsonobject as jsonobject
from zstacklib.utils import lock
from zstacklib.utils import linux
from zstacklib.utils import log
from zstacklib.utils import thread
from zstacklib.utils.bash import *
from zstacklib.utils.report import Report
from zstacklib.utils import shell
from zstacklib.utils.rollback import rollback, rollbackable
logger = log.get_logger(__name__)
class AgentResponse(object):
    """Base reply payload: success flag, error text and capacity fields."""
    def __init__(self, success=True, error=None):
        self.success = success
        self.error = error if error else ''
        self.totalCapacity = None
        self.availableCapacity = None
class InitRsp(AgentResponse):
    """Adds the cluster fsid field."""
    def __init__(self):
        super(InitRsp, self).__init__()
        self.fsid = None
class DownloadRsp(AgentResponse):
    """Adds virtual/actual size fields."""
    def __init__(self):
        super(DownloadRsp, self).__init__()
        self.size = None
        self.actualSize = None
class UploadProgressRsp(AgentResponse):
    """Adds fields describing an in-flight upload's progress."""
    def __init__(self):
        super(UploadProgressRsp, self).__init__()
        self.completed = False
        self.progress = 0
        self.size = 0
        self.actualSize = 0
        self.installPath = None
class GetImageSizeRsp(AgentResponse):
    """Adds virtual/actual size fields."""
    def __init__(self):
        super(GetImageSizeRsp, self).__init__()
        self.size = None
        self.actualSize = None
class PingRsp(AgentResponse):
    """Adds a failure-description field."""
    def __init__(self):
        super(PingRsp, self).__init__()
        self.failure = None
class GetFactsRsp(AgentResponse):
    """Adds fsid and monitor-address fields."""
    def __init__(self):
        super(GetFactsRsp, self).__init__()
        self.fsid = None
        self.monAddr = None
class DeleteImageMetaDataResponse(AgentResponse):
    """Adds a ret field for the command's result."""
    def __init__(self):
        super(DeleteImageMetaDataResponse,self).__init__()
        self.ret = None
class WriteImageMetaDataResponse(AgentResponse):
    """No extra fields beyond the base response."""
    def __init__(self):
        super(WriteImageMetaDataResponse,self).__init__()
class GetImageMetaDataResponse(AgentResponse):
    """Adds an imagesMetadata field."""
    def __init__(self):
        super(GetImageMetaDataResponse,self).__init__()
        self.imagesMetadata= None
class DumpImageMetaDataToFileResponse(AgentResponse):
    """No extra fields beyond the base response."""
    def __init__(self):
        super(DumpImageMetaDataToFileResponse,self).__init__()
class CheckImageMetaDataFileExistResponse(AgentResponse):
    """Adds the metadata file name and an existence flag."""
    def __init__(self):
        super(CheckImageMetaDataFileExistResponse, self).__init__()
        self.backupStorageMetaFileName = None
        self.exist = None
class GetLocalFileSizeRsp(AgentResponse):
    """Adds a size field for a local file."""
    def __init__(self):
        super(GetLocalFileSizeRsp, self).__init__()
        self.size = None
def replyerror(func):
    # Decorator for agent RPC handlers: any uncaught exception is logged
    # with its traceback and converted into a serialized failure
    # AgentResponse, so the HTTP layer always receives a well-formed reply.
    @functools.wraps(func)
    def wrap(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except Exception as e:
            content = traceback.format_exc()
            err = '%s\n%s\nargs:%s' % (str(e), content, pprint.pformat([args, kwargs]))
            rsp = AgentResponse()
            rsp.success = False
            rsp.error = str(e)
            logger.warn(err)
            return jsonobject.dumps(rsp)
    return wrap
class UploadTask(object):
    """Progress/state record for one image upload into Ceph."""
    def __init__(self, imageUuid, installPath, dstPath, tmpPath):
        self.completed = False
        self.imageUuid = imageUuid
        self.installPath = installPath
        self.dstPath = dstPath # without 'ceph://'
        self.tmpPath = tmpPath # where image firstly imported to
        self.expectedSize = 0
        self.downloadedSize = 0
        self.progress = 0
        self.lastError = None
        self.lastOpTime = linux.get_current_timestamp()
    def fail(self, reason):
        """Mark the task finished with an error message."""
        self.completed = True
        self.lastError = reason
        self.lastOpTime = linux.get_current_timestamp()
        logger.info('task failed for %s: %s' % (self.imageUuid, reason))
    def success(self):
        """Mark the task finished successfully; progress forced to 100%."""
        self.completed = True
        self.progress = 100
        self.lastOpTime = linux.get_current_timestamp()
    def is_started(self):
        # Any reported progress counts as "started".
        return self.progress > 0
    def is_running(self):
        # NOTE(review): true only for tasks that are neither completed nor
        # started (i.e. pending).  A task that has started but not finished
        # reports False here, which makes it evictable by
        # UploadTasks._expunge_oldest_task — confirm this is intentional.
        return not(self.completed or self.is_started())
class UploadTasks(object):
    """Bounded, lock-protected registry of UploadTask records by image UUID."""
    MAX_RECORDS = 80
    def __init__(self):
        self.tasks = {}
    def _expunge_oldest_task(self):
        # Drop the single record with the oldest lastOpTime, skipping tasks
        # that report is_running().  If none qualifies, nothing is removed.
        key, ts = '', linux.get_current_timestamp()
        for k in self.tasks:
            task = self.tasks[k]
            if task.is_running():
                continue
            if task.lastOpTime < ts:
                key, ts = k, task.lastOpTime
        if key != '': del(self.tasks[key])
    @lock.lock('ceph-upload-task')
    def add_task(self, t):
        """Register *t*, evicting one stale record when over capacity."""
        if len(self.tasks) > self.MAX_RECORDS:
            self._expunge_oldest_task()
        self.tasks[t.imageUuid] = t
    @lock.lock('ceph-upload-task')
    def get_task(self, imageUuid):
        """Return the task for *imageUuid*, or None when unknown."""
        return self.tasks.get(imageUuid)
# ------------------------------------------------------------------ #
class ProgressedFileWriter(object):
    """File-like wrapper that forwards writes to *wfd* and reports the
    running byte total to *pfunc* after each write."""

    def __init__(self, wfd, pfunc):
        self.wfd = wfd
        self.pfunc = pfunc
        self.bytesWritten = 0

    def write(self, s):
        """Write *s* through to the wrapped file and publish progress."""
        self.wfd.write(s)
        total = self.bytesWritten + len(s)
        self.bytesWritten = total
        self.pfunc(total)

    def seek(self, offset, whence=None):
        """Ignored: the underlying destination is written sequentially."""
        pass
import cherrypy
class CustomPart(cherrypy._cpreqbody.Part):
    """A customized multipart"""
    # Force every part to spill to make_file() immediately rather than
    # buffering in RAM.
    maxrambytes = 0
    def __init__(self, fp, headers, boundary, fifopath, pfunc):
        cherrypy._cpreqbody.Part.__init__(self, fp, headers, boundary)
        self.wfd = None
        self.file = None
        self.value = None
        self.fifopath = fifopath
        self.pfunc = pfunc
    def make_file(self):
        # Stream the uploaded bytes into the FIFO, reporting progress via
        # pfunc as they are written.
        self.wfd = open(self.fifopath, 'w')
        return ProgressedFileWriter(self.wfd, self.pfunc)
def get_boundary(entity):
    """Consume *entity*'s stream up to the first multipart boundary line and
    return the boundary marker as bytes (prefixed with '--').

    Returns None when the stream ends before any marker is seen.  Raises
    ValueError when the Content-Type carries no usable boundary parameter.
    """
    ib = ""
    if 'boundary' in entity.content_type.params:
        # http://tools.ietf.org/html/rfc2046#section-5.1.1
        # "The grammar for parameters on the Content-type field is such that it
        # is often necessary to enclose the boundary parameter values in quotes
        # on the Content-type line"
        ib = entity.content_type.params['boundary'].strip('"')
    if not re.match("^[ -~]{0,200}[!-~]$", ib):
        raise ValueError('Invalid boundary in multipart form: %r' % (ib,))
    ib = ('--' + ib).encode('ascii')
    # Find the first marker
    while True:
        b = entity.readline()
        if not b:
            # Stream exhausted before any boundary line appeared.
            return
        b = b.strip()
        if b == ib:
            break
    return ib
def stream_body(task, fpath, entity, boundary):
def _progress_consumer(total):
task.downloadedSize = total
@thread.AsyncThread
def _do_import(task, fpath):
shell.check_run("cat %s | rbd import --image-format 2 - %s" % (fpath, task.tmpPath))
while True:
headers = cherrypy._cpreqbody.Part.read_headers(entity.fp)
p = CustomPart(entity.fp, headers, boundary, fpath, _progress_consumer)
if not p.filename:
continue
# start consumer
_do_import(task, fpath)
try:
p.process()
except Exception as e:
logger.warn('process image %s failed: %s' % (task.imageUuid, str(e)))
pass
finally:
if p.wfd is not None:
p.wfd.close()
break
if task.downloadedSize != task.expectedSize:
task.fail('incomplete upload, got %d, expect %d' % (task.downloadedSize, task.expectedSize))
shell.run('rbd rm %s' % task.tmpPath)
return
file_format = None
try:
file_format = linux.get_img_fmt('rbd:'+task.tmpPath)
except Exception as e:
task.fail('upload image %s failed: %s' % (task.imageUuid, str(e)))
return
if file_format == 'qcow2':
if linux.qcow2_get_backing_file('rbd:'+task.tmpPath):
task.fail('Qcow2 image %s has backing file' % task.imageUuid)
shell.run('rbd rm %s' % task.tmpPath)
return
conf_path = None
try:
with open('/etc/ceph/ceph.conf', 'r') as fd:
|
CloCkWeRX/rabbitvcs-svn-mirror | rabbitvcs/ui/property_page.py | Python | gpl-2.0 | 7,016 | 0.009407 | #
# This is an extension to the Nautilus file manager to allow better
# integration with the Subversion source control system.
#
# Copyright (C) 2010 by Jason Heeris <jason.heeris@gmail.com>
#
# RabbitVCS is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# RabbitVCS is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with RabbitVCS; If not, see <http://www.gnu.org/licenses/>.
#
import os.path
import os
if "NAUTILUS_PYTHON_REQUIRE_GTK3" in os.environ and os.environ["NAUTILUS_PYTHON_REQUIRE_GTK3"]:
from gi.repository import Gtk as gtk
GTK3 = True
ICON_SIZE_BUTTON = gtk.IconSize.BUTTON
ICON_SIZE_DIALOG = gtk.IconSize.DIALOG
else:
import gtk
GTK3 = False
ICON_SIZE_BUTTON = gtk.ICON_SIZE_BUTTON
ICON_SIZE_DIALOG = gtk.ICON_SIZE_DIALOG
from collections import defaultdict
import rabbitvcs.ui
import rabbitvcs.ui.widget
import rabbitvcs.vcs
from rabbitvcs.services.checkerservice import StatusCheckerStub as StatusChecker
from rabbitvcs.ui import STATUS_EMBLEMS
from rabbitvcs.util.log import Log
log = Log("rabbitvcs.ui.property_page")
from rabbitvcs import gettext
_ = gettext.gettext
class PropertyPage(rabbitvcs.ui.GtkBuilderWidgetWrapper):
    """Top-level widget for the file manager's VCS property page.

    For a single selected path, embeds one FileInfoPane directly; for a
    multi-selection, embeds one lazily-built FileInfoExpander per path.
    """
    gtkbuilder_filename = "property_page"
    gtkbuilder_id = "prop_page_scroller"
    def __init__(self, paths, vcs=None, claim_domain=True):
        rabbitvcs.ui.GtkBuilderWidgetWrapper.__init__(self,
                                                      claim_domain=claim_domain)
        self.paths = paths
        self.vcs = vcs or rabbitvcs.vcs.VCS()
        self.info_pane = self.get_widget("property_page")
        if len(paths) == 1:
            file_info = FileInfoPane(paths[0], self.vcs,
                                     claim_domain=self.claim_domain)
            self.info_pane.pack_start(file_info.get_widget(),
                                      expand=False,
                                      fill=False,
                                      padding=0)
        elif len(paths) > 1:
            try:
                for path in paths:
                    expander = FileInfoExpander(path, self.vcs,
                                                claim_domain=self.claim_domain)
                    self.info_pane.pack_start(expander.get_widget(),
                                              expand=False,
                                              fill=False,
                                              padding=0)
            except Exception, ex:
                # Log before re-raising so widget-construction failures are
                # visible in the RabbitVCS log.
                log.exception(ex)
                raise
class FileInfoPane(rabbitvcs.ui.GtkBuilderWidgetWrapper):
    """A table of VCS status details (name, statuses, emblems, extra rows)
    for a single path."""

    gtkbuilder_filename = "property_page"
    gtkbuilder_id = "file_info_table"

    def __init__(self, path, vcs=None, claim_domain=True):
        rabbitvcs.ui.GtkBuilderWidgetWrapper.__init__(self,
                                                      claim_domain=claim_domain)
        self.path = path
        self.vcs = vcs or rabbitvcs.vcs.VCS()
        self.checker = StatusChecker()

        self.get_widget("file_name").set_text(os.path.basename(path))

        # Non-recursive, cached status lookup for just this path.
        self.status = self.checker.check_status(path,
                                                recurse = False,
                                                invalidate = False,
                                                summary = False)

        self.get_widget("vcs_type").set_text(self.status.vcs_type)
        self.get_widget("content_status").set_text(unicode(self.status.simple_content_status()))
        self.get_widget("prop_status").set_text(unicode(self.status.simple_metadata_status()))

        self.set_icon_from_status(self.get_widget("content_status_icon"),
                                  self.status.simple_content_status())
        self.set_icon_from_status(self.get_widget("prop_status_icon"),
                                  self.status.simple_metadata_status())
        self.set_icon_from_status(self.get_widget("vcs_icon"),
                                  self.status.single, ICON_SIZE_DIALOG)

        additional_props_table = rabbitvcs.ui.widget.KeyValueTable(
            self.get_additional_info())
        additional_props_table.show()
        self.get_widget("file_info_table").pack_start(additional_props_table,
                                                      expand=False,
                                                      fill=False,
                                                      padding=0)

    def set_icon_from_status(self, icon, status, size=ICON_SIZE_BUTTON):
        """Show the emblem icon matching *status*, if one is defined."""
        if status in rabbitvcs.ui.STATUS_EMBLEMS:
            icon.set_from_icon_name("emblem-" + STATUS_EMBLEMS[status], size)

    def get_additional_info(self):
        """Extra (label, value) rows for the info table, per VCS type."""
        vcs_type = rabbitvcs.vcs.guess_vcs(self.path)['vcs']
        if(vcs_type == rabbitvcs.vcs.VCS_SVN):
            return self.get_additional_info_svn()
        else:
            return []

    def get_additional_info_svn(self):
        """SVN-specific rows: currently just the repository URL."""
        repo_url = self.vcs.svn().get_repo_url(self.path)
        return [
            (_("Repository URL"), repo_url)]
class FileInfoExpander(rabbitvcs.ui.GtkBuilderWidgetWrapper):
    """A collapsible row for one path; its FileInfoPane is built lazily on
    first expansion to keep multi-selection property pages cheap."""
    gtkbuilder_filename = "property_page"
    gtkbuilder_id = "file_info_expander"
    def __init__(self, path, vcs=None, claim_domain=True):
        # Might be None, but that's okay, only subclasses use it
        self.vcs = vcs
        rabbitvcs.ui.GtkBuilderWidgetWrapper.__init__(self,
                                                      claim_domain=claim_domain)
        self.path = path
        self.get_widget("file_expander_path").set_label(path)
        # Do a lazy evaluate for this
        self.file_info = None
        self.expander = self.get_widget()
        # There seems to be no easy way to connect to this in gtkbuilder
        self.expander.connect("notify::expanded", self.on_expand)
    def on_expand(self, param_spec, user_data):
        # Build the pane only once, on the first expansion.
        if self.expander.get_expanded() and not self.file_info:
            self.file_info = FileInfoPane(self.path, self.vcs,
                                          claim_domain=self.claim_domain
                                          ).get_widget()
            self.expander.add(self.file_info)
class PropertyPageLabel(rabbitvcs.ui.GtkBuilderWidgetWrapper):
    # The tab label widget for the property page.
    gtkbuilder_filename = "property_page"
    gtkbuilder_id = "property_page_label"
|
Ernti/GG | gg/GGobjectlist.py | Python | gpl-2.0 | 491 | 0 | '''
Created on 17 Dec 2013
@author: tore
'''
class ObjectList(object):
    """Keeps two separate registries: game objects and windows."""

    def __init__(self):
        self.objectlist = []
        self.windowlist = []

    def addObject(self, listobject):
        """Register *listobject* in the object registry."""
        self.objectlist.append(listobject)

    def removeObject(self, listobject):
        """Remove *listobject* from the object registry."""
        self.objectlist.remove(listobject)

    def addWindow(self, listobject):
        """Register *listobject* in the window registry."""
        self.windowlist.append(listobject)

    def removeWindow(self, listobject):
        """Remove *listobject* from the window registry."""
        self.windowlist.remove(listobject)
|
bollu/polymage | sandbox/apps/python/img_proc/lens_blur/main.py | Python | apache-2.0 | 632 | 0.007911 | import numpy as np
import time
import sys
from __init__ i | mport *
from i | nit import init_all
from printer import print_header, print_config, print_line
from builder import create_lib,build_lensblur
from exec_pipe import lensblur
#from app_tuner import auto_tune
app = "lens_blur"
def main():
    # Entry point: initialize app metadata and config, then either tune the
    # pipeline (currently disabled) or build the library and run the blur.
    print_header()
    app_data = {}
    app_data['app'] = app
    app_data['ROOT'] = ROOT
    init_all(app_data)
    print_config(app_data)
    if app_data['mode'] == 'tune':
        #auto_tune(pipe_data,app_data)
        pass
    else:
        create_lib(build_lensblur, app, app_data)
        lensblur(app_data)
    return
# Runs immediately on import/execution (no __main__ guard in this script).
main()
|
baroquehq/baroque | baroque/datastructures/counters.py | Python | mit | 1,120 | 0.002679 | from baroque.entities.event import Event
class EventCounter:
"""A counter of events."""
def __init__(self):
self.events_count = 0
self.events_count_by_type = dict()
def increment_counting(self, event):
"""Counts an event
Args:
event (:obj:`baroque.entities.event.Event`): the event to be counted
"""
assert isinstance(event, Event)
self.events_count += 1
t = type(event.type)
if t in self.events_count_by_type:
self.events_count_by_type[t] | += 1
else:
self.events_count_by_type[t] = 1
def count_all(self):
"""Tells how many events have been counted globally
Returns:
int
"""
return self.events_count
def count(self, eventtype):
"""Tells how many events have been counted of the specified type
Args:
eventtype (:obj:`baroque.entities.eventtype.EventType`): the type of events to be counted
Returns:
in | t
"""
return self.events_count_by_type.get(type(eventtype), 0)
|
greglandrum/rdkit | rdkit/VLib/NodeLib/SmilesOutput.py | Python | bsd-3-clause | 2,131 | 0 | # $Id$
#
# Copyright (C) 2003 Rational Discovery LLC
# All Rights Reserved
#
from rdkit import Chem
from rdkit.VLib.Output import OutputNode as BaseOutputNode
class OutputNode(BaseOutputNode):
  """ dumps smiles output

  Assumptions:

    - destination supports a write() method

    - inputs (parents) can be stepped through in lockstep

  Usage Example:
    >>> smis = ['C1CCC1','C1CC1','C=O','NCC']
    >>> mols = [Chem.MolFromSmiles(x) for x in smis]
    >>> from rdkit.VLib.Supply import SupplyNode
    >>> suppl = SupplyNode(contents=mols)
    >>> from io import StringIO
    >>> sio = StringIO()
    >>> node = OutputNode(dest=sio,delim=', ')
    >>> node.AddParent(suppl)
    >>> ms = [x for x in node]
    >>> len(ms)
    4
    >>> txt = sio.getvalue()
    >>> repr(txt)
    "'1, C1CCC1\\\\n2, C1CC1\\\\n3, C=O\\\\n4, CCN\\\\n'"

  """

  def __init__(self, dest=None, delim='\t', idField=None, **kwargs):
    BaseOutputNode.__init__(self, dest=dest, strFunc=self.smilesOut)
    self._dest = dest
    self._idField = idField
    self._delim = delim
    self._nDumped = 0

  def reset(self):
    """Reset parent state and the running output counter."""
    BaseOutputNode.reset(self)
    self._nDumped = 0

  def smilesOut(self, mol):
    """Format one molecule (or a (mol, extra...) tuple/list) as a single
    delimited line: '<label><delim><smiles>[<delim><extra>...]\\n'."""
    self._nDumped += 1
    if isinstance(mol, (tuple, list)):
      args = mol
      mol = args[0]
      if len(args) > 1:
        args = list(args[1:])
      else:
        args = []
    else:
      args = []

    # Prefer an explicit id property when configured; otherwise use the
    # 1-based output index as the label.
    if self._idField and mol.HasProp(self._idField):
      label = mol.GetProp(self._idField)
    else:
      label = str(self._nDumped)
    smi = Chem.MolToSmiles(mol)
    outp = [label, smi] + args
    return '%s\n' % (self._delim.join(outp))
# ------------------------------------
#
# doctest boilerplate
#
def _runDoctests(verbose=None): # pragma: nocover
  """Run this module's doctests and exit with the number of failures."""
  import doctest
  import sys
  failed, _ = doctest.testmod(optionflags=doctest.ELLIPSIS, verbose=verbose)
  sys.exit(failed)
if __name__ == '__main__': # pragma: nocover
  _runDoctests()
|
summanlp/textrank | test/test_keywords.py | Python | mit | 7,150 | 0.003776 | import unittest
from summa.keywords import keywords
from summa.preprocessing.textcleaner import deaccent
from numpy import isclose
from .utils import get_text_from_test_data
class TestKeywords(unittest.TestCase):
def test_text_keywords(self):
text = get_text_from_test_data("mihalcea_tarau.txt")
# Calculate keywords
generated_keywords = keywords(text, split=True)
# To be compared to the reference.
reference_keywords = get_text_from_test_data("mihalcea_tarau.kw.txt").split("\n")
self.assertEqual({str(x) for x in generated_keywords}, {str(x) for x in reference_keywords})
def test_text_keywords_wempty_stoplist(self):
text = get_text_from_test_data("mihalcea_tarau.txt")
additional_stoplist = []
generated_keywords = keywords(text, split=True, additional_stopwords=additional_stoplist)
reference_keywords = get_text_from_test_data("mihalcea_tarau.kw.txt").split("\n")
self.assertEqual({str(x) for x in generated_keywords}, {str(x) for x in reference_keywords})
def test_text_keywords_wstoplist(self):
text = get_text_from_test_data("mihalcea_tarau.txt")
additional_stoplist = get_text_from_test_data("mihalcea_tarau.sw.txt").strip().split(",")
generated_keywords = keywords(text, split=True, additional_stopwords=additional_stoplist)
reference_keywords = get_text_from_test_data("mihalcea_tarau.swkw.txt").split("\n")
self.assertEqual({str(x) for x in generated_keywords}, {str(x) for x in reference_keywords})
def test_keywords_few_distinct_words_is_empty_string(self):
text = get_text_from_test_data("few_distinct_words.txt")
self.assertEqual(keywords(text), "")
def test_keywords_few_distinct_words_wempty_stoplist_is_empty_string(self):
text = get_text_from_test_data("few_distinct_words.txt")
self.assertEqual(keywords(text,additional_stopwords=[]), "")
def test_keywords_few_distinct_words_w_stoplist_is_empty_string(self):
text = get_text_from_test_data("few_distinct_words.txt")
additional_stopwords = ["here","there"]
self.assertEqual(keywords(text,additional_stopwords=additional_stopwords), "")
def test_keywords_few_distinct_words_split_is_empty_list(self):
text = get_text_from_test_data("few_distinct_words.txt")
self.assertEqual(keywords(text, split=True), [])
def test_keywords_few_distinct_words_wempty_stoplist_split_is_empty_list(self):
text = get_text_from_test_data("few_distinct_words.txt")
self.assertEqual(keywords(text, split=True, additional_stopwords=[]), [])
def test_keywords_few_distinct_words_w_stoplist_split_is_empty_list(self):
text = get_text_from_test_data("few_distinct_words.txt")
additional_stopwords = ["here","there"]
self.assertEqual(keywords(text, split=True, additional_stopwords=additional_stopwords), [])
def test_text_summarization_on_short_input_text_and_split_is_not_empty_list(self):
text = get_text_from_test_data("unrelated.txt")
# Keeps the first 8 sentences to make the text shorter.
text = "\n".join(text.split('\n')[:8])
self.assertNotEqual(keywords(text, split=True), [])
def test_text_summarization_on_short_input_text_is_not_empty_string(self):
    """keywords() on a shortened text still produces a non-empty string.

    Bug fix: the original called ``keywords(text, split=True)``, which
    returns a list; a list never equals ``""`` so the assertion was
    vacuously true and the method merely duplicated the
    ``*_split_is_not_empty_list`` test above. The method name says
    "string", so call keywords() without ``split=True``.
    """
    text = get_text_from_test_data("unrelated.txt")
    # Keep only the first 8 lines to make the text shorter.
    text = "\n".join(text.split('\n')[:8])
    self.assertNotEqual(keywords(text), "")
def test_keywords_ratio(self):
    """Doubling the ratio parameter roughly doubles the keyword count."""
    text = get_text_from_test_data("mihalcea_tarau.txt")
    # Length is computed on the tokenized, cleaned text, so only check that
    # ratio=0.4 returns about twice as many keywords as ratio=0.2.
    shorter = keywords(text, ratio=0.2, split=True)
    longer = keywords(text, ratio=0.4, split=True)
    self.assertAlmostEqual(float(len(longer)) / len(shorter), 0.4 / 0.2, places=1)
def test_keywords_ratio_wstopwords(self):
    """Doubling ``ratio`` roughly doubles keyword count, with a stoplist."""
    text = get_text_from_test_data("mihalcea_tarau.txt")
    additional_stoplist = get_text_from_test_data("mihalcea_tarau.sw.txt").strip().split(",")
    # Check ratio parameter is well behaved.
    # Because length is taken on tokenized clean text we just check that
    # ratio 40% is about twice as long as ratio 20%.
    selected_docs_20 = keywords(text, ratio=0.2, split=True, additional_stopwords=additional_stoplist)
    selected_docs_40 = keywords(text, ratio=0.4, split=True, additional_stopwords=additional_stoplist)
    actual_ratio = float(len(selected_docs_40)) / len(selected_docs_20)
    expected_ratio = 0.4 / 0.2
    # NOTE(review): rtol=0.5 is a *50%* relative tolerance, not the 5% the
    # original comment claimed; confirm which tolerance was intended.
    self.assertTrue(isclose(actual_ratio, expected_ratio, rtol=0.5), "Ratio between number of keywords should be 2.")
def test_keywords_consecutive_keywords(self):
    """Heavily repeated adjacent words must not crash keyword extraction."""
    text = "Rabbit populations known to be plentiful, large, and diverse \
            in the area. \
            Adjacent to the site, a number number well over a thousand. \
            The number of these rabbit populations has diminished in recent \
            years, and perhaps we have become number to a number of their \
            numbers numbering fewer."
    # Should not raise an exception.
    self.assertIsNotNone(keywords(text, words=10))
def test_repeated_keywords(self):
    """A text that repeats its keywords still yields at least one keyword."""
    sample = get_text_from_test_data("repeated_keywords.txt")
    extracted = keywords(sample)
    self.assertTrue(len(extracted.splitlines()))
def test_repeated_keywords_wstopwords(self):
    """Repeated keywords plus a stoplist still yield at least one keyword."""
    sample = get_text_from_test_data("repeated_keywords.txt")
    extracted = keywords(sample, additional_stopwords=["sage", "user"])
    self.assertTrue(len(extracted.splitlines()))
def test_spanish_without_accents(self):
    """With deaccent=True every returned keyword is already accent-free."""
    sample = get_text_from_test_data("spanish.txt")
    extracted = keywords(sample, language="spanish", deaccent=True, split=True)
    # Deaccenting a keyword again must be a no-op for every keyword.
    self.assertTrue(all(deaccent(kw) == kw for kw in extracted))
def test_spanish_with_accents(self):
    """With deaccent=False at least one keyword keeps its accents."""
    sample = get_text_from_test_data("spanish.txt")
    extracted = keywords(sample, language="spanish", deaccent=False, split=True)
    # Some keyword must change when deaccented, i.e. it carried an accent.
    self.assertTrue(any(deaccent(kw) != kw for kw in extracted))
def test_text_as_bytes_raises_exception(self):
    """Passing a bytes object (not a Python 3 str) to keywords() raises ValueError."""
    text = get_text_from_test_data("spanish.txt")
    # Renamed from ``bytes`` so the local no longer shadows the builtin type.
    raw_bytes = text.encode(encoding="utf-8")
    with self.assertRaises(ValueError):
        keywords(raw_bytes, language="spanish")
# Allow running this test module directly from the command line.
if __name__ == '__main__':
    unittest.main()
waterponey/scikit-learn | sklearn/ensemble/tests/test_gradient_boosting.py | Python | bsd-3-clause | 40,529 | 0.000099 | """
Testing for the gradient boosting module (sklearn.ensemble.gradient_boosting).
"""
import warnings
import numpy as np
from itertools import product
from scipy.sparse import csr_matrix
from scipy.sparse import csc_matrix
from scipy.sparse import coo_matrix
from sklearn import datasets
from sklearn.base import clone
from sklearn.ensemble import GradientBoostingClassifier
from sklearn.ensemble import GradientBoostingRegressor
from sklearn.ensemble.gradient_boosting import ZeroEstimator
from sklearn.metrics import mean_squared_error
from sklearn.utils import check_random_state, tosequence
from sklearn.utils.testing import assert_almost_equal
from sklearn.utils.testing import assert_array_almost_equal
from sklearn.utils.testing import assert_array_equal
from sklearn.utils.testing import assert_equal
from sklearn.utils.testing import assert_greater
from sklearn.utils.testing import assert_less
from sklearn.utils.testing import assert_raises
from sklearn.utils.testing import assert_true
from sklearn.utils.testing import assert_warns
from sklearn.utils.testing import skip_if_32bit
from sklearn.exceptions import DataConversionWarning
from sklearn.exceptions import NotFittedError
# toy sample: six 2-D points, linearly separable into two classes.
X = [[-2, -1], [-1, -1], [-1, -2], [1, 1], [1, 2], [2, 1]]
y = [-1, -1, -1, 1, 1, 1]
# Query points and their expected class labels.
T = [[-1, -1], [2, 2], [3, 2]]
true_result = [-1, 1, 1]
# Shared RNG so every permutation below is reproducible.
rng = np.random.RandomState(0)
# also load the boston dataset
# and randomly permute it
boston = datasets.load_boston()
perm = rng.permutation(boston.target.size)
boston.data = boston.data[perm]
boston.target = boston.target[perm]
# also load the iris dataset
# and randomly permute it
iris = datasets.load_iris()
perm = rng.permutation(iris.target.size)
iris.data = iris.data[perm]
iris.target = iris.target[perm]
def check_classification_toy(presort, loss):
    # Check classification on a toy dataset.
    clf = GradientBoostingClassifier(loss=loss, n_estimators=10,
                                     random_state=1, presort=presort)
    # Predicting before fitting must raise.
    assert_raises(ValueError, clf.predict, T)
    clf.fit(X, y)
    assert_array_equal(clf.predict(T), true_result)
    assert_equal(10, len(clf.estimators_))
    # The training deviance should decrease at some boosting stage.
    deviance_decrease = (clf.train_score_[:-1] - clf.train_score_[1:])
    assert_true(np.any(deviance_decrease >= 0.0))
    # apply() returns one leaf index per (sample, stage, class-column).
    leaves = clf.apply(X)
    assert_equal(leaves.shape, (6, 10, 1))
def test_classification_toy():
    """Yield one toy-classification check per (presort, loss) combination."""
    for presort in ('auto', True, False):
        for loss in ('deviance', 'exponential'):
            yield check_classification_toy, presort, loss
def test_parameter_checks():
    """Each invalid constructor parameter must raise ValueError when fitting."""
    bad_params = (
        dict(n_estimators=0),
        dict(n_estimators=-1),
        dict(learning_rate=0.0),
        dict(learning_rate=-1.0),
        dict(loss='foobar'),
        dict(min_samples_split=0.0),
        dict(min_samples_split=-1.0),
        dict(min_samples_split=1.1),
        dict(min_samples_leaf=0),
        dict(min_samples_leaf=-1.0),
        dict(min_weight_fraction_leaf=-1.),
        dict(min_weight_fraction_leaf=0.6),
        dict(subsample=0.0),
        dict(subsample=1.1),
        dict(subsample=-0.1),
        dict(max_depth=-0.1),
        dict(max_depth=0),
        dict(init={}),
    )
    for params in bad_params:
        assert_raises(ValueError,
                      GradientBoostingClassifier(**params).fit, X, y)
    # test fit before feature importance
    assert_raises(ValueError,
                  lambda: GradientBoostingClassifier().feature_importances_)
    # deviance requires ``n_classes >= 2``.
    assert_raises(ValueError,
                  lambda X, y: GradientBoostingClassifier(
                      loss='deviance').fit(X, y),
                  X, [0, 0, 0, 0])
def test_loss_function():
    """A classifier rejects regression losses and vice versa."""
    for regression_loss in ('ls', 'lad', 'quantile', 'huber'):
        assert_raises(ValueError,
                      GradientBoostingClassifier(loss=regression_loss).fit,
                      X, y)
    for classification_loss in ('deviance', 'exponential'):
        assert_raises(ValueError,
                      GradientBoostingRegressor(loss=classification_loss).fit,
                      X, y)
def check_classification_synthetic(presort, loss):
    # Test GradientBoostingClassifier on synthetic dataset used by
    # Hastie et al. in ESLII Example 12.7.
    X, y = datasets.make_hastie_10_2(n_samples=12000, random_state=1)
    X_train, X_test = X[:2000], X[2000:]
    y_train, y_test = y[:2000], y[2000:]
    # Full-sample boosting with depth-1 stumps.
    # NOTE(review): this first fit does not pass ``presort``; presumably only
    # the subsampled fit below is meant to exercise it — confirm.
    gbrt = GradientBoostingClassifier(n_estimators=100, min_samples_split=2,
                                      max_depth=1, loss=loss,
                                      learning_rate=1.0, random_state=0)
    gbrt.fit(X_train, y_train)
    error_rate = (1.0 - gbrt.score(X_test, y_test))
    assert_less(error_rate, 0.09)
    # Stochastic gradient boosting (subsample=0.5) should do at least as well.
    gbrt = GradientBoostingClassifier(n_estimators=200, min_samples_split=2,
                                      max_depth=1, loss=loss,
                                      learning_rate=1.0, subsample=0.5,
                                      random_state=0,
                                      presort=presort)
    gbrt.fit(X_train, y_train)
    error_rate = (1.0 - gbrt.score(X_test, y_test))
    assert_less(error_rate, 0.08)
def test_classification_synthetic():
    """Yield one synthetic-data check per (presort, loss) combination."""
    for presort in ('auto', True, False):
        for loss in ('deviance', 'exponential'):
            yield check_classification_synthetic, presort, loss
def check_boston(presort, loss, subsample):
    # Check consistency on dataset boston house prices with least squares
    # and least absolute deviation.
    ones = np.ones(len(boston.target))
    last_y_pred = None
    # Uniform weights (None, all-ones, all-twos) must give identical models.
    for sample_weight in None, ones, 2 * ones:
        clf = GradientBoostingRegressor(n_estimators=100,
                                        loss=loss,
                                        max_depth=4,
                                        subsample=subsample,
                                        min_samples_split=2,
                                        random_state=1,
                                        presort=presort)
        # Predicting before fitting must raise.
        assert_raises(ValueError, clf.predict, boston.data)
        clf.fit(boston.data, boston.target,
                sample_weight=sample_weight)
        # One leaf index per (sample, boosting stage).
        leaves = clf.apply(boston.data)
        assert_equal(leaves.shape, (506, 100))
        y_pred = clf.predict(boston.data)
        mse = mean_squared_error(boston.target, y_pred)
        assert_less(mse, 6.0)
        # Rescaling uniform weights must not change the predictions.
        if last_y_pred is not None:
            assert_array_almost_equal(last_y_pred, y_pred)
        last_y_pred = y_pred
def test_boston():
for presort, loss, subsample in product(('auto', True, False),
('ls', 'lad', 'huber'),
|
andybondar/CloudFerry | cloudferrylib/os/network/neutron.py | Python | apache-2.0 | 47,578 | 0.000504 | # Copyright (c) 2014 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the License);
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an AS IS BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and#
# limitations under the License.
import ipaddr
from neutronclient.common import exceptions as neutron_exc
from neutronclient.v2_0 import client as neutron_client
from cloudferrylib.base import network
from cloudferrylib.utils import utils as utl
# Module-level logger for this network-migration helper.
LOG = utl.get_log(__name__)
# Name of the security group that exists in every tenant by default.
DEFAULT_SECGR = 'default'
class NeutronNetwork(network.Network):
"""
The main class for working with OpenStack Neutron client
"""
def __init__(self, config, cloud):
    """Store cloud handles and build a proxied Neutron client.

    :param config: cloud configuration object
    :param cloud: Cloud object exposing the shared ``resources`` registry
    """
    super(NeutronNetwork, self).__init__(config)
    self.cloud = cloud
    self.identity_client = cloud.resources['identity']
    # Wrap the raw client in the project's proxy helper (behavior defined
    # elsewhere in the code base).
    self.neutron_client = self.proxy(self.get_client(), config)
    # Optional external-network mapping between clouds; {} when file unset.
    self.ext_net_map = \
        utl.read_yaml_file(self.config.migrate.ext_net_map) or {}
def get_client(self):
    """Build an unproxied neutronclient v2.0 Client from the cloud config."""
    return neutron_client.Client(
        username=self.config.cloud.user,
        password=self.config.cloud.password,
        tenant_name=self.config.cloud.tenant,
        auth_url=self.config.cloud.auth_url)
def read_info(self, **kwargs):
    """Collect all Neutron resources to migrate.

    :rtype: dict with networks, subnets, routers, floating IPs, security
            groups, and (when ``keep_lbaas`` is set) LBaaS objects.
    """
    # An empty tenant_id means "all tenants" for the get_* helpers.
    tenant_id = ''
    if not self.config.migrate.all_networks:
        tenant_name = self.config.cloud.tenant
        tenant_id = self.identity_client.get_tenant_id_by_name(tenant_name)
    info = {'networks': self.get_networks(tenant_id),
            'subnets': self.get_subnets(tenant_id),
            'routers': self.get_routers(tenant_id),
            'floating_ips': self.get_floatingips(tenant_id),
            'security_groups': self.get_sec_gr_and_rules(tenant_id),
            'meta': {}}
    if self.config.migrate.keep_lbaas:
        info['lbaas'] = dict()
        info['lb_pools'] = self.get_lb_pools()
        info['lb_monitors'] = self.get_lb_monitors()
        info['lb_members'] = self.get_lb_members()
        info['lb_vips'] = self.get_lb_vips()
    return info
def deploy(self, info):
    """Recreate the resources gathered by read_info() on the destination.

    Order matters: networks first, then subnets and routers that reference
    them, then optional floating IPs, security groups, and LBaaS objects.
    """
    deploy_info = info
    self.upload_networks(deploy_info['networks'])
    self.upload_subnets(deploy_info['networks'],
                        deploy_info['subnets'])
    self.upload_routers(deploy_info['networks'],
                        deploy_info['subnets'],
                        deploy_info['routers'])
    if self.config.migrate.keep_floatingip:
        self.upload_floatingips(deploy_info['networks'],
                                deploy_info['floating_ips'])
    self.upload_neutron_security_groups(deploy_info['security_groups'])
    self.upload_sec_group_rules(deploy_info['security_groups'])
    if self.config.migrate.keep_lbaas:
        self.upload_lb_pools(deploy_info['lb_pools'],
                             deploy_info['subnets'])
        self.upload_lb_monitors(deploy_info['lb_monitors'])
        self.associate_lb_monitors(deploy_info['lb_pools'],
                                   deploy_info['lb_monitors'])
        self.upload_lb_members(deploy_info['lb_members'],
                               deploy_info['lb_pools'])
        self.upload_lb_vips(deploy_info['lb_vips'],
                            deploy_info['lb_pools'],
                            deploy_info['subnets'])
def get_func_mac_address(self, instance):
    """Return a callable mapping an IP address to its port's MAC address.

    ``instance`` is accepted for interface compatibility but unused here.
    """
    return self.get_mac_by_ip
def get_mac_by_ip(self, ip_address):
    """Return the MAC address of the port that owns ``ip_address``.

    Returns None implicitly when no port carries that fixed IP.
    """
    for port in self.get_list_ports():
        fixed_ips = port['fixed_ips']
        if any(entry['ip_address'] == ip_address for entry in fixed_ips):
            return port['mac_address']
def get_list_ports(self, **kwargs):
    """Return all Neutron ports matching the given list_ports() filters."""
    return self.neutron_client.list_ports(**kwargs)['ports']
def create_port(self, net_id, mac, ip, tenant_id, keep_ip, sg_ids=None):
    """Create a Neutron port and return the resulting port dict.

    When ``keep_ip`` is true the original fixed IP is requested; when
    ``sg_ids`` is non-empty the port is placed in those security groups.
    """
    param_create_port = {'network_id': net_id,
                         'mac_address': mac,
                         'tenant_id': tenant_id}
    if sg_ids:
        param_create_port['security_groups'] = sg_ids
    if keep_ip:
        param_create_port['fixed_ips'] = [{"ip_address": ip}]
    return self.neutron_client.create_port({
        'port': param_create_port})['port']
def delete_port(self, port_id):
    """Delete the Neutron port with the given id."""
    return self.neutron_client.delete_port(port_id)
def get_network(self, network_info, tenant_id, keep_ip=False):
    """Find a network dict by instance IP (keep_ip), id, or name.

    With ``keep_ip`` the subnets are scanned for one (owned by the tenant
    or shared) whose CIDR contains ``network_info['ip']``; if none matches
    the lookup falls through to the id/name branches below.
    """
    if keep_ip:
        instance_addr = ipaddr.IPAddress(network_info['ip'])
        for snet in self.neutron_client.list_subnets()['subnets']:
            # Recursive call resolves the subnet's parent network by id.
            network = self.get_network({"id": snet['network_id']}, None)
            if snet['tenant_id'] == tenant_id or network['shared']:
                if ipaddr.IPNetwork(snet['cidr']).Contains(instance_addr):
                    return self.neutron_client.\
                        list_networks(id=snet['network_id'])['networks'][0]
    if 'id' in network_info:
        return self.neutron_client.\
            list_networks(id=network_info['id'])['networks'][0]
    if 'name' in network_info:
        return self.neutron_client.\
            list_networks(name=network_info['name'])['networks'][0]
    else:
        raise Exception("Can't find suitable network")
def check_existing_port(self, network_id, mac):
    """Return the id of a port on ``network_id`` with MAC ``mac``, else None."""
    ports = self.get_list_ports(fields=['network_id',
                                        'mac_address', 'id'])
    matches = (port['id'] for port in ports
               if port['network_id'] == network_id
               and port['mac_address'] == mac)
    return next(matches, None)
@staticmethod
def convert(neutron_object, cloud, obj_name):
    """Convert OpenStack Neutron network object to CloudFerry object.

    :param neutron_object: Direct OS NeutronNetwork object to convert,
    :cloud: Cloud object,
    :obj_name: Name of NeutronNetwork object to convert.
               One of: 'network', 'subnet', 'router', 'floating_ip',
               'security_group', 'rule', 'lb_pool', 'lb_member',
               'lb_monitor', 'lb_vip'.
    :raises KeyError: if ``obj_name`` is not a known object kind.
    """
    # Dispatch table from object kind to its converter.
    obj_map = {
        'network': NeutronNetwork.convert_networks,
        'subnet': NeutronNetwork.convert_subnets,
        'router': NeutronNetwork.convert_routers,
        'floating_ip': NeutronNetwork.convert_floatingips,
        'security_group': NeutronNetwork.convert_security_groups,
        'rule': NeutronNetwork.convert_rules,
        'lb_pool': NeutronNetwork.convert_lb_pools,
        'lb_member': NeutronNetwork.convert_lb_members,
        'lb_monitor': NeutronNetwork.convert_lb_monitors,
        'lb_vip': NeutronNetwork.convert_lb_vips
    }
    return obj_map[obj_name](neutron_object, cloud)
@staticmethod
def con | vert_networks(net, cloud):
identity_res = cloud.resources[utl.IDENTITY_RESOURCE]
net_res = cloud.resources[utl.NETWORK_RESOURCE]
get_tenant_name = identity_res.get_tenants_func()
subnet_names = []
for subnet in net['subnets']:
name = net_res.neutron_client.show_subnet(subnet)['subnet']['name']
subnet_names.append(name)
result = {
'name': net['name'],
'id': net['id'],
'admin_state_up': net['admin_state_up'],
'shared': net['shared'],
'tenant_id': net['tenant_id'],
'tenant_name': get_tenant_name(net['tenant_id']),
'subnet_names': subnet_names,
'router:exte |
hexlism/css_platform | sleepyenv/bin/pilfont.py | Python | apache-2.0 | 209 | 0 | #!/home/firlism/tools/css_platform/sleepyenv/bin/python
# EASY-INSTALL-SCRIPT: 'Pillow==2.8.2','pil | font.py'
__requires__ = 'Pillow==2.8.2'
__import__('pkg_resources').run_script('Pillow= | =2.8.2', 'pilfont.py')
|
Physiolution-Polska/MoDDiss | gui/__init__.py | Python | gpl-2.0 | 52 | 0 | """
Package for graphical user interface methods
"""
|
sevaivanov/various | python/server/twisted/purely-twisted.py | Python | mit | 887 | 0.00451 | import sys
from twisted.python import log
from twisted.internet import reactor
from twisted.internet.protocol import ServerFactory, ClientFactory, Protocol
class EchoServerProtocol(Protocol):
    """Echoes every received payload back to the client, with logging."""

    def dataReceived(self, data):
        # ``data`` arrives as bytes; bytes %-formatting requires Python 3.5+
        # (or Python 2) — presumably intended here, confirm target version.
        log.msg('Data received %s' % data)
        self.transport.write(b'Server push back: %s' % data)

    def connectionMade(self):
        log.msg('Client connection from %s' % self.transport.getPeer())

    def connectionLost(self, reason):
        log.msg('Lost connection because %s' % reason)
class EchoServerFactory(ServerFactory):
    """Builds one EchoServerProtocol per incoming connection."""

    def buildProtocol(self, addr):
        return EchoServerProtocol()
# Start the echo server on TCP port 8080 and log to stdout.
if __name__ == '__main__':
    log.startLogging(sys.stdout)
    log.msg('Starting twisted engines...')
    server = EchoServerFactory()
    reactor.listenTCP(8080, server)
    log.msg('Listening on http://127.0.0.1:8080')
    reactor.run()
|
Eleyvie/wreck | integrated/headers/header_items.py | Python | mit | 14,843 | 0.019605 | ###################################################
# header items.py
# This file contains declarations for items
# DO NOT EDIT THIS FILE!
###################################################
#item flags
itp_type_horse = 0x0000000000000001
itp_type_one_handed_wpn = 0x0000000000000002
itp_type_two_handed_wpn = 0x0000000000000003
itp_type_polearm = 0x0000000000000004
itp_type_arrows = 0x0000000000000005
itp_type_bolts = 0x0000000000000006
itp_type_shield = 0x0000000000000007
itp_type_bow = 0x0000000000000008
itp_type_crossbow = 0x0000000000000009
itp_type_thrown = 0x000000000000000a
itp_type_goods = 0x000000000000000b
itp_type_head_armor = 0x000000000000000c
itp_type_body_armor = 0x000000000000000d
itp_type_foot_armor = 0x000000000000000e
itp_type_hand_armor = 0x000000000000000f
itp_type_pistol = 0x0000000000000010
itp_type_musket = 0x0000000000000011
itp_type_bullets = 0x0000000000000012
itp_type_animal = 0x0000000000000013
itp_type_book = 0x0000000000000014
itp_force_attach_left_hand = 0x0000000000000100
itp_force_attach_right_hand = 0x0000000000000200
itp_force_attach_left_forearm = 0x0000000000000300
itp_attach_armature = 0x0000000000000f00
itp_attachment_mask = 0x0000000000000f00
itp_unique = 0x0000000000001000
itp_always_loot = 0x0000000000002000 # Was itp_melee
itp_no_parry = 0x0000000000004000 # Was itp_spear
itp_default_ammo = 0x0000000000008000
itp_merchandise = 0x0000000000010000
itp_wooden_attack = 0x0000000000020000
itp_wooden_parry = 0x0000000000040000
itp_food = 0x0000000000080000
itp_cant_reload_on_horseback = 0x0000000000100000
itp_two_handed = 0x0000000000200000
itp_primary = 0x0000000000400000
itp_secondary = 0x0000000000800000
itp_covers_legs = 0x0000000001000000
itp_doesnt_cover_hair = 0x0000000001000000
itp_can_penetrate_shield = 0x0000000001000000
itp_consumable = 0x0000000002000000
itp_bonus_against_shield = 0x0000000004000000
itp_penalty_with_shield = 0x0000000008000000
itp_cant_use_on_horseback = 0x0000000010000000
itp_civilian = 0x0000000020000000
itp_next_item_as_melee = 0x0000000020000000
itp_fit_to_head = 0x0000000040000000
itp_offset_lance = 0x0000000040000000
itp_covers_head = 0x0000000080000000
itp_couchable = 0x0000000080000000
itp_crush_through = 0x0000000100000000
#itp_knock_back = 0x0000000200000000 being used?
itp_remove_item_on_use = 0x0000000400000000
itp_unbalanced = 0x0000000800000000
itp_covers_beard = 0x0000001000000000
itp_no_pick_up_from_ground = 0x0000002000000000
itp_can_knock_down = 0x0000004000000000
itp_covers_hair = 0x0000008000000000 #remove hair mesh for armors only
itp_force_show_body = 0x0000010000000000 # forces showing body (works on body armor items)
itp_force_show_left_hand = 0x0000020000000000 # forces showing left hand (works on hand armor items)
itp_force_show_right_hand = 0x0000040000000000 # forces showing right hand (works on hand armor items)
itp_extra_penetration = 0x0000100000000000
itp_has_bayonet = 0x0000200000000000
itp_cant_reload_while_moving = 0x0000400000000000
itp_ignore_gravity = 0x0000800000000000
itp_ignore_friction = 0x0001000000000000
itp_is_pike = 0x0002000000000000
itp_offset_musket = 0x0004000000000000
itp_no_blur = 0x0008000000000000
itp_cant_reload_while_moving_mounted = 0x0010000000000000
itp_has_upper_stab = 0x0020000000000000
itp_kill_info_mask = 0x0700000000000000
itp_kill_info_bits = 56
#equipment slots
ek_item_0 = 0
ek_item_1 = 1
ek_item_2 = 2
ek_item_3 = 3
ek_head = 4
ek_body = 5
ek_foot = 6
ek_gloves = 7
ek_horse = 8
ek_food = 9
max_inventory_items = 96
num_equipment_kinds = ek_food + 1
num_weapon_proficiencies = 7
# Damage type codes stored in the damage-type sub-fields.
cut = 0
pierce = 1
blunt = 2

# Field-width masks for the packed item-stats word.
ibf_armor_mask = 0xff    # 8-bit fields (armor, weight, difficulty, ...)
ibf_damage_mask = 0x3ff  # 10-bit damage fields (value + type bits)
ibf_10bit_mask = 0x3ff   # generic 10-bit fields

# Bit offsets of the base item fields.
ibf_head_armor_bits = 0
ibf_body_armor_bits = 8
ibf_leg_armor_bits = 16
ibf_weight_bits = 24
ibf_difficulty_bits = 32
ibf_hitpoints_mask = 0xffff  # hit points occupy a 16-bit field
ibf_hitpoints_bits = 40

# Bit offsets of the weapon fields.
iwf_swing_damage_bits = 50
iwf_swing_damage_type_bits = 58
iwf_thrust_damage_bits = 60
iwf_thrust_damage_type_bits = 68
iwf_weapon_length_bits = 70
iwf_speed_rating_bits = 80
iwf_shoot_speed_bits = 90
iwf_max_ammo_bits = 100  # use this for shield endurance too?
iwf_abundance_bits = 110
iwf_accuracy_bits = 16  # reuse leg_armor for accuracy
iwf_damage_type_bits = 8


def get_weight(y):
    """Decode the item weight; stored in quarter-weight (0.25) units."""
    return ((y >> ibf_weight_bits) & ibf_armor_mask) * 0.25


def get_head_armor(y):
    """Decode the head-armor rating."""
    return (y >> ibf_head_armor_bits) & ibf_armor_mask


def get_body_armor(y):
    """Decode the body-armor rating."""
    return (y >> ibf_body_armor_bits) & ibf_armor_mask


def get_leg_armor(y):
    """Decode the leg-armor rating."""
    return (y >> ibf_leg_armor_bits) & ibf_armor_mask


def get_difficulty(y):
    """Decode the difficulty (requirement) field."""
    return (y >> ibf_difficulty_bits) & ibf_armor_mask


def get_hit_points(y):
    """Decode the 16-bit hit-points field."""
    return (y >> ibf_hitpoints_bits) & ibf_hitpoints_mask


def get_speed_rating(y):
    """Decode the weapon speed rating."""
    return (y >> iwf_speed_rating_bits) & ibf_armor_mask


def get_missile_speed(y):
    """Decode the 10-bit missile (shoot) speed."""
    return (y >> iwf_shoot_speed_bits) & ibf_10bit_mask


def get_weapon_length(y):
    """Decode the 10-bit weapon length."""
    return (y >> iwf_weapon_length_bits) & ibf_10bit_mask


def get_max_ammo(y):
    """Decode the maximum ammunition count."""
    return (y >> iwf_max_ammo_bits) & ibf_armor_mask


def get_swing_damage(y):
    """Decode the 10-bit swing damage field (includes the type bits at 58+)."""
    return (y >> iwf_swing_damage_bits) & ibf_damage_mask


def get_thrust_damage(y):
    """Decode the 10-bit thrust damage field (includes the type bits at 68+)."""
    return (y >> iwf_thrust_damage_bits) & ibf_damage_mask


def get_abundance(y):
    """Decode abundance; an unset (zero) field means the default of 100."""
    abundance = (y >> iwf_abundance_bits) & ibf_armor_mask
    return abundance or 100
def custom_kill_info(x): # you have to add ico_custom_x (where x is a number between 1 and 7) mesh in order to display it correctly.
    # Packs a custom kill-info index into the 3-bit itp_kill_info field.
    # NOTE(review): ``bignum`` is not defined in this file — presumably it is
    # a module-system global imported elsewhere; confirm before reuse.
    return (((bignum | x) & (itp_kill_info_mask >> itp_kill_info_bits)) << itp_kill_info_bits)
# Item capabilities:
itcf_thrust_onehanded = 0x0000000000000001
itcf_overswing_onehanded = 0x0000000000000002
itcf_slashright_onehanded = 0x0000000000000004
itcf_slashleft_onehanded = 0x0000000000000008
itcf_thrust_twohanded = 0x0000000000000010
itcf_overswing_twohanded = 0x0000000000000020
itcf_slashright_twohanded = 0x0000000000000040
itcf_slashleft_twohanded = 0x0000000000000080
itcf_thrust_polearm = 0x0000000000000100
itcf_overswing_polearm = 0x0000000000000200
itcf_slashright_polearm = 0x0000000000000400
itcf_slashleft_polearm = 0x0000000000000800
itcf_shoot_bow = 0x0000000000001000
itcf_shoot_javelin = 0x0000000000002000
itcf_shoot_crossbow = 0x0000000000004000
itcf_throw_stone = 0x0000000000010000
itcf_throw_knife = 0x0000000000020000
itcf_throw_axe = 0x0000000000030000
itcf_throw_javelin = 0x0000000000040000
itcf_shoot_pistol = 0x0000000000070000
itcf_shoot_musket |
shsingh/ansible | lib/ansible/modules/network/aci/aci_l3out_route_tag_policy.py | Python | gpl-3.0 | 6,992 | 0.001573 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = ty | pe
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'certified'}
DOC | UMENTATION = r'''
---
module: aci_l3out_route_tag_policy
short_description: Manage route tag policies (l3ext:RouteTagPol)
description:
- Manage route tag policies on Cisco ACI fabrics.
version_added: '2.4'
options:
rtp:
description:
- The name of the route tag policy.
type: str
required: yes
aliases: [ name, rtp_name ]
description:
description:
- The description for the route tag policy.
type: str
aliases: [ descr ]
tenant:
description:
- The name of the tenant.
type: str
required: yes
aliases: [ tenant_name ]
tag:
description:
- The value of the route tag.
- Accepted values range between C(0) and C(4294967295).
- The APIC defaults to C(4294967295) when unset during creation.
type: int
state:
description:
- Use C(present) or C(absent) for adding or removing.
- Use C(query) for listing an object or multiple objects.
type: str
choices: [ absent, present, query ]
default: present
name_alias:
version_added: '2.10'
description:
- The alias for the current object. This relates to the nameAlias field in ACI.
type: str
extends_documentation_fragment: aci
notes:
- The C(tenant) used must exist before using this module in your playbook.
The M(aci_tenant) module can be used for this.
seealso:
- module: aci_tenant
- name: APIC Management Information Model reference
description: More information about the internal APIC class B(l3ext:RouteTagPol).
link: https://developer.cisco.com/docs/apic-mim-ref/
author:
- Dag Wieers (@dagwieers)
'''
# FIXME: Add more, better examples
EXAMPLES = r'''
- aci_l3out_route_tag_policy:
host: apic
username: admin
password: SomeSecretPassword
rtp: '{{ rtp_name }}'
tenant: production
tag: '{{ tag }}'
description: '{{ description }}'
delegate_to: localhost
'''
RETURN = r'''
current:
description: The existing configuration from the APIC after the module has finished
returned: success
type: list
sample:
[
{
"fvTenant": {
"attributes": {
"descr": "Production environment",
"dn": "uni/tn-production",
"name": "production",
"nameAlias": "",
"ownerKey": "",
"ownerTag": ""
}
}
}
]
error:
description: The error information as returned from the APIC
returned: failure
type: dict
sample:
{
"code": "122",
"text": "unknown managed object class foo"
}
raw:
description: The raw output returned by the APIC REST API (xml or json)
returned: parse error
type: str
sample: '<?xml version="1.0" encoding="UTF-8"?><imdata totalCount="1"><error code="122" text="unknown managed object class foo"/></imdata>'
sent:
description: The actual/minimal configuration pushed to the APIC
returned: info
type: list
sample:
{
"fvTenant": {
"attributes": {
"descr": "Production environment"
}
}
}
previous:
description: The original configuration from the APIC before the module has started
returned: info
type: list
sample:
[
{
"fvTenant": {
"attributes": {
"descr": "Production",
"dn": "uni/tn-production",
"name": "production",
"nameAlias": "",
"ownerKey": "",
"ownerTag": ""
}
}
}
]
proposed:
description: The assembled configuration from the user-provided parameters
returned: info
type: dict
sample:
{
"fvTenant": {
"attributes": {
"descr": "Production environment",
"name": "production"
}
}
}
filter_string:
description: The filter string used for the request
returned: failure or debug
type: str
sample: ?rsp-prop-include=config-only
method:
description: The HTTP method used for the request to the APIC
returned: failure or debug
type: str
sample: POST
response:
description: The HTTP response from the APIC
returned: failure or debug
type: str
sample: OK (30 bytes)
status:
description: The HTTP status from the APIC
returned: failure or debug
type: int
sample: 200
url:
description: The HTTP url used for the request to the APIC
returned: failure or debug
type: str
sample: https://10.11.12.13/api/mo/uni/tn-production.json
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network.aci.aci import ACIModule, aci_argument_spec
def main():
    """Entry point: create, delete, or query an ACI route tag policy."""
    argument_spec = aci_argument_spec()
    argument_spec.update(
        tenant=dict(type='str', aliases=['tenant_name']),  # Not required for querying all objects
        rtp=dict(type='str', aliases=['name', 'rtp_name']),  # Not required for querying all objects
        description=dict(type='str', aliases=['descr']),
        tag=dict(type='int'),
        state=dict(type='str', default='present', choices=['absent', 'present', 'query']),
        name_alias=dict(type='str'),
    )
    module = AnsibleModule(
        argument_spec=argument_spec,
        supports_check_mode=True,
        # tenant and rtp are only mandatory when creating or deleting.
        required_if=[
            ['state', 'absent', ['rtp', 'tenant']],
            ['state', 'present', ['rtp', 'tenant']],
        ],
    )
    rtp = module.params.get('rtp')
    description = module.params.get('description')
    tag = module.params.get('tag')
    state = module.params.get('state')
    tenant = module.params.get('tenant')
    name_alias = module.params.get('name_alias')
    aci = ACIModule(module)
    # Build the REST URL: fvTenant (tn-<tenant>) -> l3extRouteTagPol (rttag-<rtp>).
    aci.construct_url(
        root_class=dict(
            aci_class='fvTenant',
            aci_rn='tn-{0}'.format(tenant),
            module_object=tenant,
            target_filter={'name': tenant},
        ),
        subclass_1=dict(
            aci_class='l3extRouteTagPol',
            aci_rn='rttag-{0}'.format(rtp),
            module_object=rtp,
            target_filter={'name': rtp},
        ),
    )
    aci.get_existing()
    if state == 'present':
        aci.payload(
            aci_class='l3extRouteTagPol',
            class_config=dict(
                name=rtp,
                descr=description, tag=tag,
                nameAlias=name_alias,
            ),
        )
        # Only POST the difference against the existing configuration.
        aci.get_diff(aci_class='l3extRouteTagPol')
        aci.post_config()
    elif state == 'absent':
        aci.delete_config()
    aci.exit_json()


if __name__ == "__main__":
    main()
|
Davidhw/WikipediaEditScrapingAndAnalysis | scraping_wikipedia_links/popular_articles/popular_articles/items.py | Python | gpl-2.0 | 276 | 0.003623 | # Define here | the models for your scraped items
#
# See documentation in:
# http://doc.scrapy.org/en/latest/topics/items.html
from scrapy.item import Item, Field
class PopularArticlesItem(Item):
# define the fields | for your item here like:
# name = Field()
pass
|
3dinfluence/opensprinklerlib | opensprinklerlib/controller/gpio_compatible.py | Python | mit | 1,943 | 0.002059 | #!/usr/bin/env python
try:
# Attempt to load RPi Module
import RPi.GPIO as GPIO
except:
try:
# Attempt to load Beagle Bone Module
import Adafruit_BBIO.GPIO as GPIO
except:
pass
from controller import *
class GPIOCompatible(Controller):
    """GPIO interface compatible Controller abstract class.

    Drives a shift register over four GPIO pins (clock, output-enable, data,
    latch). Currently used by the RPi and BeagleBone controller classes.
    """
    # NOTE(review): ``__metaclass__`` only has effect on Python 2; on
    # Python 3 this class would not actually be abstract — confirm target.
    __metaclass__ = abc.ABCMeta

    def __init__(self):
        """GPIO Compatible Class Initializer."""
        super(GPIOCompatible, self).__init__()

    def __del__(self):
        # Clear all station outputs before releasing the GPIO pins.
        self._shiftOut()
        GPIO.cleanup()

    def _init_hardware(self):
        # Pin numbers come from the subclass; then configure the register.
        self._setup_pins()
        self._init_gpio()

    @abc.abstractmethod
    def _setup_pins(self):
        # Subclasses assign self._pin_sr_clk/_noe/_dat/_lat here.
        return

    def _init_gpio(self):
        # Disable the register output while configuring, push the initial
        # (all-off) state, then re-enable output.
        GPIO.setup(self._pin_sr_clk, GPIO.OUT)
        GPIO.setup(self._pin_sr_noe, GPIO.OUT)
        self._disableShiftRegisterOutput()
        GPIO.setup(self._pin_sr_dat, GPIO.OUT)
        GPIO.setup(self._pin_sr_lat, GPIO.OUT)
        self._shiftOut()
        self._enableShiftRegisterOutput()

    def _enableShiftRegisterOutput(self):
        # nOE is active-low: driving it low enables the outputs.
        GPIO.output(self._pin_sr_noe, False)

    def _disableShiftRegisterOutput(self):
        GPIO.output(self._pin_sr_noe, True)

    def _shiftOut(self):
        # Clock out all station bits, highest station first, then latch.
        GPIO.output(self._pin_sr_clk, False) # need to test to see if this is necessary
        GPIO.output(self._pin_sr_lat, False)
        for s in range(0, self._MAX_NSTATIONS):
            GPIO.output(self._pin_sr_clk, False)
            GPIO.output(self._pin_sr_dat, 1 if (self._station_bits[self._MAX_NSTATIONS-1-s] == 1) else 0)
            GPIO.output(self._pin_sr_clk, True)
        GPIO.output(self._pin_sr_lat, True)

    def enable(self, sid):
        # Station ids are 1-based; the bit array is 0-based.
        self._station_bits[sid - 1] = 1
        self._shiftOut()

    def disable(self, sid):
        self._station_bits[sid - 1] = 0
        self._shiftOut()
ancho85/pylint-playero-plugin | tests/input/func_noerror_query_heir.py | Python | gpl-2.0 | 522 | 0.011494 | # pylint:disable=R0201
from OpenOrange import *
from User import User
from RetroactiveAccounts import RetroactiveAccounts
class HeirFinder(RetroactiveAccounts):
    """Report that expands placeholder tokens in its query before opening it."""

    def doReplacements(self, txt):
        """Return *txt* with the ``:1`` placeholder replaced."""
        labels = {1: "ONE", 2: "TWO"}
        user = User.bring("USER")
        return txt.replace(":1", user.Name + labels[1])

    def run(self):
        query = self.getQuery()
        query.sql = self.doReplacements(query.sql)
        # pylint:disable=E6601
        query.open()  # there will be missing tables here
|
mmnelemane/nova | nova/tests/unit/api/ec2/test_ec2_validate.py | Python | apache-2.0 | 11,301 | 0.001327 | # Copyright 2012 Cloudscaling, Inc.
# All Rights Reserved.
# Copyright 2013 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
from oslo_config import cfg
from oslo_utils import timeutils
from nova.api.ec2 import cloud
from nova.api.ec2 import ec2utils
from nova.compute import utils as compute_utils
from nova import context
from nova import db
from nova import exception
from nova import test
from nova.tests.unit import cast_as_call
from nova.tests.unit import fake_network
from nova.tests.unit import fake_notifier
from nova.tests.unit.image import fake
CONF = cfg.CONF
CONF.import_opt('compute_driver', 'nova.virt.driver')
class EC2ValidateTestCase(test.TestCase):
    """Verify the EC2 API raises the right exception for malformed and
    unknown instance/volume identifiers.

    Fix: repairs two lines garbled in transit
    (``| fake_network...`` and ``fake_ | notifier...``).
    """

    def setUp(self):
        super(EC2ValidateTestCase, self).setUp()
        self.flags(compute_driver='nova.virt.fake.FakeDriver')

        # No-op replacement for instance-usage notifications.
        def dumb(*args, **kwargs):
            pass

        self.stubs.Set(compute_utils, 'notify_about_instance_usage', dumb)
        fake_network.set_stub_network_methods(self.stubs)

        # set up our cloud
        self.cloud = cloud.CloudController()

        # Short-circuit the conductor service
        self.flags(use_local=True, group='conductor')

        # Stub out the notification service so we use the no-op serializer
        # and avoid lazy-load traces with the wrap_exception decorator in
        # the compute service.
        fake_notifier.stub_notifier(self.stubs)
        self.addCleanup(fake_notifier.reset)

        # set up services
        self.conductor = self.start_service('conductor',
                manager=CONF.conductor.manager)
        self.compute = self.start_service('compute')
        # (sic: 'scheduter' typo kept -- the attribute is only assigned here)
        self.scheduter = self.start_service('scheduler')
        self.network = self.start_service('network')
        self.image_service = fake.FakeImageService()

        self.user_id = 'fake'
        self.project_id = 'fake'
        self.context = context.RequestContext(self.user_id,
                                              self.project_id,
                                              is_admin=True)

        # id -> expected-exception tables driving the parameterised tests below
        self.EC2_MALFORMED_IDS = ['foobar', '', 123]
        self.EC2_VALID__IDS = ['i-284f3a41', 'i-001', 'i-deadbeef']

        self.ec2_id_exception_map = [(x,
                                      exception.InvalidInstanceIDMalformed)
                                     for x in self.EC2_MALFORMED_IDS]
        self.ec2_id_exception_map.extend([(x, exception.InstanceNotFound)
                                          for x in self.EC2_VALID__IDS])
        self.volume_id_exception_map = [(x,
                                         exception.InvalidVolumeIDMalformed)
                                        for x in self.EC2_MALFORMED_IDS]
        self.volume_id_exception_map.extend([(x, exception.VolumeNotFound)
                                             for x in self.EC2_VALID__IDS])

        def fake_show(meh, context, id, **kwargs):
            return {'id': id,
                    'container_format': 'ami',
                    'properties': {
                        'kernel_id': 'cedef40a-ed67-4d10-800e-17455edce175',
                        'ramdisk_id': 'cedef40a-ed67-4d10-800e-17455edce175',
                        'type': 'machine',
                        'image_state': 'available'}}

        def fake_detail(self, context, **kwargs):
            image = fake_show(self, context, None)
            image['name'] = kwargs.get('name')
            return [image]

        fake.stub_out_image_service(self.stubs)
        self.stubs.Set(fake._FakeImageService, 'show', fake_show)
        self.stubs.Set(fake._FakeImageService, 'detail', fake_detail)

        self.useFixture(cast_as_call.CastAsCall(self.stubs))

        # make sure we can map ami-00000001/2 to a uuid in FakeImageService
        db.s3_image_create(self.context,
                           'cedef40a-ed67-4d10-800e-17455edce175')
        db.s3_image_create(self.context,
                           '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6')

    def tearDown(self):
        super(EC2ValidateTestCase, self).tearDown()
        fake.FakeImageService_reset()

    # EC2_API tests (InvalidInstanceID.Malformed)
    def test_console_output(self):
        for ec2_id, e in self.ec2_id_exception_map:
            self.assertRaises(e,
                              self.cloud.get_console_output,
                              context=self.context,
                              instance_id=[ec2_id])

    def test_describe_instance_attribute(self):
        for ec2_id, e in self.ec2_id_exception_map:
            self.assertRaises(e,
                              self.cloud.describe_instance_attribute,
                              context=self.context,
                              instance_id=ec2_id,
                              attribute='kernel')

    def test_instance_lifecycle(self):
        lifecycle = [self.cloud.terminate_instances,
                     self.cloud.reboot_instances,
                     self.cloud.stop_instances,
                     self.cloud.start_instances,
                     ]
        for cmd in lifecycle:
            for ec2_id, e in self.ec2_id_exception_map:
                self.assertRaises(e,
                                  cmd,
                                  context=self.context,
                                  instance_id=[ec2_id])

    def test_create_image(self):
        for ec2_id, e in self.ec2_id_exception_map:
            self.assertRaises(e,
                              self.cloud.create_image,
                              context=self.context,
                              instance_id=ec2_id)

    def test_create_snapshot(self):
        for ec2_id, e in self.volume_id_exception_map:
            self.assertRaises(e,
                              self.cloud.create_snapshot,
                              context=self.context,
                              volume_id=ec2_id)

    def test_describe_volumes(self):
        for ec2_id, e in self.volume_id_exception_map:
            self.assertRaises(e,
                              self.cloud.describe_volumes,
                              context=self.context,
                              volume_id=[ec2_id])

    def test_delete_volume(self):
        for ec2_id, e in self.volume_id_exception_map:
            self.assertRaises(e,
                              self.cloud.delete_volume,
                              context=self.context,
                              volume_id=ec2_id)

    def test_detach_volume(self):
        for ec2_id, e in self.volume_id_exception_map:
            self.assertRaises(e,
                              self.cloud.detach_volume,
                              context=self.context,
                              volume_id=ec2_id)
class EC2TimestampValidationTestCase(test.NoDBTestCase):
"""Test case for EC2 request timestamp validation."""
def test_validate_ec2_timestamp_valid(self):
params = {'Timestamp': '2011-04-22T11:29:49Z'}
expired = ec2utils.is_ec2_timestamp_expired(params)
self.assertFalse(expired)
def test_validate_ec2_timestamp_old_format(self):
params = {'Timestamp': '2011-04-22T11:29:49'}
expired = ec2utils.is_ec2_timestamp_expired(params)
self.assertTrue(expired)
def test_validate_ec2_timestamp_not_set(self):
params = {}
expired = ec2utils.is_ec2_timestamp_expired(params)
self.assertFalse(expired)
def test_validate_ec2_timestamp_ms_time_regex(self):
result = ec2utils._ms_time_regex.match('2011-04-22T11:29:49.123Z')
self.assertIsNotNone(result)
result = ec2utils._ms_time_regex.match('2011-04-22T11:29:49.123456Z')
self.assertIsNotNone(result)
result = ec2utils._ms_time_regex.match('2011-04 |
erdavila/git-svn-diff | tests/impl/git.py | Python | mit | 1,477 | 0.024374 | import os.path
import subprocess
import shutil
class GitImpl(object):
    """Thin test helper around a ``git svn`` clone of the SVN fixture repo."""

    def __init__(self):
        from svn import SvnImpl
        svn_impl = SvnImpl(suffix='-git')
        # Only the server side of the SVN fixture is needed here.
        shutil.rmtree(svn_impl.client_path)
        self.temp_path = svn_impl.temp_path
        self.server_path = svn_impl.server_path
        self.client_path = os.path.join(self.temp_path, 'client.git')
        subprocess.check_call(
            ['git', 'svn', 'clone', 'file://' + self.server_path,
             self.client_path])

    def _git(self, *args):
        """Run one git command inside the clone, raising on failure."""
        subprocess.check_call(['git'] + list(args), cwd=self.client_path)

    def save_file(self, filename, content):
        with open(os.path.join(self.client_path, filename), 'w') as handle:
            handle.write(content)

    def add_new_file(self, *files):
        self._git('add', *files)

    def remove_file(self, *files):
        self._git('rm', *files)

    def commit_all(self, message):
        self._git('commit', '-a', '-m', message)
        self._git('svn', 'dcommit')

    def diff(self, *revs):
        """Write a ``git diff`` (optionally against SVN revision revs[0]) to a
        file and return its path."""
        assert len(revs) <= 1
        if revs:
            commit = subprocess.check_output(
                ['git', 'svn', 'find-rev', 'r%d' % revs[0]],
                cwd=self.client_path).strip()
            assert commit != ''
            revisions = [commit]
        else:
            revisions = ['HEAD']
        diff_file = os.path.join(self.temp_path, 'git.diff')
        with open(diff_file, 'w') as out:
            subprocess.check_call(['git', 'diff', '--no-prefix'] + revisions,
                                  cwd=self.client_path, stdout=out)
        return diff_file
|
irdan/marionette | marionette_tg/__init__.py | Python | apache-2.0 | 6,400 | 0.001563 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
import random
sys.path.append('.')
from twisted.internet import reactor
import marionette_tg.driver
import marionette_tg.multiplexer
import marionette_tg.record_layer
import marionette_tg.updater
EVENT_LOOP_FREQUENCY_S = 0.01
AUTOUPDATE_DELAY = 5
class MarionetteException(Exception):
pass
class Client(object):
    """Marionette client endpoint: runs a client-side format driver and
    multiplexes application streams over it."""
    def __init__(self, format_name, format_version):
        self.multiplexer_outgoing_ = marionette_tg.multiplexer.BufferOutgoing()
        self.multiplexer_incoming_ = marionette_tg.multiplexer.BufferIncoming()
        self.multiplexer_incoming_.addCallback(self.process_cell)
        self.streams_ = {}
        # stream ids are random 32-bit values (0 excluded)
        self.stream_counter_ = random.randint(1,2**32-1)
        self.set_driver(format_name, format_version)
        self.reload_ = False
        # first update must be scheduled for after startup -- presumably so the
        # reactor is already running when it fires (TODO confirm)
        reactor.callLater(AUTOUPDATE_DELAY, self.check_for_update)
    def set_driver(self, format_name, format_version=None):
        # Bind (format, version) -- latest version when none given -- and build
        # a fresh client driver for it.
        # NOTE(review): ``format_version == None`` should idiomatically be
        # ``is None``; kept byte-identical here.
        self.format_name_ = format_name
        if format_version == None:
            self.format_version_ = marionette_tg.dsl.get_latest_version(
                'client', format_name)
        else:
            self.format_version_ = format_version
        self.driver_ = marionette_tg.driver.ClientDriver("client")
        self.driver_.set_multiplexer_incoming(self.multiplexer_incoming_)
        self.driver_.set_multiplexer_outgoing(self.multiplexer_outgoing_)
        self.driver_.setFormat(self.format_name_, self.format_version_)
    def get_format(self):
        # "<format_name>:<format_version>" identifier string
        retval = str(self.format_name_) + \
                 ':' + \
                 str(self.format_version_)
        return retval
    def execute(self, reactor):
        # One event-loop tick; reschedules itself every EVENT_LOOP_FREQUENCY_S.
        # When the driver stops, optionally reload the format from disk.
        if self.driver_.isRunning():
            self.driver_.execute(reactor)
        else:
            if self.reload_:
                self.set_driver(self.format_name_)
                self.reload_ = False
            self.driver_.reset()
        reactor.callLater(EVENT_LOOP_FREQUENCY_S, self.execute, reactor)
    def process_cell(self, cell_obj):
        # Deliver an incoming cell's payload to the owning stream's queue.
        payload = cell_obj.get_payload()
        if payload:
            stream_id = cell_obj.get_stream_id()
            self.streams_[stream_id].srv_queue.put(payload)
    def start_new_stream(self, srv_queue=None):
        # Allocate a stream under a fresh random id and register it.
        stream = marionette_tg.multiplexer.MarionetteStream(
            self.multiplexer_incoming_,
            self.multiplexer_outgoing_,
            self.stream_counter_,
            srv_queue)
        stream.host = self
        self.streams_[self.stream_counter_] = stream
        self.stream_counter_ = random.randint(1,2**32-1)
        return stream
    def terminate(self, stream_id):
        # Forget the stream; no END_OF_STREAM is sent here.
        del self.streams_[stream_id]
    # call this function if you want reload formats from disk
    # at the next possible time
    def reload_driver(self):
        self.reload_ = True
    def check_for_update(self):
        # uncomment the following line to check for updates every N seconds
        # instead of just on startup
        # reactor.callLater(N, self.check_for_update, reactor)
        if marionette_tg.conf.get("general.autoupdate"):
            self.do_update(self.reload_driver)
    def do_update(self, callback):
        # could be replaced with code that updates from a different
        # source (e.g., local computations)
        update_server = marionette_tg.conf.get("general.update_server")
        updater = marionette_tg.updater.FormatUpdater(update_server, use_marionette=True, callback=callback)
        return updater.do_update()
class Server(object):
    """Marionette server endpoint: demultiplexes incoming cells into
    per-stream protocol-factory instances."""

    # Protocol-factory class; assigned by the embedding application before use.
    factory = None

    def __init__(self, format_name):
        self.multiplexer_outgoing_ = marionette_tg.multiplexer.BufferOutgoing()
        self.multiplexer_incoming_ = marionette_tg.multiplexer.BufferIncoming()
        self.multiplexer_incoming_.addCallback(self.process_cell)
        self.factory_instances = {}
        # check_for_update() performs the update itself (when enabled in the
        # config) and returns None, so the original
        # ``if self.check_for_update(): self.do_update()`` branch was dead
        # code -- and would have raised TypeError anyway, since do_update()
        # requires a callback argument.  Reduced to a plain call.
        self.check_for_update()
        self.set_driver(format_name)
        self.reload_ = False

    def set_driver(self, format_name):
        self.format_name_ = format_name
        self.driver_ = marionette_tg.driver.ServerDriver("server")
        self.driver_.set_multiplexer_incoming(self.multiplexer_incoming_)
        self.driver_.set_multiplexer_outgoing(self.multiplexer_outgoing_)
        self.driver_.setFormat(self.format_name_)

    def execute(self, reactor):
        # One event-loop tick; reschedules itself every EVENT_LOOP_FREQUENCY_S.
        if not self.driver_.isRunning():
            if self.reload_:
                self.set_driver(self.format_name_)
                self.reload_ = False
        self.driver_.execute(reactor)
        reactor.callLater(EVENT_LOOP_FREQUENCY_S, self.execute, reactor)

    def process_cell(self, cell_obj):
        """Dispatch one received cell to its per-stream factory instance."""
        cell_type = cell_obj.get_cell_type()
        stream_id = cell_obj.get_stream_id()
        if cell_type == marionette_tg.record_layer.END_OF_STREAM:
            self.factory_instances[stream_id].connectionLost()
            del self.factory_instances[stream_id]
        elif cell_type == marionette_tg.record_layer.NORMAL:
            # Lazily create the factory instance on the first NORMAL cell.
            if not self.factory_instances.get(stream_id):
                stream = marionette_tg.multiplexer.MarionetteStream(
                    self.multiplexer_incoming_, self.multiplexer_outgoing_,
                    stream_id)
                self.factory_instances[stream_id] = self.factory()
                self.factory_instances[stream_id].connectionMade(stream)
            payload = cell_obj.get_payload()
            if payload:
                self.factory_instances[stream_id].dataReceived(payload)

    # call this function if you want reload formats from disk
    # at the next possible time
    def reload_driver(self):
        self.reload_ = True

    def check_for_update(self):
        # uncomment the following line to check for updates every N seconds
        # instead of just on startup
        # reactor.callLater(N, self.check_for_update, reactor)
        if marionette_tg.conf.get("general.autoupdate"):
            self.do_update(self.reload_driver)

    def do_update(self, callback):
        # could be replaced with code that updates from a different
        # source (e.g., local computations)
        update_server = marionette_tg.conf.get("general.update_server")
        updater = marionette_tg.updater.FormatUpdater(update_server, use_marionette=False, callback=callback)
        return updater.do_update()
|
djpine/pyman | Book/chap7/Supporting Materials/daysBtwnDates.py | Python | cc0-1.0 | 740 | 0.02027 | import numpy as np
def leapyear(year):
    """Return True if *year* is a Gregorian leap year."""
    def divisible(n):
        return year % n == 0
    # Leap iff divisible by 4, except centuries, except every 400 years.
    return divisible(4) and (not divisible(100) or divisible(400))
def leapyearcount(years):
    """Count the leap years in the integer array *years* (vectorised).

    Uses inclusion-exclusion: +1 per multiple of 4, -1 per century,
    +1 per multiple of 400.
    """
    plus4 = (years % 4 == 0).astype(int)
    minus100 = (years % 100 == 0).astype(int)
    plus400 = (years % 400 == 0).astype(int)
    return np.sum(plus4 - minus100 + plus400)
def daysBtwnDates(startDate, endDate):
    """Extract the year fields from two 'YYYY-Mon-DD' date strings.

    NOTE(review): despite the name, this only parses and returns the two
    years as ``(start_year, end_year)`` -- the actual day-count computation
    is evidently unfinished (TODO).
    """
    syear = int(startDate[:4])
    eyear = int(endDate[:4])
    return syear, eyear
# Demo / smoke test of the helpers above.  (This block also repairs two
# lines garbled in transit: ``l | eapyear`` and ``"2 | 013-Jan-15"``.)
for year in [1899, 1900, 1901, 1923, 1924, 1925, 1999, 2000, 2001, 2008, 2009]:
    print("{0:d} is a leap year: {1}".format(year, leapyear(year)))
print(leapyearcount(np.arange(1899,1999)))
out = daysBtwnDates("2012-Sep-21", "2013-Jan-15")
print(out)
marrow/WebCore | web/core/__init__.py | Python | mit | 424 | 0.004717 | # encoding: utf-8
# ## Imports

# (This block repairs an import line garbled in transit:
#  ``from threading import local | as __local``.)
from threading import local as __local

# Expose these as importable from the top-level `web.core` namespace.
from .application import Application
from .util import lazy

# ## Module Globals

__all__ = ['local', 'Application', 'lazy']  # Symbols exported by this package.

# This is to support the web.ext.local extension, and allow for early importing of the variable.
local = __local()
|
master-q/ATS-Postiats-contrib | contrib/libatscc/libatscc2py/CATS/string_cats.py | Python | mit | 791 | 0.007585 | ######
#
# HX-2014-08:
# for Python code translated from ATS
#
######
######
#beg of [string_cats.py]
######
######
from ats2pypre_basics_cats import *
######
############################################
def atspre_strlen(x):
    """Return the length of *x* (ATS runtime shim; idiomatic len() instead of x.__len__())."""
    return len(x)
############################################
def ats2pypre_string_get_at(x, i):
    """Return the character of string *x* at index *i* (ATS runtime shim)."""
    return x[i]
############################################
def ats2pypre_string_isalnum(x):
    """True iff *x* is non-empty and entirely alphanumeric (str.isalnum shim)."""
    return x.isalnum()
def ats2pypre_string_isalpha(x):
    """True iff *x* is non-empty and entirely alphabetic (str.isalpha shim)."""
    return x.isalpha()
def ats2pypre_string_isdecimal(x):
    """True iff *x* is non-empty and entirely decimal digits (str.isdecimal shim)."""
    return x.isdecimal()
############################################
def ats2pypre_string_lower(x):
    """Return a lower-cased copy of *x* (str.lower shim)."""
    return x.lower()
def ats2pypre_string_upper(x):
    """Return an upper-cased copy of *x* (str.upper shim)."""
    return x.upper()
############################################
###### end of [string_cats.py] ######
|
stryder199/RyarkAssignments | Assignment2/web2py/gluon/storage.py | Python | mit | 5,788 | 0.002592 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
This file is part of the web2py Web Framework
Copyrighted by Massimo Di Pierro <mdipierro@cs.depaul.edu>
License: LGPLv3 (http://www.gnu.org/licenses/lgpl.html)
Provides:
- List; like list but returns None instead of IndexOutOfBounds
- Storage; like dictionary allowing also for `obj.foo` for `obj['foo']`
"""
import cPickle
import portalocker
__all__ = ['List', 'Storage', 'Settings', 'Messages',
'StorageList', 'load_storage', 'save_storage']
class List(list):
    """list subclass whose call operator performs bounds-checked access.

    ``a(i)`` returns ``a[i]`` when ``0 <= i < len(a)`` and *default*
    (None unless given) otherwise -- no IndexError is ever raised.
    Note that negative indices fall outside the checked range and thus
    return the default.
    """

    def __call__(self, i, default=None):
        in_range = 0 <= i < len(self)
        return self[i] if in_range else default
class Storage(dict):
    """
    A Storage object is like a dictionary except `obj.foo` can be used
    in addition to `obj['foo']`.

        >>> o = Storage(a=1)
        >>> o.a
        1
        >>> o['a']
        1
        >>> o.a = 2
        >>> o['a']
        2
        >>> del o.a
        >>> o.a is None
        True

    """

    def __getattr__(self, key):
        # Missing attributes read as None rather than raising AttributeError.
        if key in self:
            return self[key]
        else:
            return None

    def __setattr__(self, key, value):
        # Assigning None removes the key (if present).  Fix: use an identity
        # test -- the previous ``value == None`` also matched any object that
        # merely compared equal to None.
        if value is None:
            if key in self:
                del self[key]
        else:
            self[key] = value

    def __delattr__(self, key):
        if key in self:
            del self[key]
        else:
            # Fix: parenthesised raise works on both Python 2 and 3; the old
            # ``raise AttributeError, msg`` form is Python-2-only syntax.
            raise AttributeError("missing key=%s" % key)

    def __repr__(self):
        return '<Storage ' + dict.__repr__(self) + '>'

    def __getstate__(self):
        # Pickle as a plain dict (attribute protocol is purely derived).
        return dict(self)

    def __setstate__(self, value):
        for (k, v) in value.items():
            self[k] = v

    def getlist(self, key):
        """Return a Storage value as a list.

        If the value is a list it will be returned as-is.
        If object is None, an empty list will be returned.
        Otherwise, [value] will be returned.

        Example output for a query string of ?x=abc&y=abc&y=def
        >>> request = Storage()
        >>> request.vars = Storage()
        >>> request.vars.x = 'abc'
        >>> request.vars.y = ['abc', 'def']
        >>> request.vars.getlist('x')
        ['abc']
        >>> request.vars.getlist('y')
        ['abc', 'def']
        >>> request.vars.getlist('z')
        []
        """
        value = self.get(key, None)
        if isinstance(value, (list, tuple)):
            return value
        elif value is None:
            return []
        return [value]

    def getfirst(self, key):
        """Return the first or only value when given a request.vars-style key.

        If the value is a list, its first item will be returned;
        otherwise, the value will be returned as-is.

        Example output for a query string of ?x=abc&y=abc&y=def
        >>> request = Storage()
        >>> request.vars = Storage()
        >>> request.vars.x = 'abc'
        >>> request.vars.y = ['abc', 'def']
        >>> request.vars.getfirst('x')
        'abc'
        >>> request.vars.getfirst('y')
        'abc'
        >>> request.vars.getfirst('z')
        """
        value = self.getlist(key)
        if len(value):
            return value[0]
        return None

    def getlast(self, key):
        """Returns the last or only single value when given a request.vars-style key.

        If the value is a list, the last item will be returned;
        otherwise, the value will be returned as-is.

        Simulated output with a query string of ?x=abc&y=abc&y=def
        >>> request = Storage()
        >>> request.vars = Storage()
        >>> request.vars.x = 'abc'
        >>> request.vars.y = ['abc', 'def']
        >>> request.vars.getlast('x')
        'abc'
        >>> request.vars.getlast('y')
        'def'
        >>> request.vars.getlast('z')
        """
        value = self.getlist(key)
        if len(value):
            return value[-1]
        return None
class StorageList(Storage):
    """
    like Storage but missing elements default to [] instead of None
    """

    def __getattr__(self, key):
        # Materialise an empty list on first access so callers can append
        # immediately (behaves like defaultdict(list) for attributes).
        if key not in self:
            self[key] = []
        return self[key]
def load_storage(filename):
    """Load a pickled Storage from *filename* under an exclusive file lock.

    Fix: the file handle is now closed even when locking or unpickling
    raises (the original leaked it on error).  Also repairs a function name
    garbled in transit (``load_sto | rage``).
    """
    fp = open(filename, 'rb')
    try:
        portalocker.lock(fp, portalocker.LOCK_EX)
        storage = cPickle.load(fp)
        portalocker.unlock(fp)
    finally:
        fp.close()
    return Storage(storage)
def save_storage(storage, filename):
    """Pickle *storage* (as a plain dict) to *filename* under an exclusive lock.

    Fix: the file handle is now closed even when locking or pickling raises
    (the original leaked it on error).
    """
    fp = open(filename, 'wb')
    try:
        portalocker.lock(fp, portalocker.LOCK_EX)
        cPickle.dump(dict(storage), fp)
        portalocker.unlock(fp)
    finally:
        fp.close()
class Settings(Storage):
    """Storage whose key set and values can be frozen.

    Setting ``lock_keys`` forbids creating new keys; setting ``lock_values``
    forbids any further assignment.  Violations raise SyntaxError (historical
    web2py behaviour, kept for compatibility).
    """

    def __setattr__(self, key, value):
        # Fix: parenthesised raise works on Python 2 and 3; the old
        # ``raise SyntaxError, msg`` form is Python-2-only syntax.
        if key != 'lock_keys' and self.get('lock_keys', None)\
                and key not in self:
            raise SyntaxError('setting key \'%s\' does not exist' % key)
        if key != 'lock_values' and self.get('lock_values', None):
            raise SyntaxError('setting value cannot be changed: %s' % key)
        self[key] = value
class Messages(Storage):
    """Settings-like Storage whose string values are run through the
    translator ``T`` on attribute access."""

    def __init__(self, T):
        self['T'] = T

    def __setattr__(self, key, value):
        # Fix: parenthesised raise works on Python 2 and 3; the old
        # ``raise SyntaxError, msg`` form is Python-2-only syntax.
        if key != 'lock_keys' and self.get('lock_keys', None)\
                and key not in self:
            raise SyntaxError('setting key \'%s\' does not exist' % key)
        if key != 'lock_values' and self.get('lock_values', None):
            raise SyntaxError('setting value cannot be changed: %s' % key)
        self[key] = value

    def __getattr__(self, key):
        value = self[key]
        if isinstance(value, str):
            # String messages are translated and coerced back to str.
            return str(self['T'](value))
        return value
if __name__ == '__main__':
    # Run the doctests embedded in the docstrings above.
    import doctest
    doctest.testmod()
|
biosustain/marsi | marsi/cli/app.py | Python | apache-2.0 | 1,357 | 0.000737 | # Copyright 2016 Chr. Hansen A/S and The Novo Nordisk Foundation Center for Biosustainability, DTU.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys

from cement.core.foundation import CementApp

from marsi.cli.controllers import MarsiBaseController
from marsi.cli.controllers.chemistry import ChemistryController
from marsi.cli.controllers.database import DatabaseController
from marsi.cli.controllers.modeling import OptimizationController
class MarsiApp(CementApp):
    """Cement application wiring together the marsi command-line controllers."""
    class Meta:
        label = 'marsi'
        base_controller = 'base'
        # Controllers registered with the app; Cement dispatches
        # sub-commands to these.
        handlers = [
            MarsiBaseController,
            DatabaseController,
            OptimizationController,
            ChemistryController
        ]
def main():
    """Entry point for the ``marsi`` console script.

    Exits with status 130 (128 + SIGINT) on Ctrl-C, mirroring shell
    convention.  Fix: use ``sys.exit`` -- the ``exit()`` builtin is injected
    by the ``site`` module for interactive use and is not guaranteed to be
    available (e.g. under ``python -S``).
    """
    try:
        with MarsiApp() as app:
            app.run()
    except KeyboardInterrupt:
        print("Cancelled by user")
        sys.exit(130)
|
ArchAssault-Project/archassaultweb | todolists/migrations/0005_add_slugs.py | Python | gpl-2.0 | 9,702 | 0.007833 | # -*- coding: utf-8 -*-
from south.db import db
from south.v2 import DataMigration
from django.db import models
from django.template.defaultfilters import slugify
class Migration(DataMigration):
def forwards(self, orm):
existing = list(orm.Todolist.objects.values_list(
'slug', flat=True).distinct())
for item in orm.Todolist.objects.defer('raw').filter(slug=None):
suffixed = slug = slugify(item.name)
suffix = 1
while suffixed in existing:
suffix += 1
suffixed = "%s-%d" % (slug, suffix)
item.slug = suffixed
existing.append(suffixed)
item.save()
def backwards(self, orm):
orm.Todolist.objects.all.update(slug=None)
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'main.arch': {
'Meta': {'ordering': "('name',)", 'object_name': 'Arch', 'db_table': "'arches'"},
'agnostic': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'})
},
'main.package': {
'Meta': {'ordering': "('pkgname',)", 'unique_together': "(('pkgname', 'repo', 'arch'),)", 'object_name': 'Package', 'db_table': "'packages'"},
'arch': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'packages'", 'on_delete': 'models.PROTECT', 'to': "orm['main.Arch']"}),
'build_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'compressed_size': ('main.fields.PositiveBigIntegerField', [], {}),
'epoch': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'filename': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'files_last_update': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'flag_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'installed_size': ('main.fields.PositiveBigIntegerField', [], {}),
'last_update': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}),
'packager': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'packager_str': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'pgp_signature': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'pkgbase': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'pkgdesc': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'pkgname': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'pkgrel': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'pkgver': ('django.db.models.fie | lds.CharField', [], {'max_length': '255'}),
'repo': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'packages'", 'on_delete': 'models.PROTECT', 'to': "orm['main.Repo']"}),
'url': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'})
},
'main.repo': {
| 'Meta': {'ordering': "('name',)", 'object_name': 'Repo', 'db_table': "'repos'"},
'bugs_category': ('django.db.models.fields.SmallIntegerField', [], {'default': '2'}),
'bugs_project': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'staging': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'svn_root': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'testing': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
'todolists.todolist': {
'Meta': {'object_name': 'Todolist'},
'created': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}),
'creator': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'created_todolists'", 'on_delete': 'models.PROTECT', 'to': "orm['auth.User']"}),
'description': ('django.db.models.fields.TextField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_modified': ('django.db.models.fields.DateTimeField', [], {}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'old_id': ('django.db.models.fields.IntegerField', [], {'unique': 'True', 'null': 'True'}),
'raw': ('django.db.models.fields.TextField', |
rajpushkar83/pbs | cloudmesh_pbs/DbPBS.py | Python | apache-2.0 | 4,729 | 0.000846 | from __future__ import print_function
import os
import abc
import shelve
from pprint import pprint
from cloudmesh_base.tables import dict_printer
from cloudmesh_base.Shell import Shell
from cloudmesh_base.util import banner
from cloudmesh_base.util import path_expand
from cloudmesh_pbs.OpenPBS import OpenPBS
class pbs_db_interface(object):
    """Abstract base class describing the persistence interface for PBS job data."""

    __metaclass__ = abc.ABCMeta  # Python 2 style ABC registration

    # concrete subclasses populate this with a dict-like store
    db = None

    def data(self):
        """Return a plain dict snapshot of the database."""
        return dict(self.db)

    def __getitem__(self, index):
        return self.db[index]

    def __setitem__(self, index, value):
        self.db[index] = value

    @abc.abstractmethod
    def load(self, filename):
        """loads the saved database from the file"""

    @abc.abstractmethod
    def get(self, id):
        """get the object with the id"""

    @abc.abstractmethod
    def set(self, id, value):
        """set the objet with the id to value"""

    def set_filename(self, filename):
        """remember the path of the backing database file"""
        self.filename = filename

    def remove(self):
        """Delete the backing file; ignore the error if it does not exist."""
        try:
            os.remove(self.filename)
        except OSError:
            # Fix: the previous bare ``except:`` swallowed *every* error
            # (including programming errors such as an unset filename);
            # only missing-file / permission errors are expected here.
            pass

    @abc.abstractmethod
    def save(self):
        """save the cloudmesh_job"""

    @abc.abstractmethod
    def update(self):
        """load the cloudmesh_job"""
class DbPBS(pbs_db_interface):
    """Shelve-backed implementation of the PBS job database.

    Fixes in this revision: clear() no longer mutates the shelve while
    iterating it; keys() actually returns the keys; repairs a token garbled
    in transit (``output=ou | tput``).
    """

    def __init__(self, filename=None):
        self.pbs = OpenPBS(deploy=True)
        self.open()

    def open(self, filename=None):
        """Open (creating if necessary) the shelve database file."""
        if filename is not None:
            self.filename = filename
        else:
            self.filename = path_expand(self.pbs.database_filename())
        path = os.path.dirname(self.filename)
        Shell.mkdir(path)
        self.load()

    def clear(self):
        """Delete every job record and flush."""
        # Fix: materialise the key list first -- deleting entries while
        # iterating the live mapping raises RuntimeError.
        for id in list(self.db.keys()):
            del self.db[id]
        self.save()

    def load(self):
        """load the cloudmesh_job"""
        print('loading', self.filename)
        # remove db ending so that shelve automatically adds it
        self.filename = self.filename.replace(".db", "")
        self.db = shelve.open(self.filename, writeback=True)

    def save(self):
        self.db.sync()

    def get(self, id):
        return self.db[id]

    def status(self, id):
        """Return the PBS job_state string for job *id*."""
        return self.get(id)["job_state"]

    def set(self, id, value):
        self.db[id] = value
        self.save()

    def keys(self):
        # Fix: the original computed the key view but never returned it.
        return self.data().keys()

    def delete(self, id):
        del self.db[id]

    def close(self):
        self.db.close()

    def update(self, host=None, user=True):
        """Refresh the database from ``qstat`` on *host*."""
        if host is None:
            print("host is none is not supported yet")
            raise
        print("QSTAT")
        r = dict(self.pbs.qstat(host, user=user, format='dict'))
        pprint(r)
        if r is not {}:
            # NOTE(review): ``is not {}`` is always True (identity test);
            # presumably ``r != {}`` was intended -- behaviour kept as-is.
            for jobid in r:
                self.db[jobid] = r[jobid]
            self.save()
        else:
            print("no jobs found after query")
        print("update completed")

    def info(self):
        print("Filename:", self.filename)

    def list(self, attributes=None, output="table"):
        """Render the stored jobs via dict_printer in the requested format."""
        if self.db is None or len(self.db) == 0:
            print("No jobs found")
            return None
        columns = attributes
        if columns is None:
            columns = ["cm_jobid", "cm_host", "cm_user", "Job_Name", "job_state", "exit_status"]
        # prepare the dict
        d = {}
        for jobid in self.db:
            content = {}
            for attribute in columns:
                try:
                    content[attribute] = self.db[jobid][attribute]
                except:
                    content[attribute] = "None"
            d[jobid] = content
        # print the dict
        if output in ["csv", "table", "dict", "yaml"]:
            return dict_printer(d, order=columns, output=output)
        return None

    def qsub(self, name, host, script, template=None, kind="dict"):
        r = self.pbs.qsub(name, host, script, template=template, kind=kind)
        pprint(r)
        return dict(r)
# Manual smoke test of DbPBS against the "india" host.  (This block also
# repairs a call garbled in transit: ``db.info | ()``.)
if __name__ == "__main__":
    qsub = False

    db = DbPBS()
    db.clear()
    db.info()
    db.update(host="india", user=False)
    print(db.list(output="table"))
    print(db.list(output="csv"))
    print(db.list(output="dict"))
    print(db.list(output="yaml"))

    banner("user")
    db.clear()
    db.update(host="india")
    print(db.list(output="table"))

    if qsub:
        banner('qsub')
        pbs = OpenPBS()
        jobname = "job-" + pbs.jobid + ".pbs"
        host = "india"
        script_template = pbs.read_script("etc/job.pbs")
        print(script_template)
        r = db.qsub(jobname, host, 'echo "Hello"', template=script_template)
        pprint(r)
        banner('variable list')
        pprint(OpenPBS.variable_list(r))
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.