repo_name stringlengths 5 100 | path stringlengths 4 231 | language stringclasses 1 value | license stringclasses 15 values | size int64 6 947k | score float64 0 0.34 | prefix stringlengths 0 8.16k | middle stringlengths 3 512 | suffix stringlengths 0 8.17k |
|---|---|---|---|---|---|---|---|---|
caioau/personal | fluxos/lista 2/grafo-6.py | Python | gpl-3.0 | 1,795 | 0.015042 | #!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Created on Sun Mar 27 01:20:46 2016
@author: caioau
"""
import matplotlib.pyplot as plt
import networkx as nx
from networkx.drawing.nx_agraph import graphviz_layout
def main():
G = nx.Graph() # G eh um grafo direcionado
# gera o grafo apartir de suas arestas
G.add_weighted_edges_from([(1, 2, 2), (1, 3, 15), (2, 3, 24), (2, 4, 4), (2, 5, 11), (3, 4, 2), (3, 5, 10), (4, 5, 5), (4, 6, 15), (5, 6, 18)])
for i in G.edges():
# print i[0], i[1]
G[i[0]][i[1]][" | color"] = "black"
# G[1][2]["color"] = "red"
comprimento, caminho = nx.single_source_dijkstra(G, 1)
print caminho
for i in caminho:
# print i, comprimento[i], caminho[i]
for j in range(1, len(caminho[i])):
print caminho[i][j-1], caminho[i] | [j]
G[caminho[i][j-1]][caminho[i][j]]["color"] = "red"
desenhaGrafo(G, "grafo-6.png")
T = nx.minimum_spanning_tree(G)
desenhaGrafo(T, "grafo-6arv.png")
def desenhaGrafo(G,pngfilename): # desenha o grafo e salva numa imagem png
edge_labels=dict([((u,v,),d['weight']) # gera os labels das arestas
for u,v,d in G.edges(data=True)])
colors = [G[u][v]['color'] for u,v in G.edges()]
pos = graphviz_layout(G,prog='fdp') # obtem a posicao dos nos (para desenhalo) # TODO: desativar isso?
nx.draw_networkx_edges(G,pos, edge_color=colors) # desenha as arestas
nx.draw_networkx_labels(G,pos) # desenha os labels das arestas
nx.draw_networkx_edge_labels(G,pos,edge_labels=edge_labels) # desenha os labels dos nos
nx.draw_networkx_nodes(G,pos,node_color='w') # desenha os nos
plt.axis('off') # desativa os eixos
plt.savefig(pngfilename)
plt.close("all")
if __name__ == "__main__":
main()
|
Pretagonist/Flexget | tests/test_metainfo.py | Python | mit | 6,791 | 0.003534 | from __future__ import unicode_literals, division, absolute_import
import pytest
class TestMetainfo(object):
config = """
tasks:
test_content_size:
mock:
- {title: 'size 10MB', description: 'metainfo content size should parse size 10.2MB from this'}
- {title: 'size 200MB', description: 'metainfo content size should parse size 200MB from this'}
- {title: 'size 1024MB', description: 'metainfo content size should parse size 1.0GB from this'}
"""
def test_content_size(self, execute_task):
"""Metainfo: parse content size"""
task = execute_task('test_content_size')
assert task.find_entry(content_size=10), 'Content size 10 MB absent'
assert task.find_entry(content_size=200), 'Content size 200 MB absent'
assert task.find_entry(content_size=1024), 'Content size 1024 MB absent'
class TestMetainfoImdb(object):
config = """
tasks:
test:
mock:
- {title: 'Scan Test 1', description: 'title: Foo Bar Asdf\n imdb-url: http://www.imdb.com/title/tt0330793/ more text'}
- {title: 'Scan Test 2', description: '<a href="http://imdb.com/title/tt0472198/">IMDb</a>'}
- {title: 'Scan Test 3', description: 'nothing here'}
- {title: 'Scan Test 4', description: 'imdb.com/title/tt66666 http://imdb.com/title/tt99999'}
"""
def test_imdb(self, execute_task):
"""Metainfo: imdb url"""
task = execute_task('test')
assert task.find_entry(imdb_url='http://www.imdb.com/title/tt0330793/'), \
'Failed to pick url from test 1'
assert task.find_entry(imdb_url='http://www.imdb.com/title/tt0472198/'), \
'Failed to pick url from test 2'
assert not task.find_entry(imdb_url='http://www.imdb.com/title/tt66666/'), \
'Failed to ignore multiple imdb urls in test 4'
assert not task.find_entry(imdb_url='http://www.imdb.com/title/tt99999/'), \
'Failed to ignore multiple imdb urls in test 4'
class TestMetainfoQuality(object):
config = """
tasks:
test:
mock:
- {title: 'FooBar.S01E02.720p.HDTV'}
- {title: 'ShowB.S04E19.Name of Ep.720p.WEB-DL.DD5.1.H.264'}
- {title: 'Good.Movie.hdtv', description: '720p'}
"""
def test_quality(self, execute_task):
task = execute_task('test')
entry = task.find_entry(title='FooBar.S01E02.720p.HDTV')
assert entry, 'entry not found?'
assert 'quality' in entry, 'failed to pick up quality'
assert entry['quality'].name == '720p hdtv', 'picked up wrong quality %s' % entry.get('quality', None)
entry = task.find_entry(title='ShowB.S04E19.Name of Ep.720p.WEB-DL.DD5.1.H.264')
assert entry, 'entry not found?'
assert 'quality' in entry, 'failed to pick up quality'
assert entry['quality'].name == '720p webdl h264 dd5.1', \
'picked up wrong quality %s' % entry.get('quality', None)
# quality in description should not override one found in title
entry = task.find_entry(title='Good.Movie.hdtv')
assert 'quality' in entry, 'failed to pick up quality'
assert entry['quality'].name == 'hdtv', 'picked up wrong quality %s' % entry.get('quality', None)
class TestMetainfoSeries(object):
_config = """
templates:
global:
parsing:
series: __parser__
metainfo_series: yes
tasks:
test:
mock:
- {title: 'FlexGet.S01E02.TheName.HDTV.xvid'}
- {title: 'some.series.S03E14.Title.Here.720p'}
- {title: '[the.group] Some.Series.S03E15.Title.Two.720p'}
- {title: 'HD 720p: Some series.S03E16.Title.Three'}
- {title: 'Something.Season.2.1of4.Ep.Title.HDTV.torrent'}
- {title: 'Show-A (US) - Episode Title S02E09 hdtv'}
- {title: | "Jack's.Show.S03E01.blah.1080p"}
fals | e_positives:
mock:
- {title: 'FlexGet.epic'}
- {title: 'FlexGet.Apt.1'}
- {title: 'FlexGet.aptitude'}
- {title: 'FlexGet.Step1'}
- {title: 'Something.1x0.Complete.Season-FlexGet'}
- {title: 'Something.1xAll.Season.Complete-FlexGet'}
- {title: 'Something Seasons 1 & 2 - Complete'}
- {title: 'Something Seasons 4 Complete'}
- {title: 'Something.S01D2.DVDR-FlexGet'}
"""
@pytest.fixture(scope='class', params=['internal', 'guessit'], ids=['internal', 'guessit'])
def config(self, request):
"""Override and parametrize default config fixture for all series tests."""
return self._config.replace('__parser__', request.param)
def test_metainfo_series(self, execute_task):
"""Metainfo series: name/episode"""
# We search for series name in title case to make sure case is being normalized
task = execute_task('test')
assert task.find_entry(series_name='Flexget', series_season=1, series_episode=2, quality='hdtv xvid'), \
'Failed to parse series info'
assert task.find_entry(series_name='Some Series', series_season=3, series_episode=14, quality='720p'), \
'Failed to parse series info'
assert task.find_entry(series_name='Something', series_season=2, series_episode=1, quality='hdtv'), \
'Failed to parse series info'
# Test unwanted prefixes get stripped from series name
assert task.find_entry(series_name='Some Series', series_season=3, series_episode=15, quality='720p'), \
'Failed to parse series info'
assert task.find_entry(series_name='Some Series', series_season=3, series_episode=16, quality='720p'), \
'Failed to parse series info'
# Test episode title and parentheses are stripped from series name
assert task.find_entry(series_name='Show-a Us', series_season=2, series_episode=9, quality='hdtv'), \
'Failed to parse series info'
assert task.find_entry(series_name='Jack\'s Show', series_season=3, series_episode=1, quality='1080p'), \
'Failed to parse series info'
def test_false_positives(self, execute_task):
"""Metainfo series: check for false positives"""
task = execute_task('false_positives')
for entry in task.entries:
# None of these should be detected as series
error = '%s should not be detected as a series' % entry['title']
assert 'series_name' not in entry, error
assert 'series_guessed' not in entry, error
assert 'series_parser' not in entry, error
|
peterstark72/shapelink | shapelink.py | Python | mit | 7,087 | 0.016932 | #!/usr/bin/env python
import urllib
import urllib2
import json
import hashlib
import datetime
from collections import OrderedDict
AUTH_REQUIRETOKEN = "/auth/requiretoken"
BASEURL = "http://api.shapelink.com"
class ShapelinkException( Exception ):
pass
SHAPELINK_METHODS = {
'get_token' : {
'required' : ['username', 'password'],
'optional' : [],
'endpoint' : "/auth/requiretoken"
},
'get_user' : {
'required' : ['user_token'],
'optional' : ['user_id'],
'endpoint' : "/user/get"
},
'get_profile' : {
'required' : ['user_token', 'culture'],
'optional' : ['user_id'],
'endpoint' : "/user/getProfile"
},
'get_day' : {
'required' : ['user_token', 'date', 'culture'],
'optional' : [],
'endpoint' : "/diary/getDay"
},
'get_activities' : {
'required' : ['user_token', 'type', 'culture'],
'optional' : [],
'endpoint' : "/diary/getActivities"
},
'get_user_challenges' : {
'required' : ['user_token', 'culture'],
'optional' : ['active', 'user_id','string'],
'endpoint' : "/challenge/getUserChallenges"
},
'get_challenge' : {
'required' : ['user_token', 'challenge_id', 'culture'],
'optional' : [],
'endpoint' : "/challenge/getChallenge"
},
'get_challenges' : {
'required' : ['user_token'],
'optional' : ['string', 'type', 'category','hide_my'],
'endpoint' : "/challenge/getChallenges"
},
'get_challengeresult' : {
'required' : ['challenge_id'],
'optional' : [],
'endpoint' : "/challenge/getResults"
},
'save_weight' : {
'required' : ['value', 'date'],
'optional' : ['weight_id','description'],
'endpoint' : "/diary/saveWeightNotation"
},
'get_weight' : {
'required' : ['user_token','weight_id'],
'optional' : [],
'endpoint' : "/diary/getWeightNotation"
},
'get_days' : {
'required' : ['user_token','start_date', 'end_date'],
'optional' : [],
'endpoint' : "/statistics/getDays"
},
'get_summary' : {
'required' : ['user_token'],
'optional' : [],
'endpoint' : "/statistics/getTrainingSummary"
},
'get_health' : {
'required' : ['user_token', 'culture'],
'optional' : [],
'endpoint' : "/statistics/getHealthSummary"
}
}
def loadurl(url):
'''Loads resource from URL. Raises ShapelinkException if error'''
try:
response = json.loads(urllib2.urlopen(url).read())
except IOError:
raise ShapelinkException("Could not access Shapelink server")
status = response.get('status', None)
if not status or status == "error":
raise ShapelinkException(response.get('message'))
return response.get('result')
class ShapelinkAccumulator:
'''Used by Shapelink API object to generate methods for all Shapelink API calls'''
def __init__( self, shapelink_obj, name ):
self.shapelink_obj = shapelink_obj
self.name = name
def __repr__( self ):
return self.name
def __call__(self, *args, **kw ):
return self.shapelink_obj.call_method( self.name, *args, **kw )
class ShapelinkApi(object):
'''Wrapper for the API available from developer.shapelink.com'''
def __init__(self, apikey, secret):
self.apikey = apikey
self.secret = secret
for method, _ in SHAPELINK_METHODS.items():
if not hasattr( self, method ):
setattr( self, method, ShapelinkAccumulator( self, method ))
def call_method(self, method, *args, **kw):
'''Generic method for calling an API endoint'''
meta = SHAPELINK_METHODS[method]
kw['apikey'] = self.apikey
if args:
names = meta['required'] + meta['optional']
for i in range(len(args)):
kw[names[i]] = args[i]
url = BASEURL + meta['endpoint'] + "?" + urllib.urlencode(kw) + "&sig=" + self._calcsig(kw)
return loadurl(url)
def _calcsig(self, params):
'''Signs API parameters according to algorithm at
http://developer.shapelink.com/index.php/Creating_the_request_signature
'''
sorted_params = OrderedDict(sorted(params.items(), key=lambda t: t[0]))
s = ""
for k,v in sorted_params.items():
s += "=".join((k,str(v)))
s += self.secret #don't forget to add secret
return hashlib.md5(s).hexdigest()
def get_api(apikey, secret):
'''Returns API object from given API-key and secret
apikey - Shapelink API-key
secret - Shapelink API-secret
'''
return ShapelinkApi(apikey, secret)
def get_api_fromsecrets(fname):
'''Returns API object from API-key and secret available in the file
fname - name of file that contais the API-key and secret
'''
credentials = json.load(open(fname, "r"))
apikey = credentials.get('APIKEY', None)
secret = credentials.get('SECRET', None)
return ShapelinkApi(apikey, secret)
def get_user(api, username, password):
'''Returns a ShapelinkUser object
api - API object
username - Shapelink username
password - Shapelink password
'''
result = api.get_token(username, password)
return ShapelinkUser(api, result.get('token'))
class ShapelinkUser( | object):
def __init__(self, api, user_token, culture="sv"):
self.user_token = user_token
self.api = api
self.culture = culture
def diar | y(self, date=None):
'''Returns day activity for a specific day, today is default.
date - YYYY-MM-DD
'''
if not date:
date = datetime.datetime.today().strftime("%Y-%m-%d")
return self.api.get_day(self.user_token, date, self.culture)
def profile(self):
return self.api.get_profile(self.user_token, self.culture)
def activities(self, type_):
return self.api.get_activities(self.user_token,
type_, self.culture)
def joined_challenges(self):
'''Returns user's chanllenges'''
return self.api.get_user_challenges(self.user_token, self.culture)
def challenges(self):
'''Returns available chanllenges to join'''
return self.api.get_challenges(self.user_token)
def challenge(self, challenge_id):
'''Returns a specific chanllenge'''
return self.api.get_challenge(self.user_token,
challenge_id, self.culture)
def stats(self, start_date, end_date):
'''Returns day by day stats'''
return self.api.get_days(self.user_token, start_date, end_date)
def save_weight(self, value, date):
return self.api.save_weight(self.user_token, value, date)
def health(self):
return self.api.get_health(self.user_token, self.culture)
|
kdeloach/nyc-trees | src/nyc_trees/apps/survey/views.py | Python | agpl-3.0 | 26,088 | 0 | # -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
import os
import json
import shortuuid
from pytz import timezone
from celery import chain
from django_tinsel.exceptions import HttpBadRequestException
from django.conf import settings
from django.contrib.gis.geos import Point
from django.core.exceptions import ValidationError, PermissionDenied
from django.core.urlresolvers import reverse
from django.db import transaction, connection
from django.db.models import Q
from django.http import (HttpResponse, HttpResponseForbidden,
HttpResponseBadRequest)
from django.shortcuts import get_object_or_404, redirect
from django.utils.timezone import now
from django.utils.html import escape
from apps.core.models import User, Group
from apps.core.helpers import (user_is_group_admin, user_is_individual_mapper,
user_is_census_admin)
from apps.event.models import Event
from apps.event.helpers import (user_is_checked_in_to_event,
user_is_rsvped_for_event)
from apps.mail.tasks import notify_reservation_confirmed
from libs.pdf_maps import create_reservations_map_pdf
from apps.users import can_show_full_name
from apps.users.models import TrustedMapper
from apps.survey.models import (BlockfaceReservation, Blockface, Territory,
Survey, Tree, Species, CURB_CHOICES,
STATUS_CHOICES, CERTAINTY_CHOICES,
HEALTH_CHOICES, STEWARDSHIP_CHOICES,
GUARD_CHOICES, SIDEWALK_CHOICES,
PROBLEMS_CHOICES)
from apps.survey.layer_context import (
get_context_for_reservations_layer, get_context_for_reservable_layer,
get_context_for_progress_layer, get_context_for_territory_survey_layer,
get_context_for_printable_reservations_layer,
get_context_for_group_progress_layer, get_context_for_user_progress_layer,
get_context_for_borough_progress_layer, get_context_for_nta_progress_layer
)
from apps.survey.helpers import group_percent_completed
from libs.pdf_maps import create_and_save_pdf
_SURVEY_DETAIL_QUERY_FILE = os.path.join(os.path.dirname(__file__),
'survey_detail.sql')
with open(_SURVEY_DETAIL_QUERY_FILE, 'r') as f:
_S | URVEY_DETAIL_QUERY = f.read()
def progress_page(request):
context = {
'legend_entries': [
{'mode': 'all', 'css_class': 'mapped', 'label': 'Mapped'},
{'mode': 'all', 'css_class' | : 'not-mapped', 'label': 'Not mapped'},
{'mode': 'all', 'css_class': 'unmappable',
'label': 'Could not be mapped'},
{'mode': 'my', 'css_class': 'mapped', 'label': 'Mapped by you'},
{'mode': 'my', 'css_class': 'not-mapped',
'label': 'Not mapped by you'},
{'mode': 'my', 'css_class': 'unmappable',
'label': 'Could not be mapped'},
{'mode': 'group', 'css_class': 'mapped',
'label': 'Mapped by this group'},
{'mode': 'group', 'css_class': 'not-mapped',
'label': 'Not mapped'},
{'mode': 'group', 'css_class': 'unmappable',
'label': 'Could not be mapped'},
],
'percentage_ramps': range(0, 100, 10),
'legend_mode': 'all-percent',
'layer_all': get_context_for_progress_layer(),
'layer_all_nta': get_context_for_nta_progress_layer(),
'layer_all_borough': get_context_for_borough_progress_layer(),
'help_shown': _was_help_shown(request, 'progress_page_help_shown')
}
user = request.user
if user.is_authenticated():
context['layer_my'] = get_context_for_user_progress_layer(request)
blocks = (user.surveys.distinct('blockface')
.values_list('blockface_id', flat=True))
if len(blocks) > 0:
blockfaces = Blockface.objects.filter(id__in=blocks).collect()
context['my_bounds'] = list(blockfaces.extent)
return context
def _was_help_shown(request, help_shown_attr):
"""
help_shown_attr is a user attribute specifying whether help has been
shown on a particular page. We also use it as a session attribute
for non-logged-in users.
Calling this function returns the current attribute value, and also
sets it to True (so it will only be shown once).
"""
help_shown = request.session.get(help_shown_attr, False)
request.session[help_shown_attr] = True
user = request.user
if user.is_authenticated():
user_help_shown = getattr(user, help_shown_attr)
help_shown = help_shown or user_help_shown
if not user_help_shown:
setattr(user, help_shown_attr, True)
user.save()
return help_shown
def progress_page_blockface_popup(request, blockface_id):
blockface = get_object_or_404(Blockface, id=blockface_id)
turf = Territory.objects.filter(blockface_id=blockface_id)
groups = Group.objects.filter(pk=turf.values_list('group_id', flat=True))
group = groups[0] if len(groups) else None
is_active = (group is None or group.is_active or
user_is_group_admin(request.user, group))
survey_type = _get_survey_type(blockface, request.user, group)
return {
'survey_type': survey_type,
'group': group,
'is_active': is_active
}
def _get_survey_type(blockface, user, group):
if user.is_authenticated():
reserved_by_user = BlockfaceReservation.objects \
.filter(blockface=blockface, user=user).current().exists()
if reserved_by_user:
return 'reserved'
try:
latest_survey = Survey.objects \
.filter(blockface=blockface) \
.latest('created_at')
if latest_survey.quit_reason:
return 'unmappable'
if user.is_authenticated() and user.pk in {
latest_survey.user_id, latest_survey.teammate_id}:
return 'surveyed-by-me'
else:
return 'surveyed-by-others'
except Survey.DoesNotExist:
pass
if group is None and blockface.is_available:
return 'available'
return 'unavailable'
def _query_reservation(user, blockface_id):
return BlockfaceReservation.objects \
.filter(blockface_id=blockface_id, user=user) \
.current()
def blockface_cart_page(request):
ids_str = request.POST.get('ids', None)
ids = ids_str.split(',') if ids_str else []
cancelled_reservations = _get_reservations_to_cancel(ids, request.user)
already_reserved_ids = _already_reserved_blockface_ids(ids)
return {
'blockface_ids': request.POST['ids'],
'num_reserved': len(ids) - already_reserved_ids.count(),
'num_cancelled': cancelled_reservations.count()
}
def user_reserved_blockfaces_geojson(request):
reservations = BlockfaceReservation.objects \
.select_related('blockface') \
.filter(user=request.user) \
.current()
est_tz = timezone('US/Eastern')
def get_formatted_expiration_date(reservation):
dt = reservation.expires_at.astimezone(est_tz)
return dt.strftime('%b %-d, %Y')
return [
{
'type': 'Feature',
'geometry': {
'type': 'MultiLineString',
'coordinates': reservation.blockface.geom.coords
},
'properties': {
'id': reservation.blockface.id,
'expires_at': get_formatted_expiration_date(reservation)
}
}
for reservation in reservations
]
def group_borders_geojson(request):
groups = Group.objects.filter(is_active=True)
base_group_layer_context = get_context_for_group_progress_layer()
base_group_tile_url = base_group_layer_context['tile_url']
base_group_grid_url = base_group_layer_context['grid_url']
return [
{
'type': 'Feature',
'geometry': {
'type': 'MultiPolygon',
'coordinates': l |
lielongxingkong/windchimes | swift/common/middleware/slo.py | Python | apache-2.0 | 19,534 | 0.000358 | # Copyright (c) 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Middleware that will provide Static Large Object (SLO) support.
This feature is very similar to Dynamic Large Object (DLO) support in that
it allows the user to upload many objects concurrently and afterwards
download them as a single object. It is different in that it does not rely
on eventually consistent container listings to do so. Instead, a user
defined manifest of the object segments is used.
----------------------
Uploading the Manifest
----------------------
After the user has uploaded the objects to be concatenated a manifest is
uploaded. The request | must be a PUT with the query parameter::
?multipart-manifest=put
The body of | this request will be an ordered list of files in
json data format. The data to be supplied for each segment is::
path: the path to the segment (not including account)
/container/object_name
etag: the etag given back when the segment was PUT
size_bytes: the size of the segment in bytes
The format of the list will be::
json:
[{"path": "/cont/object",
"etag": "etagoftheobjectsegment",
"size_bytes": 1048576}, ...]
The number of object segments is limited to a configurable amount, default
1000. Each segment, except for the final one, must be at least 1 megabyte
(configurable). On upload, the middleware will head every segment passed in and
verify the size and etag of each. If any of the objects do not match (not
found, size/etag mismatch, below minimum size) then the user will receive a 4xx
error response. If everything does match, the user will receive a 2xx response
and the SLO object is ready for downloading.
Behind the scenes, on success, a json manifest generated from the user input is
sent to object servers with an extra "X-Static-Large-Object: True" header
and a modified Content-Type. The parameter: swift_bytes=$total_size will be
appended to the existing Content-Type, where total_size is the sum of all
the included segments' size_bytes. This extra parameter will be hidden from
the user.
Manifest files can reference objects in separate containers, which will improve
concurrent upload speed. Objects can be referenced by multiple manifests. The
segments of a SLO manifest can even be other SLO manifests. Treat them as any
other object i.e., use the Etag and Content-Length given on the PUT of the
sub-SLO in the manifest to the parent SLO.
-------------------------
Retrieving a Large Object
-------------------------
A GET request to the manifest object will return the concatenation of the
objects from the manifest much like DLO. If any of the segments from the
manifest are not found or their Etag/Content Length no longer match the
connection will drop. In this case a 409 Conflict will be logged in the proxy
logs and the user will receive incomplete results.
The headers from this GET or HEAD request will return the metadata attached
to the manifest object itself with some exceptions::
Content-Length: the total size of the SLO (the sum of the sizes of
the segments in the manifest)
X-Static-Large-Object: True
Etag: the etag of the SLO (generated the same way as DLO)
A GET request with the query parameter::
?multipart-manifest=get
Will return the actual manifest file itself. This is generated json and does
not match the data sent from the original multipart-manifest=put. This call's
main purpose is for debugging.
When the manifest object is uploaded you are more or less guaranteed that
every segment in the manifest exists and matched the specifications.
However, there is nothing that prevents the user from breaking the
SLO download by deleting/replacing a segment referenced in the manifest. It is
left to the user use caution in handling the segments.
-----------------------
Deleting a Large Object
-----------------------
A DELETE request will just delete the manifest object itself.
A DELETE with a query parameter::
?multipart-manifest=delete
will delete all the segments referenced in the manifest and then the manifest
itself. The failure response will be similar to the bulk delete middleware.
------------------------
Modifying a Large Object
------------------------
PUTs / POSTs will work as expected, PUTs will just overwrite the manifest
object for example.
------------------
Container Listings
------------------
In a container listing the size listed for SLO manifest objects will be the
total_size of the concatenated segments in the manifest. The overall
X-Container-Bytes-Used for the container (and subsequently for the account)
will not reflect total_size of the manifest but the actual size of the json
data stored. The reason for this somewhat confusing discrepancy is we want the
container listing to reflect the size of the manifest object when it is
downloaded. We do not, however, want to count the bytes-used twice (for both
the manifest and the segments it's referring to) in the container and account
metadata which can be used for stats purposes.
"""
from urllib import quote
from cStringIO import StringIO
from datetime import datetime
import mimetypes
from hashlib import md5
from swift.common.swob import Request, HTTPBadRequest, HTTPServerError, \
HTTPMethodNotAllowed, HTTPRequestEntityTooLarge, HTTPLengthRequired, \
HTTPOk, HTTPPreconditionFailed, HTTPException, HTTPNotFound, \
HTTPUnauthorized
from swift.common.utils import json, get_logger, config_true_value
from swift.common.constraints import check_utf8, MAX_BUFFERED_SLO_SEGMENTS
from swift.common.http import HTTP_NOT_FOUND, HTTP_UNAUTHORIZED
from swift.common.wsgi import WSGIContext
from swift.common.middleware.bulk import get_response_body, \
ACCEPTABLE_FORMATS, Bulk
def parse_input(raw_data):
"""
Given a request will parse the body and return a list of dictionaries
:raises: HTTPException on parse errors
:returns: a list of dictionaries on success
"""
try:
parsed_data = json.loads(raw_data)
except ValueError:
raise HTTPBadRequest("Manifest must be valid json.")
req_keys = set(['path', 'etag', 'size_bytes'])
try:
for seg_dict in parsed_data:
if (set(seg_dict) != req_keys or
'/' not in seg_dict['path'].lstrip('/')):
raise HTTPBadRequest('Invalid SLO Manifest File')
except (AttributeError, TypeError):
raise HTTPBadRequest('Invalid SLO Manifest File')
return parsed_data
class SloContext(WSGIContext):
def __init__(self, slo, slo_etag):
WSGIContext.__init__(self, slo.app)
self.slo_etag = '"' + slo_etag.hexdigest() + '"'
def handle_slo_put(self, req, start_response):
app_resp = self._app_call(req.environ)
for i in xrange(len(self._response_headers)):
if self._response_headers[i][0].lower() == 'etag':
self._response_headers[i] = ('Etag', self.slo_etag)
break
start_response(self._response_status,
self._response_headers,
self._response_exc_info)
return app_resp
class StaticLargeObject(object):
"""
StaticLargeObject Middleware
See above for a full description.
The proxy logs created for any subrequests made will have swift.source set
to "SLO".
:param app: The next WSGI filter or app in the paste.deploy chain.
:param conf: The configuration dict for the middleware.
"""
def __init__(self, app, conf):
self.conf = conf
self.app = app
self.logger = get_logger(conf, log_route='slo')
self.max_manif |
ksterker/wastesedge | scripts/dialogues/talan_start.py | Python | gpl-2.0 | 11,989 | 0.035032 | import dialogue
import adonthell
# -- pygettext support
def _(message): return message
class talan_start (dialogue.base):
text = [None,\
_("Oh, hullo again, $name."),\
_("Listen, I really am sorry about that fuss at the gate. I hope you will not hold it against me."),\
_("Please do not mention it again. I assure you that I hold no grudge against you."),\
_("Oh... that. Yes, how may I help you?"),\
_("Hello, Talan. Look, I have a couple more questions about the theft."),\
_("In the morning, of course, I heard that Master Fingolso | n's gems had been stolen and that Lady Silverhair was suspected of the theft."),\
_("So you did not see or hear anything out of place that night?"),\
_("No... no, not at all."),\
_("If you do not have any more questions ..."),\
_("No, thank you, I have a lot of work to do. You have been most helpful."),\
_("I hear that Alek Endhelm was very interested in Fingolsons business. Have you notic | ed him doing anything suspicious?"),\
_("Oh, he's a nasty sort, he is, sir. But other than being a busybody, I haven't noticed him doing anything that would lead me to believe that he is the thief."),\
_("Now, if you don't have any more questions..."),\
_("I wish you had not lied to me about the night of the theft."),\
_("Wh... what do you mean?"),\
_("You were not, as you told me, manning your post the whole night."),\
_("Now you look here! I was, and I do not appreciate you calling me a liar."),\
_("Oh. So you do know ... ?"),\
_("Yes I do. And I believe you left your post that night, did you not, Talan?"),\
_("Yes, I did. But please do not tell Jelom, sir! He will have my hide for sure if he finds out! Please, I beg you!"),\
_("No, you missed it. Because you were off singing, weren't you?"),\
_("I'm afraid I was. Oh what terrible mistake! I am so sorry, $name. I did not know about the noise! It is my fault that Lady Silverhair is being held in her room!"),\
_("Calm down, Talan. Now I can prove that something strange happened that night at least."),\
_("And why was Lady Silverhair accused?"),\
_("Well, the most obvious reason is of course that she was interested in the gems, and made a trip here just to buy them."),\
_("But she felt the quality of the gems was too low, and she is hardly pressed for money, so why would she turn around and steal the gems?"),\
_("Well, according to Jelom, what better way to throw suspicion off yourself then to claim that the stones are of low quality?"),\
_("Ah, Jelom said that, did he? What else did he say?"),\
_("Who is Jelom? Your superiour?"),\
_("He is the other guard here. He is not exactly my superiour, but he is older than me and has been here much longer, so he is kind of in charge."),\
_("He found her behaviour suspicious. I mean, she is all high and mighty, acting above the rest of us, and she obviously doesn't like dwarves ..."),\
_("Wait! Why does everybody believe that she has an aversion to dwarves?"),\
_("Well, the way I heard the story, Bjarn made a point of insulting the entire elven race, loudly and publicly, upon her arrival!"),\
_("Well, as I heard it told, she had quite an argument with Master Fingolson in which she insulted the quality of his gems and then the dwarven race!"),\
_("Well, Master Fingolson can be, um ... blunt, I guess, at times, but ..."),\
_("Wasn't the argument rather Fingolson's fault, then?"),\
_("Well, I... ah, perhaps..."),\
_("Maybe you should speak to Jelom. He knows more about the theft and his reasons for suspecting her than I do..."),\
_("Well, you've talked to Jelom. You know what he thinks..."),\
_("And so I shall. Where is he?"),\
_("Yes. I do know what he thinks. That, however, does not help me too much."),\
_("He is guarding the hallway leading to Lady Silverhair's room."),\
_("Well, I shall go and talk to him, then. Thank you and good day."),\
_("Ah, $name, hello."),\
_("Hi there, Talan. Look, I am sorry that ..."),\
_("No, you did what you had to do, and it was only right that I got in trouble for leaving my post. And I owe you a big favour for covering for me!"),\
_("No, I understand. It was because of me shirking my duty that I did not see what happened that night. You had no choice, and I do not hold a grudge against you for doing your duty."),\
_("All the same, I am still sorry for getting you into trouble with Jelom. Thank you for understanding."),\
_("No, it was the least I could do. Do not worry about the favour."),\
_("I don't know if there is any coincidence, but this visit of Master Fingolson is very unusual. You see, he is quite a regular here. He comes every other month or so, and usually stays for a week before going back."),\
_("Usually he shows up a few days before the client, just to relax, I guess. Until about a year ago, he would spend this time in the common room, drinking ale and talking with Erek, his apprentice."),\
_("About a year ago, things changed. When he came, he came alone. He still showed up a couple of days ahead of the client, but he no longer visited the common room much. He just stayed down in his room."),\
_("But at this most recent visit, Bjarn got here the morning of Lady Silverhair's arrival, which was a little strange. What's more, he brought Erek again, although I thought he had finished his apprenticeship and moved on long ago."),\
_("I see ... that is strange. Thanks for telling me, Talan!"),\
_("Couldn't it be that you missed what there was to see or hear?"),\
_("You think this argument is enough to prove Lady Silverhair's guilt?"),\
_("I ... I don't know."),\
_("And you say Lady Silverhair was accused because she \"obviously doesn't like dwarves\"!?"),\
_("True, but practically anyone at the Inn could have thrown an eye on the stones."),\
_("According to Jelom there is no doubt that Lady Silverhair is the thief."),\
_("I'm anxious to hear his reasoning, then."),\
_("I should think so Talan, considering the discomfort you caused my mistress."),\
_("What has happened cannot be undone, I fear. But perhaps there is a little detail that might help to clear the Lady's reputation."),\
_("I've been told about a noise that night, but you say you heard nothing."),\
_("Should I rather call you a bard then?"),\
_("A ... noise? What noise?"),\
_("The noise of someone singing, Talan."),\
_("According to Oliver, somebody must have been in the stables that night."),\
_("Well, I did not notice anything, $name. That's the truth!"),\
_("It had better be, my friend.."),\
_("If I want to prove Lady Silverhair's innocence, I have little other choice, I fear."),\
_("I, I understand. I am very sorry, $name ..."),\
_("Sure, but there is not much to tell. I was out here, making my rounds, walking around the yard like every other night."),\
_("Thank you sir. By the way, I am Talan."),\
_("I am $name, and I'd like to learn a little more about the theft."),\
_("Why was the Lady Silverhair accused?"),\
_("Are you sure you haven't noticed anything out of place that night?"),\
_("But Lady Frostbloom told me about a noise she heard."),\
_("Frostbloom? Then it must have been a Yeti, I suppose."),\
_("But seriously, I did not notice anything. I'm sorry, $name.")]
loop = [8, 9]
cond = [\
"adonthell.gamedata_get_quest(\"demo\").get_val (\"silverhair_free\") != 1\n",\
"self.the_npc.get_val (\"apologised\") != 1\n",\
"adonthell.gamedata_get_quest(\"demo\").get_val (\"know_alek_eavesdrop\") == 1\n",\
"adonthell.gamedata_get_quest(\"demo\").get_val (\"know_talan_singing\") == 1 and self.the_npc.get_val (\"heard_nothing\") == 1\n",\
"adonthell.gamedata_get_quest(\"demo\").get_val (\"know_talan_singing\") == 1\n",\
"adonthell.gamedata_get_quest(\"demo\").get_val (\"know_low_quality\") == 1\n",\
"adonthell.gamedata_get_quest(\"demo\").get_val (\"know_jelom\") != 2\n",\
"adonthell.gamedata_get_quest(\"demo\").get_val (\"know_bjarns_insult\") == 1\n",\
"adonthell.gamedata_get_quest(\"demo\").get_val (\"told_on_talan\") < 1\n",\
"adonthell.gamedata_get_quest(\"demo\").get_val (\"know_talan_singing\") == 1 and adonthell.gamedata_get_quest(\"demo\").g |
disabler/isida3 | lib/DNS/Base.py | Python | gpl-3.0 | 12,716 | 0.033894 | """
$Id: Base.py,v 1.12.2.10 2008/08/01 03:58:03 customdesigned Exp $
This file is part of the pydns project.
Homepage: http://pydns.sourceforge.net
This code is covered by the standard Python License.
Base functionality. Request and Response classes, that sort of thing.
"""
import socket, string, types, time, select
import Type,Class,Opcode
import asyncore
#
# This random generator is used for transaction ids and port selection. This
# is important to prevent spurious results from lost packets, and malicious
# cache poisoning. This doesn't matter if you are behind a caching nameserver
# or your app is a primary DNS server only. To install your own generator,
# replace DNS.Base.random. SystemRandom uses /dev/urandom or similar source.
#
try:
from random import SystemRandom
random = SystemRandom()
except:
import random
class DNSError(Exception): pass
# Lib uses DNSError, so import after defining.
import Lib
defaults= { 'protocol':'udp', 'port':53, 'opcode':Opcode.QUERY,
'qtype':Type.A, 'rd':1, 'timing':1, 'timeout': 30 }
defaults['server']=[]
def ParseResolvConf(resolv_path="/etc/resolv.conf"):
"parses the /etc/resolv.conf file and sets defaults for name servers"
global defaults
lines=open(resolv_path).readlines()
for line in lines:
line = string.strip(line)
if not line or line[0]==';' or line[0]=='#':
continue
fields=string.split(line)
if len(fields) < 2:
continue
if fields[0]=='domain' and len(fields) > 1:
defaults['domain']=fields[1]
if fields[0]=='search':
pass
if fields[0]=='options':
pass
if fields[0]=='sortlist':
pass
if fields[0]=='nameserver':
if fields[1].count(':'):
""" Ignore IPv6 nameservers as we currently do not support querying them. """
pass
else:
defaults['server'].append(fields[1])
def DiscoverNameServers():
import sys
if sys.platform in ('win32', 'nt'):
import win32dns
defaults['server']=win32dns.RegistryResolve()
else:
return ParseResolvConf()
class DnsRequest:
""" high level Request object """
def __init__(self,*name,**args):
self.donefunc=None
self.async=None
self.defaults = {}
self.argparse(name,args)
self.defaults = self.args
self.tid = 0
def argparse(self,name,args):
if not name and self.defaults.has_key('name'):
args['name'] = self.defaults['name']
if type(name) is types.StringType:
args['name']=name
else:
if len(name) == 1:
if name[0]:
args['name']=name[0]
for i in defaults.keys():
if not args.has_key(i):
if self.defaults.has_key(i):
args[i]=self.defaults[i]
else:
args[i]=defaults[i]
if type(args['server']) == types.StringType:
args['server'] = [args['server']]
self.args=args
def socketInit(self,a,b):
self.s = socket.socket(a,b)
def processUDPReply(self):
if self.timeout > 0:
r,w,e = select.select([self.s],[],[],self.timeout)
if not len(r):
raise DNSError, 'Timeout'
(self.reply, self.from_address) = self.s.recvfrom(65535)
self.time_finish=time.time()
self.args['server']=self.ns
return self.processReply()
def processTCPReply(self):
if self.timeout > 0:
r,w,e = select.select([self.s],[],[],self.timeout)
if not len(r):
raise DNSError, 'Timeout'
f = self.s.makefile('r')
header = f.read(2)
if len(header) < 2:
raise DNSError,'EOF'
count = Lib.unpack16bit(header)
self.reply = f.read(count)
if len(self.reply) != count:
# FIXME: Since we are non-blocking, it could just be a large reply
# that we need to loop and wait for.
raise DNSError,'incomplete reply'
self.time_finish=time.time()
self.args['server']=self.ns
return self.processReply()
def processReply(self):
self.args['elapsed']=(self.time_finish-self.time_start)*1000
u = Lib.Munpacker(self.reply)
r=Lib.DnsResult(u,self.args)
r.args=self.args
#self.args=None # mark this DnsRequest object as used.
return r
#### TODO TODO TODO ####
# if protocol == 'tcp' and qtype == Type.AXFR:
# while 1:
# header = f.read(2)
# if len(header) < 2:
# print '========== EOF =========='
# break
# count = Lib.unpack16bit(header)
# if not count:
# print '========== ZERO COUNT =========='
# break
# print '========== NEXT =========='
# reply = f.read(count)
# if len(reply) != count:
# print '*** Incomplete reply ***'
# break
# u = Lib.Munpacker(reply)
# Lib.dumpM(u)
def getSource(self):
"Pick random source port to avoid DNS cache poisoning attack."
while True:
try:
source_port = random.randint(1024,65535)
self.s.bind(('', source_port))
break
except socket.error, msg:
# Error 98, 'Address already in use'
if msg[0] != 98: raise
def conn(self):
self.getSource()
self.s.connect((self.ns,self.port))
def req(self,*name,**args):
" needs a refactoring "
self.argparse(name,args)
#if not self.args:
# raise DNSError,'reinitialize request before reuse'
protocol = self.args['protocol']
self.port = self.args['port']
self.tid = random.randint(0,65535)
self.timeout = self.args['timeout'];
opcode = self.args['opcode']
rd = self.args['rd']
server=self.args['server']
if type(self.args['qtype']) == types.StringType:
try:
qtype = getattr(Type, string.upper(self.args['qtype']))
except AttributeError:
raise DNSError,'unknown query type'
else:
qtype=self.args['qtype']
if not self.args.has_key('name'):
print self.args
raise DNSError,'nothing to lookup'
qname = self.args['name']
if qtype == Type.AXFR:
print 'Query type AXFR, protocol forced to TCP'
protocol = 'tcp'
#print 'QTYPE %d(%s)' % (qtype, Type.typestr(qtype))
m = Lib.Mpacker()
# jesus. keywords and default args would be good. TODO.
m.addHeader(self.tid,
0, opcode, 0, 0, rd, 0, 0, 0,
1, 0, 0, 0)
m.addQuestion(qname, qtype, Class.IN)
self.request = m.getbuf()
try:
if protocol == 'udp':
self.sendUDPRequest(server)
else:
self.sendTCPRequest(server)
except socket.error, reason:
raise DNSError, reason
if self.async:
return None
else:
if not self.response:
raise DNSError,'no working nameservers found'
return self.response
def sendUDPRequest(self, server):
"refactor me"
self.response=None
for self.ns in server:
#print "trying udp",self.ns
try:
if self.ns.count(':'):
if hasattr(socket,'has_ipv6') and socket.has_ipv6:
self.socketInit(socket.AF_INET6, socket.SOCK_DGRAM)
else: continue
else:
self.socketInit(socket.AF_INET, socket.SOCK_DGRAM)
try:
# TODO. Handle timeouts &c correctly (RFC)
self.time_start=time.time()
self.conn()
if not self.async:
self.s.send(self.request)
r=self.processUDPReply()
# Since we bind to the source port and connect to the
# destination port, we don't need to check that here,
# but do make sure it's actually a DNS request that the
# packet is in reply to.
while r.header['id'] != self.tid \
or self.from_address[1] != self.port:
| r=self.processUDPReply()
self.response = r
# FIXME: check waiting async queries
finally:
if not self.async:
self.s.close()
except socket.error:
continue
break
def sendTCPRequest(self, server):
" do the work of sending a TCP request "
self.response=None
for self.ns in server:
#print "trying tcp",self.ns
try:
if self.ns.count(':'):
if hasattr(socket,'has_ipv6') and socket.has_ipv6:
self.socketInit(socket.AF_INET6, socket.SO | CK_STREAM)
else: continue
else:
self.socketInit(socket.AF_INET, socket.SOCK_STREAM)
try:
# TODO. Handle timeouts &c correctly (RFC)
self.time_start=time.time()
self.conn()
buf = Lib.pack16bit(len(self.request))+self.request
# Keep server from making sendall hang
self.s.setblocking(0)
# FIXME: throws WOULDBLOCK if request too large to fit in
# system buffer
self.s.sendall(buf)
self.s.shutdown(socket.SHUT_WR)
r=self.processTCPReply()
if r.header['id'] == self.tid:
self.response = r
break
finally:
self.s.close()
except socket.error:
continue
#class Dns |
philba/myblog | myblog/urls.py | Python | mit | 1,474 | 0 | from __future__ import absolute_import, unicode_literals
from django.conf import settings
from django.conf.urls import include, url
from django.contrib import admin
from django.contrib.auth import urls as djangoauth_urls
from search import views as search_views
from blog import views as blog_views
from wagtail.wagtailadmin import urls as wagtailadmin_urls
from wagtail.wagtailcore import urls as wagtail_urls
from wagtail.wagtaildocs import urls as wagtaildocs_urls
urlpatterns = [
url(r'^', include(djangoauth_urls)),
url(r'^django-admin/', include(admin.site.urls)),
url(r'^admin/', include(wagtailadmin_urls)),
url(r'^documents/', include(wagtaildocs_urls)),
url(r'^search/$', search_views.search, name='search'),
# For anything not caught by a more specific rule above, hand over to
# Wagtail's page serving mechanism. This should be the last pattern in
# the list:
url(r'', include(wagtail_urls)),
# Alternat | ively, if you want Wagtail pages to be served from a subpath
# of your site, rather than the | site root:
# url(r'^pages/', include(wagtail_urls)),
]
if settings.DEBUG:
from django.conf.urls.static import static
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
# Serve static and media files from development server
urlpatterns += staticfiles_urlpatterns()
urlpatterns += static(settings.MEDIA_URL,
document_root=settings.MEDIA_ROOT)
|
boada/planckClusters | MOSAICpipe/plugins/_dust.py | Python | mit | 3,312 | 0.002114 | import os
import sys
import time
# get the utils from the parent directory
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from utils import SEx_head
from pipe_utils import tableio, deredden
# Find the eBV dust correction for each source in the catalogs
def DustCorrection(self):
''' This figures out the dust extinction and corrects the sextractor
photometry that has been cleaned by the BuildColorCat function. It also
puts the dust corrections into a series of dictions that are used by
BuildColorCat. So if we don't run this function it doesn't include the
dust correction. This is even true after it writes a dust file. I think
the dust file is really just there for us to inspect for funny stuff.
'''
print()
self.DustCat = self.tilename + ".dust"
# Get RA,DEC from the detection catalog
detCatalog = self.combcat['i']
detcols = SEx_head(detCatalog, verb=None)
cols = (detcols['NUMBER'], detcols['X_WORLD'], detcols['Y_WORLD'])
(id, ra, dec) = tableio.get_data(detCatalog, cols)
outColumns = ['ID', ]
# Get e(B-V) for every source in the detection catalog
print("# Computing e(B-V) for all %s ra,dec" % len(ra), file=sys.stderr)
self.eBV = deredden.get_EBV(ra, dec)
print("# Done...", file=sys.stderr)
# Prepare the header for the output file
header = '## {}\n'.format(time.ctime()) + \
'## Dust correction extinction ' +\
'for each object/filter in: {}\n'.format(self.tilename) +\
'## This file was generated automatically by the BCS ' +\
'Rutgers pipeline\n' +\
'## These must be subtracted from the SExtractor ' +\
'magnitudes \n' +\
'## Dust Correction e(B-V), mean, min, max: ' +\
'{0:.4f}, {0:.4f}, {0:.4f}\n'.format(self.eBV.mean(),
self.eBV.min(), self.eBV.max())
VarsOut = [id]
# Get the dust extinction correction for each filter
for filter in self.filters:
self.XCorr[filter] = deredden.filterFactor(filter) * self.eBV
self.XCorrError[filter] = self.XCorr[filter] * 0.16
# Some more work on the header
header += "## Dust Correction %s, mean, min, max: %.4f %.4f, %.4f mags\n" % (
filter, self.XCorr[filter].mean(), self.XCorr[filter].min(),
self.XCorr[filter].max())
outColumns.append(filter + '_MOSAICII Dust Correction')
outColumns.append(filter + '_MOSAICII Dust Correction Error')
VarsOut.append(self.XCorr[filter])
VarsOut.append(self.XCorrError[filter])
#print outColumns
i = 0
header += '# ' + str(i + 1) + '\t' + outColumns[i] + '\n'
for filter in self.filters:
header += '# {}\t{}\n'.format(str(i + 2), outColumns[i + 1])
header += '# {}\t{}\n'.format(str(i + 3), outColumns[i + 2])
i += 2
vars = tuple(VarsOut)
format = '%8i' + '%10.5f ' * (len(vars) - 1)
| print('# Writing Dust Extinction Catalog...', file=sys.stderr)
tableio.put_data(self.DustCat,
vars,
header=header,
format=format,
app | end='no')
print('# Dust file complete.', file=sys.stderr)
return
|
ssorj/blinky | misc/config.py | Python | apache-2.0 | 754 | 0.002653 | from blinky.appveyor import *
from blinky.circle import *
from blinky.github import | *
from blinky.jenkins import *
from blinky.travis impo | rt *
http_port = 8080
model.title = "Test CI"
# Components
proton_c = Component(model, "Proton C")
# Environments
multiple = Environment(model, "Multiple OSes")
# Agents
github = GitHubAgent(model, "GitHub")
# Categories
client_tests = Category(model, "Clients", "client")
# Groups
group = Group(model, client_tests, "Proton C")
# To look up GitHub Actions workflow IDs:
# curl https://api.github.com/repos/apache/qpid-proton/actions/workflows
GitHubJob (model, group, proton_c, multiple, github, None, "apache/qpid-proton", "master", "Build", 2012003)
|
nico202/primeDiceBot | primeDiceClass.py | Python | gpl-3.0 | 6,205 | 0.005963 | ''' LICENCE
This file is part of primeDiceBot.
primeDiceBot is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
primeDiceBot is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Nome-Programma. If not, see <http://www.gnu.org/licenses/>.
'''
import requests
import json
import sys
class primedice():
def __init__(self):
self.login_url = 'https://api.primedice.com/api/login'
self.bet_url = 'https://api.primedice.com/api/bet'
self.info_url = 'https://api.primedice.com/api/users/1'
self.seed_url = 'https://api.primedice.com/api/seed'
self.headers = {
'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Ubuntu Chromium/43.0.2357.130 Chrome/43.0.2357.130 Safari/537.36'
}
self.session = requests.Session()
self.bet_count = 0
def session_post(self, url, post):
answer = self.session.post(url, data = post, headers = self.headers)
if answer.status_code == 429:
print "Too many requests"
else:
return answer
def login(self, username, password):
print "Login by username and password is disabled beacuase of reCaptcha"
print "You can login using a manually-provided token"
sys.exit("Please, read the readme")
# The following code is disabled right now
# self.username = username
# self.password = password
# post_data = {
# 'username':str(username),
# 'password':str(password),
# 'opt':''
# }
# login_response = self.session_post(self.login_url, post_data).content
# try:
# self.token = json.loads(login_response)["access_token"]
# self.bet_url_params = self.bet_url + "?access_token=" + self.token
# self.info_url_params = self.info_url + "?access_token=" + self.token
# self.balance = json.loads(\
# self.session.get(self.info_url_params).content\
# )["user"]["balance"]
# print "Login successful, token = %s" % (self.token)
# except:
# if login_response == "Unauthorized":
# sys.exit("Wrong login details")
# | elif login_response == "Too many requests.":
# sys.exit("Too many requests. Wait before running the script again")
# else:
# print("Something went wrong, unknown error")
# sy | s.exit(login_response)
"""
Set the 'fairness' random number generator seed. It's strictly
required to set it to be able to bet
"""
def set_seed(self, seed = 5164131849335):
post_data = {
'seed': seed
}
result = self.session.post(self.seed_url + "?access_token="+ self.token,
post_data, headers = self.headers)
# debug
# print result.content
def login(self, token):
self.token = token
self.bet_url_params = self.bet_url + "?access_token=" + self.token
self.info_url_params = self.info_url + "?access_token=" + self.token
try:
self.balance = json.loads(self.session.get(
self.info_url_params).content)["user"]["balance"]
print "Login successful, with the provided token %s" % (self.token)
except:
if login_response == "Unauthorized":
sys.exit("Wrong login details")
elif login_response == "Too many requests.":
sys.exit("Too many requests. Wait before running the script again")
else:
print("Something went wrong, unknown error")
sys.exit(login_response)
def bet(self, amount = 0, target = 95, condition = "<"):
try:
target = float(target)
amount = int(amount)
except:
return "Target must be an integer!"
#try:
if not condition in ["<",">"]:
print "Wrong condition. Must be either > or <"
else:
params = {
'access_token': self.token
}
post_data = {
'amount': str(amount),
'condition': str(condition),
'target': str(target)
}
rix = self.session_post(self.bet_url_params, post = post_data)
if rix.status_code == 200:
bet_response = json.loads(rix.content)
feedback = {
'jackpot': bet_response["bet"]["jackpot"],
'win': bet_response["bet"]["win"],
'amount': bet_response["bet"]["amount"],
}
self.balance = bet_response["user"]["balance"]
return feedback
elif rix.status_code == 400 and rix.content == "Insufficient funds":
sys.exit("Insufficient funds")
else:
print "\nYou have to debug this error"
print rix
print rix.content
sys.exit()
#except:
# print "Some error happened processing your request"
class helpers():
def config_check(self, config):
try:
config.base_bet = float(config.base_bet)
except:
sys.exit("Base bet must be a float")
try:
config.base_bet = int(config.base_bet)
except:
sys.exit("Base bet must be a integer")
try:
config.win_chance = float(config.win_chance)
except:
sys.exit("Win chance must be a float")
if config.win_chance > 98 or config.win_chance < 0.01:
sys.exit("Win chance not in range 0.01 - 98")
|
bpsagar/css2video | tests/parsers/value/test_color.py | Python | mit | 1,086 | 0 | import unittest
from css2video.constants import ValueType
from css2video.parsers.value import Color, ColorParseException
class TestCase(unittest.TestCas | e):
def test_parser(self):
response = Color.parse('#ffF')
self.assertEqual(
response,
dict(type=ValueType.color, red=255, green=255, blue=255, alpha=1)
)
response = Color.parse('#FFFFFF')
self.assertEqual(
response,
dict(type=ValueType.color, red=255, green=255, blue=255, alpha=1)
)
response = Color.parse(' | rgba(0, 0, 0, 1)')
self.assertEqual(
response,
dict(type=ValueType.color, red=0, green=0, blue=0, alpha=1)
)
response = Color.parse('RGB(0, 0, 0)')
self.assertEqual(
response,
dict(type=ValueType.color, red=0, green=0, blue=0, alpha=1)
)
with self.assertRaises(ColorParseException):
Color.parse('#FFFF')
with self.assertRaises(ColorParseException):
Color.parse('rgb(0, 0, 0, 0.9')
|
OnroerendErfgoed/language-tags | language_tags/data/__init__.py | Python | mit | 334 | 0.002994 | import o | s
import json
from io import open
__all__ = ['get']
parent_dir = os.path.dirname(__file__)
data_dir = 'json/'
cache = {}
def get(name):
if name not in cache:
with open(os.path.join(parent_dir, data_dir, "%s.json" % name), encoding='utf-8') as f:
cache[name] = json.load(f)
return cache[n | ame]
|
wpoely86/easybuild-easyblocks | easybuild/easyblocks/generic/pythonpackage.py | Python | gpl-2.0 | 21,887 | 0.002878 | ##
# Copyright 2009-2016 Ghent University
#
# This file is part of EasyBuild,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://vscentrum.be/nl/en),
# Flemish Research Foundation (FWO) (http://www.fwo.be/en)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# http://github.com/hpcugent/easybuild
#
# EasyBuild is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# EasyBuild is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
##
"""
EasyBuild support for Python packages, implemented as an easyblock
@author: Stijn De Weirdt (Ghent University)
@author: Dries Verdegem (Ghent University)
@author: Kenneth Hoste (Ghent University)
@author: Pieter De Baets (Ghent University)
@author: Jens Timmerman (Ghent University)
"""
import os
import re
import sys
import tempfile
from distutils.version import LooseVersion
from vsc.utils import fancylogger
from vsc.utils.missing import nub
import easybuild.tools.environment as env
from easybuild.easyblocks.python import EXTS_FILTER_PYTHON_PACKAGES
from easybuild.framework.easyconfig import CUSTOM
from easybuild.framework.extensioneasyblock import ExtensionEasyBlock
from easybuild.tools.build_log import EasyBuildError
from easybuild.tools.filetools import mkdir, rmtree2, which
from easybuild.tools.modules import get_software_root
from easybuild.tools.run import run_cmd
# not 'easy_install' deliberately, to avoid that pkg installations listed in easy-install.pth get preference
# '.' is required at the end when using easy_install/pip in unpacked source dir
EASY_INSTALL_INSTALL_CMD = "%(python)s setup.py easy_install --prefix=%(prefix)s %(installopts)s %(loc)s"
PIP_INSTALL_CMD = "pip install --prefix=%(prefix)s %(installopts)s %(loc)s"
SETUP_PY_INSTALL_CMD = "%(python)s setup.py install --prefix=%(prefix)s %(installopts)s"
UNKNOWN = 'UNKNOWN'
def pick_python_cmd(req_maj_ver=None, req_min_ver=None):
"""
Pick 'python' command to use, based on specified version requirements.
If the major version is specified, it must be an exact match (==).
If the minor version is specified, it is considered a minimal minor version (>=).
List of considered 'python' commands (in order)
* 'python' available through $PATH
* 'python<major_ver>' available through $PATH
* 'python<major_ver>.<minor_ver>' available through $PATH
* Python executable used in current session (sys.executable)
"""
log = fancylogger.getLogger('pick_python_cmd', fname=False)
def check_python_cmd(python_cmd):
"""Check whether specified Python command satisfies requirements."""
# check whether specified Python command is available
if os.path.isabs(python_cmd):
if not os.path.isfile(python_cmd):
log.debug("Python command '%s' does not exist", python_cmd)
return False
else:
python_cm | d_path = which(python_cmd)
if python_cmd_path is None:
log.debug("Python command '%s' not available through $PATH", python_cmd)
return False
if req_maj_ver is not None:
if req_min_ver is None:
req_majmin_ver = '%s.0' % req_maj_ver
else:
req_majmin_ver = '%s.%s' % (req_maj_ver, req_min_ver)
pycode = 'import sys; print("%s | .%s" % sys.version_info[:2])'
out, _ = run_cmd("%s -c '%s'" % (python_cmd, pycode), simple=False)
out = out.strip()
# (strict) check for major version
maj_ver = out.split('.')[0]
if maj_ver != str(req_maj_ver):
log.debug("Major Python version does not match: %s vs %s", maj_ver, req_maj_ver)
return False
# check for minimal minor version
if LooseVersion(out) < LooseVersion(req_majmin_ver):
log.debug("Minimal requirement for minor Python version not satisfied: %s vs %s", out, req_majmin_ver)
return False
# all check passed
log.debug("All check passed for Python command '%s'!", python_cmd)
return True
# compose list of 'python' commands to consider
python_cmds = ['python']
if req_maj_ver:
python_cmds.append('python%s' % req_maj_ver)
if req_min_ver:
python_cmds.append('python%s.%s' % (req_maj_ver, req_min_ver))
python_cmds.append(sys.executable)
log.debug("Considering Python commands: %s", ', '.join(python_cmds))
# try and find a 'python' command that satisfies the requirements
res = None
for python_cmd in python_cmds:
if check_python_cmd(python_cmd):
log.debug("Python command '%s' satisfies version requirements!", python_cmd)
if os.path.isabs(python_cmd):
res = python_cmd
else:
res = which(python_cmd)
log.debug("Absolute path to retained Python command: %s", res)
break
else:
log.debug("Python command '%s' does not satisfy version requirements (maj: %s, min: %s), moving on",
req_maj_ver, req_min_ver, python_cmd)
return res
def det_pylibdir(plat_specific=False, python_cmd=None):
    """Determine Python library directory.

    :param plat_specific: return the platform-specific library directory
    :param python_cmd: Python command to query (first 'python' on $PATH if not specified)
    :return: Python library directory, relative to the installation prefix
    """
    log = fancylogger.getLogger('det_pylibdir', fname=False)

    # fall back to the first 'python' listed in $PATH when none was specified
    python_cmd = 'python' if python_cmd is None else python_cmd

    # query the *active* Python via distutils through run_cmd, rather than
    # the system Python that is running EasyBuild itself
    prefix = '/tmp/'
    distutils_args = 'plat_specific=%s, prefix="%s"' % (plat_specific, prefix)
    pycode = "import distutils.sysconfig; print(distutils.sysconfig.get_python_lib(%s))" % distutils_args
    cmd = "%s -c '%s'" % (python_cmd, pycode)

    log.debug("Determining Python library directory using command '%s'", cmd)
    out, ec = run_cmd(cmd, simple=False, force_in_dry_run=True)

    # only the last line of output is relevant; earlier lines may contain noise
    last_line = out.strip().split('\n')[-1]
    if not last_line.startswith(prefix):
        # the reported location must start with the prefix we passed in,
        # otherwise something is very wrong
        raise EasyBuildError("Last line of output of %s does not start with specified prefix %s: %s (exit code %s)",
                             cmd, prefix, out, ec)

    # strip the prefix to obtain the relative library directory
    pylibdir = last_line[len(prefix):]
    log.debug("Determined pylibdir using '%s': %s", cmd, pylibdir)
    return pylibdir
class PythonPackage(ExtensionEasyBlock):
"""Builds and installs a Python package, and provides a dedicated module file."""
@staticmethod
def extra_options(extra_vars=None):
"""Easyconfig parameters specific to Python packages."""
if extra_vars is None:
extra_vars = {}
extra_vars.update({
'unpack_sources': [True, "Unpack sources prior to build/install", CUSTOM],
'req_py_majver': [2, "Required major Python version (only relevant when using system Python)", CUSTOM],
'req_py_minver': [6, "Required minor Python version (only relevant when using system Python)", CUSTOM],
'runtest': [True, "Run unit tests.", CUSTOM], # overrides default
'use_easy_install': [False, "Install using '%s'" % EASY_INSTALL_INSTALL_CMD, CUSTOM],
'use_pip': [False, "Install using '%s'" % PIP_INSTALL_CMD, CUSTOM],
'zipped_egg': [False, "Install as a zipped eggs (requires use_easy_install)", CUSTOM],
})
return ExtensionEasyBlock.extra_options(extra_vars=extra_vars)
def __init__(self, *args, **kwargs):
"""Initialize custom class varia |
shunliz/test | python/flask/application/db/models.py | Python | apache-2.0 | 482 | 0.004149 | from sqlalchemy import Column, Integer, String
from sqlalchemy import Table
from application.db import Base
class User(Base):
    """A registered user, mapped to the 'users' table."""
    __tablename__ = 'users'
    # surrogate primary key
    id = Column(Integer, primary_key=True)
    # login name; enforced unique at the database level
    name = Column(String(50), unique=True)
    # NOTE(review): the password appears to be stored in plain text and marked
    # unique -- it should almost certainly be hashed instead; confirm intent.
    password = Column(String(120), unique=True)

    def __init__(self, name=None, password=None):
        self.name = name
        self.password = password

    def __repr__(self):
        return '<User %r>' % (self.name)
|
ray-project/ray | python/ray/tune/cloud.py | Python | apache-2.0 | 10,640 | 0.000282 | import os
import shutil
import tempfile
from typing import Optional
from ray import logger
from ray.util import PublicAPI
from ray.util.ml_utils.cloud import (
download_from_bucket,
clear_bucket,
upload_to_bucket,
is_cloud_target,
)
@PublicAPI(stability="beta")
class TrialCheckpoint(os.PathLike):
    def __init__(
        self, local_path: Optional[str] = None, cloud_path: Optional[str] = None
    ):
        """Checkpoint handle pointing to a local directory and/or cloud location.

        Args:
            local_path: Local directory holding the checkpoint, if any.
            cloud_path: Cloud storage URI holding the checkpoint, if any.
        """
        self.local_path = local_path
        self.cloud_path = cloud_path
    # The following magic methods are implemented to keep backwards
    # compatibility with the old path-based return values.
    def __str__(self):
        # Prefer the local path; fall back to the cloud path (may be None
        # if neither location is set).
        return self.local_path or self.cloud_path
    def __fspath__(self):
        # os.PathLike protocol: expose the local filesystem path.
        # NOTE(review): returns None when no local path is set, which would
        # make os.fspath() fail downstream -- confirm callers guarantee one.
        return self.local_path
def __eq__(self, other):
if isinstance(other, str):
return self.local_path == other
elif isinstance(other, TrialCheckpoint):
return (
self.local_path == other.local_path
and self.cloud_path == other.cloud_path
)
    def __add__(self, other):
        # Legacy string-concatenation support: "<local_path>" + other.
        # NOTE(review): this *raises* NotImplementedError rather than returning
        # NotImplemented -- confirm that failing loudly is intended here.
        if isinstance(other, str):
            return self.local_path + other
        raise NotImplementedError
    def __radd__(self, other):
        # Legacy string-concatenation support: other + "<local_path>".
        if isinstance(other, str):
            return other + self.local_path
        raise NotImplementedError
    def __repr__(self):
        # Debug-friendly representation showing both storage locations.
        return (
            f"<TrialCheckpoint "
            f"local_path={self.local_path}, "
            f"cloud_path={self.cloud_path}"
            f">"
        )
    def download(
        self,
        cloud_path: Optional[str] = None,
        local_path: Optional[str] = None,
        overwrite: bool = False,
    ) -> str:
        """Download checkpoint from cloud.
        This will fetch the checkpoint directory from cloud storage
        and save it to ``local_path``.
        If a ``local_path`` argument is provided and ``self.local_path``
        is unset, it will be set to ``local_path``.
        Args:
            cloud_path (Optional[str]): Cloud path to load checkpoint from.
                Defaults to ``self.cloud_path``.
            local_path (Optional[str]): Local path to save checkpoint at.
                Defaults to ``self.local_path``.
            overwrite (bool): If True, overwrites potential existing local
                checkpoint. If False, exits if ``self.local_path`` already
                exists and has files in it.
        Returns:
            str: Local directory the checkpoint is available in.
        """
        # Fall back to the stored attributes when arguments are omitted.
        cloud_path = cloud_path or self.cloud_path
        if not cloud_path:
            raise RuntimeError(
                "Could not download trial checkpoint: No cloud "
                "path is set. Fix this by either passing a "
                "`cloud_path` to your call to `download()` or by "
                "passing a `cloud_path` into the constructor. The latter "
                "should automatically be done if you pass the correct "
                "`tune.SyncConfig`."
            )
        local_path = local_path or self.local_path
        if not local_path:
            raise RuntimeError(
                "Could not download trial checkpoint: No local "
                "path is set. Fix this by either passing a "
                "`local_path` to your call to `download()` or by "
                "passing a `local_path` into the constructor."
            )
        # Only update local path if unset
        if not self.local_path:
            self.local_path = local_path
        if (
            not overwrite
            and os.path.exists(local_path)
            and len(os.listdir(local_path)) > 0
        ):
            # Local path already exists and we should not overwrite,
            # so return.
            return local_path
        # Else: Actually download
        # Delete existing dir
        shutil.rmtree(local_path, ignore_errors=True)
        # Re-create
        os.makedirs(local_path, 0o755, exist_ok=True)
        # Here we trigger the actual download
        download_from_bucket(cloud_path, local_path)
        # Local dir exists and is not empty
        return local_path
def upload(
self,
cloud_path: Optional[str] = None,
local_path: Optional[str] = None,
clean_before: bool = False,
):
"""Upload checkpoint to cloud.
This will push the checkpoint directory from local storage
to ``cloud_path``.
If a ``cloud_path`` argument is provided and ``self.cloud_path``
is unset, it will be set to ``cloud_path``.
Args:
cloud_path (Optional[str]): Cloud path to load checkpoint from.
Defaults to ``self.cloud_path``.
local_path (Optional[str]): Local path to save checkpoint at.
Defaults to ``self.local_path``.
clean_before (bool): If True, deletes potentially existing
cloud bucket before storing new data.
"""
local_path = local_path or self.local_path
if not local_path:
raise RuntimeError(
"Could not upload trial checkpoint: No local "
"path is set. Fix this by either passing a "
"`local_path` to your call to `upload()` or by "
"passing a `local_path` into the constructor."
)
cloud_path = cloud_path or self.cloud_path
if not cloud_path:
raise RuntimeError(
"Could not download trial checkpoint: No cloud "
"path is set. Fix this by either passing a "
"`cloud_path` to your call to `download()` or by "
"passing a `cloud_path` into the constructor. The latter "
"should automatically be done if you pass the correct "
"`tune.SyncConfig`."
)
if not self.cloud_path:
self.cloud_path = cloud_path
if clean_before:
logger.info(f"Clearing bucket contents before upload: {cloud_path}")
clear_bucket(cloud_path)
# Actually upload
upload_to_bucket(cloud_path, local_path)
return cloud_path
def save(self, path: Optional[str] = None, force_download: bool = False):
"""Save trial checkpoint to director | y or cloud storage.
If the ``path`` is a local target and the checkpoint already exists
on local storage, the local directory is copied. Else, the checkpoint
is downloaded from cloud storage.
If the ``path`` is a cloud target and the checkpoint does not already
exist on local storage, it is downloaded from cloud storage before.
That way c | heckpoints can be transferred across cloud storage providers.
Args:
path (Optional[str]): Path to save checkpoint at. If empty,
the default cloud storage path is saved to the default
local directory.
force_download (bool): If ``True``, forces (re-)download of
the checkpoint. Defaults to ``False``.
"""
temp_dirs = set()
# Per default, save cloud checkpoint
if not path:
if self.cloud_path and self.local_path:
path = self.local_path
elif not self.cloud_path:
raise RuntimeError(
"Cannot save trial checkpoint: No cloud path "
"found. If the checkpoint is already on the node, "
"you can pass a `path` argument to save it at another "
"location."
)
else:
# No self.local_path
raise RuntimeError(
"Cannot save trial checkpoint: No target path "
"specified and no default local directory available. "
"Please pass a `path` argument to `save()`."
)
elif not self.local_path and not self.cloud_path:
raise RuntimeError(
f"Cannot save trial checkpoint to cloud target "
f"`{path}`: No existing local or cloud path was "
f"found. This indicates an error when loading "
f"the checkpoints. Please report this is |
ninefold/libcloud | libcloud/storage/drivers/ninefold.py | Python | apache-2.0 | 1,037 | 0.000964 | # Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, sof | tware
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permi | ssions and
# limitations under the License.
from libcloud.storage.providers import Provider
from libcloud.storage.drivers.atmos import AtmosDriver
class NinefoldStorageDriver(AtmosDriver):
    """Storage driver for Ninefold: a thin configuration of the Atmos driver."""
    host = 'api.ninefold.com'  # Ninefold API endpoint
    path = '/storage/v1.0'  # base path of the Atmos storage API
    type = Provider.NINEFOLD
    name = 'Ninefold'
|
DeflatedPickle/pkinter | pkinter/boundbutton.py | Python | mit | 1,967 | 0.002034 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
""""""
import tkinter as tk
from tkinter import ttk
# link
__title__ = "BoundButton"
__version__ = "1.0.3"
__author__ = "DeflatedPickle"
class BoundButton(ttk.Button):
    """
    -----DESCRIPTION-----
    A ttk Button that can be bound to any key to run any function.
    -----USAGE-----
    def function(event):
        print("Button Pressed")
    boundButton = BoundButton(parent, text=[string], key=[string], command=[function])
    boundButton.pack()
    -----PARAMETERS-----
    parent = The parent of the widget.
    text = The text of the Button.
    key = The key that will activate the Button.
    command = The function the button will run.
    -----CONTENTS-----
    ---VARIABLES---
    parent = The parent of the widget.
    _text = The text of the Button.
    _key = The key that will activate the Button.
    _command = The function the button will run.
    ---TKINTER VARIABLES---
    None
    ---WIDGETS---
    self
    ---FUNCTIONS---
    None
    """
    def __init__(self, parent, text="", key="Return", command=None, *args):
        # default="active" renders this button as the window's default button.
        ttk.Button.__init__(self, parent, default="active", text=text, command=command, *args)
        self.parent = parent
        self._text = text
        self._key = key
        self._command = command
        # Bind the activation key on the button itself and grab focus so the
        # binding receives key events immediately.
        # NOTE(review): the bound handler is invoked with an event argument,
        # while command= is invoked without one -- the callback must accept both.
        self.bind("<{}>".format(key), command)
        self.focus()
##################################################
if __name__ == "__main__":
def function(event):
if variable.get() == "foo":
variable.set("bar")
elif variable.get() == "bar":
variable.set("foo")
root = tk.Tk()
bbutton = BoundButton(root, text="Press F", key="f", command=function)
bbutton.pack(expand=True, padx=5, pady=5)
variable = tk.StringVar()
variable.set("foo")
label = ttk.Label(root, textvariable=variable).pack(expand=True, padx=5, pady=5)
root.mainloop()
|
karllessard/tensorflow | tensorflow/compiler/tests/complex_div_test.py | Python | apache-2.0 | 5,907 | 0.00237 | # Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Test cases for complex numbers division."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import numpy as np
from tensorflow.compiler.tests import xla_test
from tensorflow.python.framework import dtypes
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gen_math_ops
from tensorflow.python.platform import googletest
os.environ["XLA_FLAGS"] = ("--xla_cpu_fast_math_honor_nans=true "
"--xla_cpu_fast_math_honor_infs=true")
class ComplexNumbersDivisionTest(xla_test.XLATestCase):
"""Test cases for complex numbers division operators."""
  def _testBinary(self, op, a, b, expected, equality_test=None):
    """Evaluate `op(a, b)` under XLA and compare the result to `expected`.

    Args:
      op: binary TensorFlow op, called with two placeholders.
      a: numpy array fed as the first operand.
      b: numpy array fed as the second operand.
      expected: numpy array holding the expected result.
      equality_test: optional comparison callable; defaults to
        assertAllCloseAccordingToType (rtol=1e-3).
    """
    with self.session() as session:
      with self.test_scope():
        # Placeholders mirror the dtype and shape of the numpy inputs.
        pa = array_ops.placeholder(dtypes.as_dtype(a.dtype), a.shape, name="a")
        pb = array_ops.placeholder(dtypes.as_dtype(b.dtype), b.shape, name="b")
        output = op(pa, pb)
      result = session.run(output, {pa: a, pb: b})
      if equality_test is None:
        equality_test = self.assertAllCloseAccordingToType
      equality_test(result, expected, rtol=1e-3)
def testComplexOps(self):
for dtype in self.complex_types:
# Test division by 0 scenarios.
self._testBinary(
gen_math_ops.real_div,
np.array([
complex(1, 1),
complex(1, np.inf),
complex(1, np.nan),
complex(np.inf, 1),
complex(np.inf, np.inf),
complex(np.inf, np.nan),
complex(np.nan, 1),
complex(np.nan, np.inf),
complex(np.nan, np.nan),
],
dtype=dtype),
np.array([
0 + 0j,
0 + 0j,
0 + 0j,
0 + 0j,
0 + 0j,
0 + 0j,
0 + 0j,
0 + 0j,
0 + 0j,
],
dtype=dtype),
expected=np.array([
complex(np.inf, np.inf),
complex(np.inf, np.inf),
complex(np.inf, np.nan),
complex(np.inf, np.inf),
complex(np.inf, np.inf),
complex(np.inf, np.nan),
complex(np.nan, np.inf),
complex(np.nan, np.inf),
complex(np.nan, np.nan),
],
dtype=dtype))
# Test division with finite numerator, inf/nan denominator.
self._testBinary(
gen_math_ops.real_div,
np.array([
1 + 1j,
1 + 1j,
1 + 1j,
1 + 1j,
1 + 1j,
1 + 1j,
1 + 1j,
1 + 1j,
],
dtype=dtype),
np.array(
[
complex(1, np.inf),
complex(1, np.nan),
complex(np.inf, 1),
complex(np.inf, np.inf), # C++ and Python diverge here.
complex(np.inf, np.nan), # C++ and Python diverge here.
| complex(np.nan, 1),
complex(np.nan, np.inf), # C++ and Python diverge here.
complex(np.nan, np.nan),
],
dtype=dtype),
expected=np.array(
[
(1 + 1j) / complex(1, np.inf),
(1 + 1j) / complex(1, np.nan),
(1 + 1j) / complex(np.inf, 1),
| complex(0 + 0j), # C++ and Python diverge here.
complex(0 + 0j), # C++ and Python diverge here.
(1 + 1j) / complex(np.nan, 1),
complex(0 + 0j), # C++ and Python diverge here.
(1 + 1j) / complex(np.nan, np.nan),
],
dtype=dtype))
# Test division with inf/nan numerator, infinite denominator.
self._testBinary(
gen_math_ops.real_div,
np.array([
complex(1, np.inf),
complex(1, np.nan),
complex(np.inf, 1),
complex(np.inf, np.inf),
complex(np.inf, np.nan),
complex(np.nan, 1),
complex(np.nan, np.inf),
complex(np.nan, np.nan),
],
dtype=dtype),
np.array([
1 + 1j,
1 + 1j,
1 + 1j,
1 + 1j,
1 + 1j,
1 + 1j,
1 + 1j,
1 + 1j,
],
dtype=dtype),
expected=np.array(
[
complex(np.inf, np.inf), # C++ and Python diverge here.
complex(1 / np.nan) / (1 + 1j),
complex(np.inf / 1) / (1 + 1j),
complex(np.inf, -np.nan), # C++ and Python diverge here.
complex(np.inf, -np.inf), # C++ and Python diverge here.
complex(np.nan / 1) / (1 + 1j),
complex(np.inf, np.inf), # C++ and Python diverge here.
complex(np.nan / np.nan) / (1 + 1j),
],
dtype=dtype))
if __name__ == "__main__":
googletest.main()
|
xingjian-f/Leetcode-solution | 390. Elimination Game.py | Python | mit | 696 | 0.048851 | class Solution(object):
def lastRemaining(self, n):
"""
:type n: int
:rtype: int
"""
# a = range(1, n+1)
s = 2
if n == 1:
s = 1
length = n
step = 2
tag = 2
while length/2 > 1:
# print length, s, step
if tag == 2:
if (length/2) % 2 == 0:
s = s
else:
s = s + step
tag = 1
elif tag == 1:
s = s + step
tag = 2
length /= 2
step *= 2 |
# while len(a) > 1:
# a = a[1::2]
# a = a[::-1]
| return s
for i in range(1, 50):
print Solution().lastRemaining(i) |
bjornaa/gridmap | test/test_gridmap0.py | Python | mit | 4,541 | 0.001763 | # -*- coding: utf-8 -*-
"""Unit tests for gridmap classed"""
# ----------------------------------
# Bjørn Ådlandsvik <bjorn@imr.no>
# Institute of Marine Research
# ----------------------------------
import sys
from math import pi
import unittest
import numpy as np
sys.path = ['..'] + sys.path # import from developing version
import gridmap
# ------------------------------------
class test_PolarStereographic0(unittest.TestCase):
"""Test some analytic properties of the polar stereographic map"""
xp, yp, dx, ylon = 418.25, 257.25, 10000.0, 58.0
map0 = gridmap.PolarStereographic(xp, yp, dx, ylon)
map1 = gridmap.PolarStereographic(xp, yp, dx, ylon,
ellipsoid=gridmap.WGS84)
# Flytt de to første, til test_Interface
def test_scalar(self):
"""Should return a scalar for scalar input"""
pass
def test_vector(self):
"""Return arrays of the same shape as the input"""
def test_north_pole_forward(self):
"""The coordinates of the North Pole are xp, yp"""
lon, lat = 17.2, 90.0
# sphere
x0, y0 = self.map0.ll2grid(lon, lat)
self.assertEqual((x0, y0), (self.xp, self.yp))
# WGS84
x1, y1 = self.map1.ll2grid(lon, lat)
self.assertEqual((x1, y1), (self.xp, self.yp))
def test_north_pole_backward(self):
"""Longitude is not defined at the North Pole"""
# Should raise an exception
# sphere
lon0, lat0 = self | .map0.grid2ll(self.xp, self.yp)
# WGS84
lon1, lat1 = self.map1.grid2ll(self.xp, self.yp)
def test_ylon(self):
"""lon = ylon <=> x = xp"""
# lon = ylon => x = xp
lon, lat = self.ylon, 72.3
# sphere
x0, y0 = self.map0.ll2grid(lon, lat)
self.assertEqual(x0, self.xp)
# WGS84
x1, y1 = self.ma | p1.ll2grid(lon, lat)
self.assertEqual(x1, self.xp)
# x = xp => y = ylon
x, y = self.xp, 222.222
# sphere
lon0, lat0 = self.map0.grid2ll(x, y)
self.assertAlmostEqual(lon0, self.ylon, places=13)
# WGS84
lon1, lat1 = self.map1.grid2ll(x, y)
self.assertAlmostEqual(lon1, self.ylon, places=13)
# x = xp, => angle = 0
x, y = self.xp, 222.222
# sphere
angle0 = self.map0.angle(x, y)
self.assertEqual(angle0, 0.0)
# WGS84
angle1 = self.map1.angle(x, y)
self.assertEqual(angle1, 0.0)
def test_inverse(self):
"""grid2ll and ll2grid are inverse"""
lon, lat = 5.323333, 60.3925 # Bergen
# sphere: ll -> xy -> ll
x0, y0 = self.map0.ll2grid(lon, lat)
lon0, lat0 = self.map0.grid2ll(x0, y0)
self.assertAlmostEqual(lon0, lon, places=14)
self.assertEqual(lat0, lat)
# WGS84: ll -> zy -> ll
x1, y1 = self.map1.ll2grid(lon, lat)
lon1, lat1 = self.map1.grid2ll(x1, y1)
self.assertAlmostEqual(lon1, lon, places=14)
self.assertAlmostEqual(lat1, lat, places=10)
x, y = 200.0, 133.12345 # "Arbitrary"
# sphere xy -> ll -> xy
lon0, lat0 = self.map0.grid2ll(x, y)
x0, y0 = self.map0.ll2grid(lon0, lat0)
self.assertAlmostEqual(x0, x, places=12)
self.assertAlmostEqual(y0, y, places=12)
# WGS84: xy -> ll -> xy
lon1, lat1 = self.map1.grid2ll(x, y)
x1, y1 = self.map1.ll2grid(lon1, lat1)
self.assertAlmostEqual(x1, x, places=9)
self.assertAlmostEqual(y1, y, places=9)
def test_angle(self):
"""angle = ylon - lon [rad]"""
lon, lat = 5.323333, 60.3925 # Bergen
angle = (self.ylon - lon)*pi/180
# sphere
x0, y0 = self.map0.ll2grid(lon, lat)
angle0 = self.map0.angle(x0, y0)
self.assertAlmostEqual(angle0, angle, places=15)
# WGS84
x1, y1 = self.map1.ll2grid(lon, lat)
angle1 = self.map1.angle(x1, y1)
self.assertAlmostEqual(angle1, angle, places=15)
    def test_scale(self):
        """scale = 1 at 60 deg

        The map scale factor should be numerically 1 at latitude 60 N
        (apparently the projection's true-scale latitude -- the WGS84 case
        only agrees to ~12 decimals, hence the looser tolerance).
        """
        lon, lat = -10.0, 60.0
        # sphere
        x0, y0 = self.map0.ll2grid(lon, lat)
        scale0 = self.map0.map_scale(x0, y0)
        self.assertAlmostEqual(scale0, 1.0, places=15)
        # WGS84
        x1, y1 = self.map1.ll2grid(lon, lat)
        scale1 = self.map1.map_scale(x1, y1)
        self.assertAlmostEqual(scale1, 1.0, places=12)
if __name__ == '__main__':
unittest.main()
|
c-oreills/pyFaceGraph | src/facegraph/url_operations.py | Python | unlicense | 2,148 | 0.003724 | import urllib
import urlparse
def get_path(url):
    """Return the path component of `url`."""
    scheme, host, path, query, fragment = urlparse.urlsplit(url)
    return path
def get_host(url):
    """Return the host (netloc) component of `url`."""
    scheme, host, path, query, fragment = urlparse.urlsplit(url)
    return host
def add_path(url, new_path):
    """Append `new_path` to the path component of `url` and return the
    resulting url, inserting exactly one '/' between the existing path
    and the appended segment.
    """
    scheme, host, path, query, fragment = urlparse.urlsplit(url)
    suffix = new_path.lstrip('/')
    separator = '' if path.endswith('/') else '/'
    combined = path + separator + suffix
    return urlparse.urlunsplit([scheme, host, combined, query, fragment])
def _query_param(key, value):
    """Return (key, value) with the value normalized to a UTF-8 encoded
    byte string (Python 2 semantics):

    - unicode values are used as-is,
    - byte strings are assumed to already be UTF-8 and are decoded first,
    - anything else is converted via unicode().
    """
    if isinstance(value, unicode):
        pass
    elif isinstance(value, str):
        value = value.decode('utf-8')
    else:
        value = unicode(value)
    return key, value.encode('utf-8')
def _make_query_tuples(params):
    # Accept either a mapping (anything with .items()) or a single
    # (key, value) pair, and normalize to a list of UTF-8 encoded tuples.
    if hasattr(params, 'items'):
        return [_query_param(*param) for param in params.items()]
    else:
        return [_query_param(*params)]
def add_query_params(url, params):
    """Append `params` to the url's query string; existing parameters are
    kept even when a key repeats. Delegates to update_query_params with
    update=False.
    """
    return update_query_params(url, params, update=False)
def update_query_params(url, params, update=True):
    """Given a url and a tuple or dict of parameters, return
    a url that includes the parameters as a properly formatted
    query string.
    If update is True, change any existing values to new values
    given in params.
    """
    scheme, host, path, query, fragment = urlparse.urlsplit(url)
    # urlparse.parse_qsl gives back url-decoded byte strings. Leave these as
    # they are: they will be re-urlencoded below
    query_bits = [(k, v) for k, v in urlparse.parse_qsl(query)]
    if update:
        # dict semantics: a later value for the same key replaces the earlier one.
        # NOTE(review): converting to a dict drops duplicate keys and (on py2)
        # does not preserve parameter order -- confirm this is acceptable.
        query_bits = dict(query_bits)
        query_bits.update(_make_query_tuples(params))
    else:
        # append-only: duplicates of existing keys are kept
        query_bits.extend(_make_query_tuples(params))
    query = urllib.urlencode(query_bits)
    return urlparse.urlunsplit([scheme, host, path, query, fragment])
|
drawquest/drawquest-web | website/sentry.conf.py | Python | bsd-3-clause | 4,246 | 0.004946 | import sys
import os.path
import os
def add_to_python_path(path):
if path not in sys.path:
sys.path.append(path)
add_to_python_path('/var/canvas/website')
from settings_sentry_common import *
from configuration import Config
CONF_ROOT = os.path.dirname(__file__)
DATABASES = {
'default': {
# You can swap out the engine for MySQL easily by changing this value
# to ``django.db.backends.mysql`` or to PostgreSQL with
# ``django.db.backends.postgresql_psycopg2``
# If you change this, you'll also need to install the appropriate python
# package: psycopg2 (Postgres) or mysql-python
'ENGINE': 'django.db.backends.mysql',
'NAME': 'sentry',
'USER': 'sentry',
'PASSWORD': 'fakepassword',
'HOST': '',
'PORT': '3306',
'OPTIONS': {
# http://stackoverflow.com/questions/11853141/foo-objects-getid-none-returns-foo-instance-sometimes
'init_command': 'SET SQL_AUTO_IS_NULL=0;',
},
}
}
# If you're expecting any kind of real traffic on Sentry, we highly recommend configuring
# the CACHES and Redis settings
# You'll need to install the required dependencies for Memcached:
# pip install python-memcached
#
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
'LOCATION': ['sentry.foo.example:11211'],
}
}
# Buffers (combined with queueing) act as an intermediate layer between the database and
# the storage API. They will greatly improve efficiency on large numbers of the same events
# being sent to the API in a short amount of time.
#SENTRY_USE_QUEUE = True
## For more information on queue options, see the documentation for Celery:
## http://celery.readthedocs.org/en/latest/
#BROKER_URL = 'redis://localhost:6379'
## You'll need to install the required dependencies for Redis buffers:
## pip install redis hiredis nydus
##
#SENTRY_BUFFER = 'sentry.buffer.redis.RedisBuffer'
#SENTRY_REDIS_OPTIONS = {
# 'hosts': {
# 0: {
# 'host': '127.0.0.1',
# 'port': 6379,
# }
# }
#}
# You should configure the absolute URI to Sentry. It will attempt to guess it if you don't
# but proxies may interfere with this.
SENTRY_URL_PREFIX = 'https://sentry.example.com' # No trailing slash!
PRODUCTION = bool(os.path.exists('/etc/canvas'))
PRODUCTION_DEBUG = bool(os.path.exists('/etc/canvas/debug'))
debug = not PRODUCTION or PRODUCTION_DEBUG
SENTRY_WEB_OPTIONS = {
'workers': 3, # the number of gunicorn workers
'secure_scheme_headers': {'X-FORWARDED-PROTO': 'https'},
'logfile': '/var/canvas/website/run/sentry.gunicorn.log',
'loglevel': 'debug',
'debug': debug,
'daemon': not debug,
'cpu_count': lambda: os.sysconf('SC_NPROCESSORS_ONLN'),
'bind': '0.0.0.0:9005',
}
# Mail server configuration
# For more information check Django's documentation:
# https://docs.djangoproject.com/en/1.3/topics/email/?from=olddocs#e-mail-backends
EMAIL_BACKEND = 'django_se | s.SESBackend'
# Used for the django_ses e-mail backend
AWS_ACCESS_KEY_ID = Config['aws']['access_key']
AWS_SECRET_ACCESS_KEY = Config['aws']['secret_key']
DKIM_SELECTOR = 'amazonses'
DKIM_DOMAIN = 'example.com'
DKIM_PRIVATE_KEY_PATH = '/etc/canvas/dkim.private.key'
DKIM_PRIVATE_KEY = o | pen(DKIM_PRIVATE_KEY_PATH).read() if os.path.exists(DKIM_PRIVATE_KEY_PATH) else None
AWS_SES_VERIFY_BOUNCE_SIGNATURES = True
# Domains that are trusted when retrieving the certificate
# used to sign bounce messages.
AWS_SNS_BOUNCE_CERT_TRUSTED_DOMAINS = ['amazonaws.com', 'amazon.com']
DEFAULT_FROM_EMAIL = "passwordreset@example.com"
# http://twitter.com/apps/new
# It's important that input a callback URL, even if its useless. We have no idea why, consult Twitter.
TWITTER_CONSUMER_KEY = ''
TWITTER_CONSUMER_SECRET = ''
# http://developers.facebook.com/setup/
FACEBOOK_APP_ID = ''
FACEBOOK_API_SECRET = ''
# http://code.google.com/apis/accounts/docs/OAuth2.html#Registering
GOOGLE_OAUTH2_CLIENT_ID = ''
GOOGLE_OAUTH2_CLIENT_SECRET = ''
# https://github.com/settings/applications/new
GITHUB_APP_ID = ''
GITHUB_API_SECRET = ''
# https://trello.com/1/appKey/generate
TRELLO_API_KEY = ''
TRELLO_API_SECRET = ''
|
ScottWales/dmpr | test/test_dmpr.py | Python | apache-2.0 | 776 | 0.005155 | #!/usr/bin/env python
# Copyright | 2017 ARC Centre of Excellence for Climate Systems Science
# author: Scott Wales <scott.wales@unimelb.edu.au>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on | an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
from dmpr import *
def test_dmpr():
    """Placeholder smoke test: passes as long as the dmpr package imports."""
    pass
|
lucidlylogicole/pallet | pallet/__init__.py | Python | mit | 41 | 0.073171 | __version | __='0.2.0'
fro | m .pallet import * |
jlinn/pylastica | pylastica/query/simplequerystring.py | Python | apache-2.0 | 1,809 | 0.002211 | __author__ = 'Joe Linn'
from pylastica.query.abstract import Abstra | ctQuery
class SimpleQueryString(AbstractQuery):
    """Simple query string query.

    @see http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/query-dsl-simple-query-string-query.html
    """
    # allowed values for set_default_operator()
    OPERATOR_AND = "and"
    OPERATOR_OR = "or"
    def __init__(self, query, fields=None):
        """
        @param query: query string (see linked documentation for syntax)
        @type query: str
        @param fields: optional list of fields to query
        @type fields: list of str
        """
        super(SimpleQueryString, self).__init__()
        self.set_query(query)
        if fields is not None:
            self.set_fields(fields)
    def set_query(self, query):
        """
        Set the querystring for this query
        @param query: see linked documentation for querystring syntax
        @type query: str
        @return: this query
        @rtype: self
        """
        return self.set_param("query", query)
    def set_fields(self, fields):
        """
        @param fields: the fields on which to perform this query. Defaults to index.query.default_field.
        @type fields: list of str
        @return: this query
        @rtype: self
        """
        return self.set_param("fields", fields)
    def set_default_operator(self, operator):
        """
        Set the default operator to use if no explicit operator is defined in the query string
        @param operator: see OPERATOR_* constants for options
        @type operator: str
        @return: this query
        @rtype: self
        """
        return self.set_param("default_operator", operator)
    def set_analyzer(self, analyzer):
        """
        Set the analyzer used to analyze each term of the query
        @param analyzer: analyzer name
        @type analyzer: str
        @return: this query
        @rtype: self
        """
        return self.set_param("analyzer", analyzer)
joshuar500/mcratesv1 | mcratesv1_project/mcratesv1_project/settings.py | Python | apache-2.0 | 2,870 | 0 | """
Django settings for mcratesv1_project project.
Generated by 'django-admin startproject' using Django 1.8.6.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
from os.path import abspath, dirname, join, normpath
from sys import path
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '3p2#l8#l*$5t$q5@9cyic2wr&g!4ykrmpsk8(lml74=az4oxq9'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'blog_rankings',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.mi | ddleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middle | ware.security.SecurityMiddleware',
)
ROOT_URLCONF = 'mcratesv1_project.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'mcratesv1_project.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'mcratesdb',
'USER': 'alexjosh',
'PASSWORD': 'rinconholigores',
'HOST': 'localhost',
'PORT': '',
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
STATIC_URL = '/static/'
|
Mariopilot808/BLNExporter | __init__.py | Python | gpl-2.0 | 1,407 | 0.004264 | """
/*******BLNexp
A QGIS plugin
Exports to Vector data to SurferBLN
-------------------
begin : 2014-16-09
copyright : (C) 2014 by Mario Noriega
email : mario.noriegaf@gmail.com
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public Lice | nse as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* | *
***************************************************************************/
This script initializes the plugin, making it known to QGIS.
"""
def name():
    """Plugin name shown in the QGIS plugin manager."""
    plugin_name = "BLN Exporter"
    return plugin_name
def description():
    """One-line summary of what the plugin does."""
    summary = "Converts Vector layers to BLN"
    return summary
def version():
    """Human-readable plugin version string."""
    version_string = "Version 0.3"
    return version_string
def icon():
    """Relative path of the plugin's icon file."""
    icon_path = "icon.png"
    return icon_path
def qgisMinimumVersion():
    """Lowest QGIS version the plugin supports."""
    minimum_version = "2.0"
    return minimum_version
def classFactory(iface):
    """QGIS plugin entry point: build and return the plugin instance."""
    # Imported lazily so the implementation module is only loaded when
    # QGIS actually activates the plugin.
    from BLNExp import blnexp
    plugin = blnexp(iface)
    return plugin
|
shmish/core-assess | core/migrations/0005_auto_20170725_1458.py | Python | mpl-2.0 | 556 | 0 | # -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2017-07-25 21:58
from __future__ import unicode_literals
from django.d | b import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration for the ``core`` app: removes the
    ``Evidence.student`` field and changes the default of
    ``Evidence.done`` to ``True``."""
    # Must be applied after the previous core migration.
    dependencies = [
        ('core', '0004_auto_20170716_0901'),
    ]
    operations = [
        migrations.RemoveField(
            model_name='evidence',
            name='student',
        ),
        migrations.AlterField(
            model_name='evidence',
            name='done',
            field=models.BooleanField(default=True),
        ),
    ]
|
CCI-MOC/GUI-Backend | api/v2/views/__init__.py | Python | apache-2.0 | 2,314 | 0.000864 | # flake8: noqa
from .account import AccountViewSet
from .allocation import AllocationViewSet
from .allocation_source import AllocationSourceViewSet
from .boot_script import BootScriptViewSet
from .base import BaseRequestViewSet
from .credential import CredentialViewSet
from .email_template import EmailTemplateViewSet
from .event import EventViewSet
from .group import GroupViewSet
from .help_link import HelpLinkViewSet
from .identity import IdentityViewSet
from .identity_membership import IdentityMembershipViewSet
from .image import ImageViewSet
from .image_bookmark import ImageBookmarkViewSet
from .image_tag import ImageTagViewSet
from .image_version import ImageVersionViewSet
from .image_version_boot_script import ImageVersionBootScriptViewSet
from .image_version_membership import ImageVersionMembershipViewSet
from .image_version_license import ImageVersionLicenseViewSet
from .instance import InstanceViewSet
from .instance_tag import InstanceTagViewSet
from .instance_history import InstanceStatusHistoryViewSet
from .instance_action import InstanceActionViewSet
from .license import LicenseViewSet
from .link import ExternalLinkViewSet
from .machine_request import MachineRequestViewSet
from .maintenance_record import MaintenanceRecordViewSet
from .platform_type import PlatformTypeViewSet
from .project import ProjectViewSet
from .project_ap | plication import ProjectApplicationViewSet
from .project_link import ProjectExternalLinkViewSet
from .project_instance import ProjectInstanceViewSet
from .project_volume import ProjectVolumeViewSet
from .provid | er import ProviderViewSet
from .provider_machine import ProviderMachineViewSet
from .provider_type import ProviderTypeViewSet
from .quota import QuotaViewSet
from .resource_request import ResourceRequestViewSet
from .reporting import ReportingViewSet
from .size import SizeViewSet
from .status_type import StatusTypeViewSet
from .email import InstanceSupportEmailViewSet, VolumeSupportEmailViewSet, FeedbackEmailViewSet, ResourceEmailViewSet
from .emulate import TokenEmulateViewSet, SessionEmulateViewSet
from .tag import TagViewSet
from .token import TokenViewSet
from .token_update import TokenUpdateViewSet
from .user import UserViewSet
from .volume import VolumeViewSet
from .metric import MetricViewSet
from .ssh_key import SSHKeyViewSet
|
horazont/aioxmpp | aioxmpp/roster/service.py | Python | lgpl-3.0 | 25,618 | 0.000156 | ########################################################################
# File name: service.py
# This file is part of: aioxmpp
#
# LICENSE
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program. If not, see
# <http://www.gnu.org/licenses/>.
#
########################################################################
import asyncio
import logging
import aioxmpp.service
import aioxmpp.callbacks as callbacks
import aioxmpp.errors as errors
import aioxmpp.stanza as stanza
import aioxmpp.structs as structs
from . import xso as roster_xso
logger = logging.getLogger(__name__)
_Sentinel = object()
class Item:
    """
    A single mutable roster entry.

    :param jid: Bare :class:`~aioxmpp.JID` of the peer this entry
        describes.

    The remaining attributes mirror the wire-level roster item:

    .. attribute:: name

       Optional display name of the entry.

    .. attribute:: groups

       A :class:`set` holding the names of the groups the entry is in.

    .. attribute:: subscription

       One of ``"none"``, ``"to"``, ``"from"`` and ``"both"``.

    .. attribute:: ask

       The ``ask`` attribute of the roster entry.

    .. attribute:: approved

       The ``approved`` attribute of the roster entry.

    Entries round-trip through JSON-compatible dictionaries
    (:meth:`export_as_json` / :meth:`update_from_json`) and can be
    created from or synchronised with :class:`.xso.Item` payloads
    (:meth:`from_xso_item` / :meth:`update_from_xso_item`).

    .. note::

        Do not confuse this with the XSO :class:`.xso.Item`.
    """

    def __init__(self, jid, *,
                 approved=False,
                 ask=None,
                 subscription="none",
                 name=None,
                 groups=()):
        super().__init__()
        self.jid = jid
        self.name = name
        self.ask = ask
        self.approved = approved
        self.subscription = subscription
        self.groups = set(groups)

    def update_from_xso_item(self, xso_item):
        """
        Overwrite every attribute except :attr:`jid` with the values
        found on the given :class:`.xso.Item` instance.
        """
        self.name = xso_item.name
        self.ask = xso_item.ask
        self.approved = xso_item.approved
        self.subscription = xso_item.subscription
        self.groups = {group.name for group in xso_item.groups}

    @classmethod
    def from_xso_item(cls, xso_item):
        """
        Return a fresh entry whose :attr:`jid` comes from `xso_item` and
        whose remaining attributes are filled in via
        :meth:`update_from_xso_item`.
        """
        entry = cls(xso_item.jid)
        entry.update_from_xso_item(xso_item)
        return entry

    def export_as_json(self):
        """
        Serialise everything except the JID into a :mod:`json`-compatible
        dictionary.  Attributes still holding their defaults are omitted;
        ``subscription`` is always present.
        """
        data = {"subscription": self.subscription}
        if self.name:
            data["name"] = self.name
        if self.ask is not None:
            data["ask"] = self.ask
        if self.approved:
            data["approved"] = self.approved
        if self.groups:
            data["groups"] = sorted(self.groups)
        return data

    def update_from_json(self, data):
        """
        Restore all attributes (except :attr:`jid`) from a dictionary in
        the format produced by :meth:`export_as_json`; missing keys fall
        back to their defaults.
        """
        self.subscription = data.get("subscription", "none")
        self.approved = bool(data.get("approved", False))
        self.ask = data.get("ask", None)
        self.name = data.get("name", None)
        self.groups = set(data.get("groups", []))
class RosterClient(aioxmpp.service.Service):
"""
A roster client :class:`aioxmpp.service.Service`.
The interaction with a roster service happens mainly by accessing the
attributes holding the state and using the events to be notified of state
changes:
Attributes for accessing the roster:
.. attribute:: items
A dictionary mapping :class:`~aioxmpp.JID` instances to corresponding
:class:`Item` instances.
.. attribute:: groups
A dictionary which allows group-based access to :class:`Item`
instances. The dictionaries keys are the names of the groups, the values
        are :class:`set` instances, which hold the :class:`Item` instances in
        that group.
        At no point one can observe empty :class:`set` instances in this
dictionary.
The :class:`Item` instances stay the same, as long as they represent the
identical roster entry on the remote side. That is, if the name or
subscription state are changed in the server side roster, the :class:`Item`
instance stays the same, but the attributes are mutated. However, if the
entry is removed from the server roster and re-added later for the same
JID, it will be a different :class:`Item` instance.
Signals:
.. signal:: on_initial_roster_received()
Fires when the initial roster has been received. Note that if roster
versioning is used, the initial roster may not be up-to-date. The server
is allowed to tell the client to re-use its local state and deliver
changes using roster pushes. In that case, the
:meth:`on_initial_roster_received` event fires immediately, so that the
user sees whatever roster has been set up for versioning before the
stream was established; updates pushed by the server are delivered using
the normal events.
The roster data has already been imported at the time the callback is
fired.
Note that the initial roster is diffed against whatever is in the local
store and events are fired just like for normal push updates. Thus, in
general, you won’t need this signal; it might be better to listen for
the events below.
.. signal:: on_entry_added(item)
Fires when an `item` has been added to the roster. The attributes of the
`item` are up-to-date when this callback fires.
When the event fires, the bookkeeping structures are already updated.
This implies that :meth:`on_group_added` is called before
:meth:`on_entry_added` if the entry adds a new group.
.. signal:: on_entry_name_changed(item)
Fires when a roster update changed the name of the `item`. The new name
is already applied to the `item`.
.. signal:: on_entry_subscription_state_changed(item)
Fires when a roster update changes any of the :attr:`Item.subscription`,
:attr:`Item.ask` or :attr:`Item.approved` attributes. The new values are
already applied to `item`.
The event always fires once per update, even if the update changes
more than one of the above attributes.
.. signal:: on_entry_added_to_group(item, group_name)
Fires when an update adds an `item` to a group. The :attr:`Item.groups`
attribute is al |
carthagecollege/django-djsani | djsani/medical_history/waivers/views.py | Python | bsd-3-clause | 3,562 | 0 | # -*- coding: utf-8 -*-
"""Forms for medical history."""
import datetime
import os
from django.conf import settings
from django.contrib.auth.decorators import login_required
from django.db.models import Q
from django.http import HttpResponseRedirect
from django.shortcuts import render
from django.urls import reve | rse_lazy
from djsani.core.utils import get_manager
from djsani.medical_history.waivers.models import Sicklecell
from djtools.fields import NEXT_YEAR
from djtools.fields.helpers import handle_uploaded_file
fro | m djtools.utils.convert import str_to_class
@login_required
def index(request, stype, wtype):
    """Create or update a medical-history waiver for the logged-in student.

    URL parameters:
        stype: student type, used together with wtype to build the
            manager status column name (``cc_<stype>_<wtype>_waiver``).
        wtype: waiver type (e.g. ``sicklecell``); selects the form class
            ``<Wtype>Form`` and the model class ``<Wtype>``.

    GET renders the waiver form; a valid POST saves (or, for sickle cell,
    updates) the waiver and redirects to the success page.  Unknown
    waiver types and already-submitted waivers redirect home.
    """
    cid = request.user.id
    # user student type and waiver type to build table name
    table = 'cc_{0}_{1}_waiver'.format(stype, wtype)
    # check for student manager record
    manager = get_manager(cid)
    # form name
    fname = str_to_class(
        'djsani.medical_history.waivers.forms',
        '{0}Form'.format(wtype.capitalize()),
    )
    sicklecell = None
    if wtype == 'sicklecell':
        # existing record for the current cycle (or one with proof on file)
        sicklecell = Sicklecell.objects.using('informix').filter(
            college_id=cid,
        ).filter(
            Q(proof=1) | Q(created_at__gte=settings.START_DATE),
        ).first()
    # check to see if they already submitted this form.
    # redirect except for sicklecell waiver
    # or wtype does not return a form class (fname)
    status = getattr(manager, table, None)
    if (manager and status and wtype != 'sicklecell') or not fname:
        return HttpResponseRedirect(reverse_lazy('home'))
    if request.method == 'POST':
        form = fname(request.POST, request.FILES)
        if form.is_valid():
            cd = form.cleaned_data
            # deal with file uploads
            if request.FILES.get('results_file'):
                folder = 'sicklecell/{0}/{1}'.format(
                    cid, manager.created_at.strftime('%Y%m%d%H%M%S%f'),
                )
                phile = handle_uploaded_file(
                    request.FILES['results_file'],
                    os.path.join(settings.UPLOADS_DIR, folder),
                )
                cd['results_file'] = '{0}/{1}'.format(folder, phile)
            if sicklecell:
                # update student's sicklecell waiver record
                cd['updated_at'] = datetime.datetime.now()
                for key, form_val in cd.items():
                    setattr(sicklecell, key, form_val)
                sicklecell.save(using='informix')
            else:
                # insert
                cd['college_id'] = cid
                cd['manager_id'] = manager.id
                model = str_to_class(
                    'djsani.medical_history.waivers.models',
                    wtype.capitalize(),
                )
                waiver = model(**cd)
                waiver.save(using='informix')
                # update the manager
                setattr(manager, table, True)
                manager.save(using='informix')
            return HttpResponseRedirect(reverse_lazy('waiver_success'))
    else:
        # NOTE(review): the form *class* (not an instance) is handed to the
        # template; Django's template engine calls callables when rendering,
        # so this works, but instantiating here would be clearer — confirm
        # before changing.
        form = fname
    # check for a valid template or redirect home
    try:
        template = 'medical_history/waivers/{0}_{1}.html'.format(stype, wtype)
        os.stat(os.path.join(settings.BASE_DIR, 'templates', template))
    except Exception:
        return HttpResponseRedirect(reverse_lazy('home'))
    return render(
        request,
        template,
        {
            'form': form,
            'next_year': NEXT_YEAR,
            'student': sicklecell,
        },
    )
|
idaholab/raven | framework/CodeInterfaces/Saphire/SaphireData.py | Python | apache-2.0 | 4,508 | 0.007542 |
# Copyright 2017 Battelle Energy Alliance, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import numpy as np
import os
import copy
import re
"""
Created on July 12, 2018
@author: wangc
"""
def _deweird(s):
"""
Sometimes numpy loadtxt returns strings like "b'stuff'"
This converts them to "stuff"
@ In, s, str, possibly weird string
@ Out, _deweird, str, possibly less weird string
"""
if type(s) == str and s.startswith("b'") and s.endswith("'"):
return s[2:-1]
else:
return s
class SaphireData:
  """
    Class that parses output of SAPHIRE outputs and write a RAVEN compatible CSV
  """
  def __init__(self, outFiles):
    """
      Initialize the class
      @ In, outFiles, list, list of (fileName, fileType) pairs of SAPHIRE output files
      @ Out, None
    """
    self.headerNames = [] # list of variable names in SAPHIRE output files
    self.outData = [] # list of variable values in SAPHIRE output files
    for outFile in outFiles:
      outFileName, outFileType = outFile[0], outFile[1]
      if outFileType == 'uncertainty':
        headers, data = self.getUncertainty(outFileName)
        self.headerNames.extend(headers)
        self.outData.extend(data)
      elif outFileType == 'importance':
        headers, data = self.getImportance(outFileName)
        self.headerNames.extend(headers)
        self.outData.extend(data)
      elif outFileType == 'quantiles':
        print("File:", outFileName, "with type", outFileType, "is not implemented yet! Skipping")
      else:
        raise IOError('The output file', outFileName, 'with type', outFileType, 'is not supported yet!')

  def _readTableFile(self, outName):
    """
      Shared parser for SAPHIRE tabular CSV outputs: the uncertainty and
      importance files use the same layout (two title rows, a header row,
      then one row per entry with the entry name in the first column).
      @ In, outName, string, the name of output file
      @ Out, (headerNames, outData), tuple, list of '<entry>_<column>' variable names and list of float values
    """
    headerNames = []
    outData = []
    outFile = os.path.abspath(os.path.expanduser(outName))
    data = np.loadtxt(outFile, dtype=object, delimiter=',', skiprows=2)
    headers = data[0]
    for i in range(1, len(data)):
      # spaces are replaced with '~' so each name stays one CSV token
      name = _deweird(data[i, 0]).strip().replace(" ", "~")
      for j in range(1, len(headers)):
        header = _deweird(headers[j]).strip().replace(" ", "~")
        headerNames.append(name + '_' + header)
        outData.append(float(_deweird(data[i, j])))
    return headerNames, outData

  def getUncertainty(self, outName):
    """
      Method to extract the uncertainty information of Event Tree or Fault Tree from SAPHIRE output files
      @ In, outName, string, the name of output file
      @ Out, (headerNames, outData), tuple, list of output variable names and list of output variable values
    """
    # same file layout as the importance output; parsing is shared
    return self._readTableFile(outName)

  def getImportance(self, outName):
    """
      Method to extract the importance information of Fault Tree from SAPHIRE output files
      @ In, outName, string, the name of output file
      @ Out, (headerNames, outData), tuple, list of output variable names and list of output variable values
    """
    return self._readTableFile(outName)

  def writeCSV(self, output):
    """
      Print data into CSV format
      @ In, output, str, the name of output file (".csv" is appended if missing)
      @ Out, None
    """
    outName = output.strip() if output.endswith('csv') else output.strip() + ".csv"
    # create string for header names
    headerString = ",".join(self.headerNames)
    # FIXME: numpy.savetxt ignores the delimiter for 1D array_like input, so
    # the data is wrapped in a list to be written as one comma-separated row.
    with open(outName, mode='w+b') as outObj:
      np.savetxt(outObj, [self.outData], delimiter=',', header=headerString, comments='')
|
CCI-MOC/GUI-Backend | api/v2/views/account.py | Python | apache-2.0 | 771 | 0 | from django.contrib.auth.models import AnonymousUser
from core.models import Identity
from api.v2.serializers.post import AccountSerializer
from api.v2.views.base import AdminAuthVie | wSet
class AccountViewSet(AdminAuthViewSet):
    """
    API endpoint for creating accounts (identities).

    Read verbs are deliberately excluded via ``http_method_names``, so
    only POST/HEAD/OPTIONS/TRACE are accepted.
    """
    lookup_fields = ("id", "uuid")
    queryset = Identity.objects.all()
    serializer_class = AccountSerializer
    http_method_names = ['post', 'head', 'options', 'trace']

    def get_queryset(self):
        """
        Restrict the queryset to the requesting user's own identities;
        anonymous requests get an empty queryset.
        """
        user = self.request.user
        # isinstance (not type() ==) is the idiomatic check and also
        # covers AnonymousUser subclasses.
        if isinstance(user, AnonymousUser):
            return Identity.objects.none()
        return user.current_identities()
|
huggingface/pytorch-transformers | src/transformers/models/fsmt/__init__.py | Python | apache-2.0 | 1,962 | 0.002039 | # flake8: noqa
# There's no way to ignore "F401 '...' imported but unused" warnings in this
# module, but to preserve other warnings. So, don't check this module at all.
# Copyright 2020 The HuggingFace Team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import TYPE_CHECKING
from ...file_utils import _BaseLazyModule, is_torch_available
_import_structure = {
    "configuration_fsmt": ["FSMT_PRETRAINED_CONFIG_ARCHIVE_MAP", "FSMTConfig"],
    "tokenization_fsmt": ["FSMTTokenizer"],
}
# Torch-backed model classes are only advertised when torch is installed.
if is_torch_available():
    _import_structure["modeling_fsmt"] = ["FSMTForConditionalGeneration", "FSMTModel", "PretrainedFSMTModel"]
# Static type checkers resolve the real imports below; at runtime the module
# is replaced by a lazy proxy that imports submodules on first access.
if TYPE_CHECKING:
    from .configuration_fsmt import FSMT_PRETRAINED_CONFIG_ARCHIVE_MAP, FSMTConfig
    from .tokenization_fsmt import FSMTTokenizer
    if is_torch_available():
        from .modeling_fsmt import FSMTForConditionalGeneration, FSMTModel, PretrainedFSMTModel
else:
    import importlib
    import os
    import sys
    class _LazyModule(_BaseLazyModule):
        """
        Module class that surfaces all objects but only performs associated imports when the objects are requested.
        """
        __file__ = globals()["__file__"]
        __path__ = [os.path.dirname(__file__)]
        def _get_module(self, module_name: str):
            return importlib.import_module("." + module_name, self.__name__)
    # Replace this module in sys.modules with the lazy proxy.
    sys.modules[__name__] = _LazyModule(__name__, _import_structure)
|
listen1/listen1 | listen1/app.py | Python | mit | 555 | 0.003604 | #!/usr/bin/e | nv python
import tornado.httpserver
import tornado.ioloop
import tornado.web
from tornado.options import options
from settings import settings
from urls import url_patterns
class TornadoBoilerplate(tornado.web.Application):
    """Tornado application preloaded with the project's routes and settings."""
    def __init__(self):
        # Register the URL patterns and apply the shared settings dict.
        super(TornadoBoilerplate, self).__init__(url_patterns, **settings)
def main():
    """Build the application, bind the configured port and serve forever."""
    app = TornadoBoilerplate()
    http_server = tornado.httpserver.HTTPServer(app)
    # NOTE(review): options.port is assumed to be registered elsewhere via
    # tornado.options (presumably on the settings import) — confirm.
    http_server.listen(options.port)
    # Blocks the process servicing requests until interrupted.
    tornado.ioloop.IOLoop.instance().start()
if __name__ == "__main__":
    main()
|
brownnrl/moneyguru | core/gui/general_ledger_table.py | Python | gpl-3.0 | 2,408 | 0.002076 | # Copyright 2019 Virgil Dupras
#
# This software is licensed under the "GPLv3" License as described in the "LICENSE" file,
# which should be included with this package. The terms are also available at
# http://www.gnu.org/licenses/gpl-3.0.html
from core.trans import trget
from .column import Column
from ..model.sort import ACCOUNT_SORT_KEY
from .table import Row
from .entry_table_base import EntryTableBase, EntryTableRow, TotalRow, PreviousBalanceRow
trcol = trget('columns')
class AccountRow(Row):
    """Section-header row showing only an account's name; inserted by
    GeneralLedgerTable._fill before that account's entry rows."""
    def __init__(self, table, account):
        Row.__init__(self, table)
        # keep both the account object and a plain display string
        self.account = account
        self.account_name = account.name
class GeneralLedgerRow(EntryTableRow):
    """Entry row whose running balance is only shown for balance sheet
    accounts; other accounts get an empty balance cell."""
    @property
    def balance(self):
        if self.account.is_balance_sheet_account():
            # Call the parent property's getter explicitly, since this
            # property overrides it.
            return EntryTableRow.balance.fget(self)
        else:
            return ''
class GeneralLedgerTable(EntryTableBase):
    """Table listing every account's entries, grouped under one
    AccountRow header per account."""
    SAVENAME = 'GeneralLedgerTable'
    COLUMNS = [
        Column('status', display=''),
        Column('date', display=trcol("Date")),
        Column('reconciliation_date', display=trcol("Reconciliation Date"), visible=False, optional=True),
        Column('checkno', display=trcol("Check #"), visible=False, optional=True),
        Column('description', display=trcol("Description"), optional=True),
        Column('payee', display=trcol("Payee"), visible=False, optional=True),
        Column('transfer', display=trcol("Transfer")),
        Column('debit', display=trcol("Debit")),
        Column('credit', display=trcol("Credit")),
        Column('balance', display=trcol("Balance")),
    ]
    ENTRY_ROWCLASS = GeneralLedgerRow

    # --- Override
    def _fill(self):
        # One header row per account, followed by that account's entries.
        # Accounts with no rows are omitted entirely.
        for account in sorted(self.document.accounts, key=ACCOUNT_SORT_KEY):
            account_rows = self._get_account_rows(account)
            if not account_rows:
                continue
            self.append(AccountRow(self, account))
            for entry_row in account_rows:
                self.append(entry_row)

    def _get_current_account(self):
        selected = self.selected_row
        return None if selected is None else selected.account

    def _get_totals_currency(self):
        return self.document.default_currency

    # --- Public
    def is_account_row(self, row):
        return isinstance(row, AccountRow)

    def is_bold_row(self, row):
        return isinstance(row, (TotalRow, PreviousBalanceRow))
|
youtube/cobalt | third_party/inspector_protocol/code_generator.py | Python | bsd-3-clause | 25,953 | 0.007976 | #!/usr/bin/env python
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os.path
import sys
import argparse
import collections
import functools
import re
import copy
try:
import json
except ImportError:
import simplejson as json
import pdl
# Py2/Py3 compatibility: ``unicode`` does not exist on Python 3, so probe
# for it and fall back to a pass-through shim (extra args are ignored).
try:
  unicode
except NameError:
  # Define unicode for Py3
  def unicode(s, *_):
    return s
# Path handling for libraries and templates
# Paths have to be normalized because Jinja uses the exact template path to
# determine the hash used in the cache filename, and we need a pre-caching step
# to be concurrency-safe. Use absolute path because __file__ is absolute if
# module is imported, and relative if executed directly.
# If paths differ between pre-caching and individual file compilation, the cache
# is regenerated, which causes a race condition and breaks concurrent build,
# since some compile processes will try to read the partially written cache.
module_path, module_filename = os.path.split(os.path.realpath(__file__))
def read_config():
  """
  Parse command-line arguments and the JSON config file; return
  (jinja_dir, config_file, config) where config is a nested namedtuple
  with every optional field filled in from built-in defaults and
  --config_value overrides.  Exits the process with status 1 on any
  parsing failure.
  """
  # pylint: disable=W0703
  # Deserialize JSON into nested namedtuples, resolving "path" values
  # relative to the config file and "output" values relative to the
  # output base directory.
  def json_to_object(data, output_base, config_base):
    def json_object_hook(object_dict):
      items = [(k, os.path.join(config_base, v) if k == "path" else v)
               for (k, v) in object_dict.items()]
      items = [(k, os.path.join(output_base, v) if k == "output" else v)
               for (k, v) in items]
      keys, values = list(zip(*items))
      # 'async' is a keyword since Python 3.7.
      # Avoid namedtuple(rename=True) for compatibility with Python 2.X.
      keys = tuple('async_' if k == 'async' else k for k in keys)
      return collections.namedtuple('X', keys)(*values)
    return json.loads(data, object_hook=json_object_hook)
  # Recursively merge dotted default keys (".a.b": value) into the parsed
  # config tuple, adding fields that the config file omitted.
  def init_defaults(config_tuple, path, defaults):
    keys = list(config_tuple._fields) # pylint: disable=E1101
    values = [getattr(config_tuple, k) for k in keys]
    for i in range(len(keys)):
      if hasattr(values[i], "_fields"):
        values[i] = init_defaults(values[i], path + "." + keys[i], defaults)
    for optional in defaults:
      if optional.find(path + ".") != 0:
        continue
      optional_key = optional[len(path) + 1:]
      if optional_key.find(".") == -1 and optional_key not in keys:
        keys.append(optional_key)
        values.append(defaults[optional])
    return collections.namedtuple('X', keys)(*values)
  try:
    cmdline_parser = argparse.ArgumentParser()
    cmdline_parser.add_argument("--output_base", type=unicode, required=True)
    cmdline_parser.add_argument("--jinja_dir", type=unicode, required=True)
    cmdline_parser.add_argument("--config", type=unicode, required=True)
    cmdline_parser.add_argument("--config_value", default=[], action="append")
    cmdline_parser.add_argument(
        "--inspector_protocol_dir", type=unicode, required=True,
        help=("directory with code_generator.py and C++ encoding / binding "
              "libraries, relative to the root of the source tree."))
    arg_options = cmdline_parser.parse_args()
    jinja_dir = arg_options.jinja_dir
    output_base = arg_options.output_base
    config_file = arg_options.config
    config_base = os.path.dirname(config_file)
    config_values = arg_options.config_value
    inspector_protocol_dir = arg_options.inspector_protocol_dir.lstrip('/')
  except Exception:
    # Work with python 2 and 3 http://docs.python.org/py3k/howto/pyporting.html
    exc = sys.exc_info()[1]
    sys.stderr.write("Failed to parse command-line arguments: %s\n\n" % exc)
    exit(1)
  try:
    config_json_file = open(config_file, "r")
    config_json_string = config_json_file.read()
    config_partial = json_to_object(config_json_string, output_base,
                                    config_base)
    config_json_file.close()
    defaults = {
        ".use_snake_file_names": False,
        ".use_title_case_methods": False,
        ".imported": False,
        ".imported.export_macro": "",
        ".imported.export_header": False,
        ".imported.header": False,
        ".imported.package": False,
        ".imported.options": False,
        ".protocol.export_macro": "",
        ".protocol.export_header": False,
        ".protocol.options": False,
        ".protocol.file_name_prefix": "",
        ".exported": False,
        ".exported.export_macro": "",
        ".exported.export_header": False,
        ".lib": False,
        ".lib.export_macro": "",
        ".lib.export_header": False,
        ".crdtp": False,
        ".crdtp.dir": os.path.join(inspector_protocol_dir, "crdtp"),
        ".crdtp.namespace": "crdtp",
    }
    # --config_value key=value entries override/extend the defaults.
    for key_value in config_values:
      parts = key_value.split("=")
      if len(parts) == 2:
        defaults["." + parts[0]] = parts[1]
    return (jinja_dir, config_file, init_defaults(config_partial, "", defaults))
  except Exception:
    # Work with python 2 and 3 http://docs.python.org/py3k/howto/pyporting.html
    exc = sys.exc_info()[1]
    sys.stderr.write("Failed to parse config file: %s\n\n" % exc)
    exit(1)
# ---- Begin of utilities exposed to generator ----
def to_title_case(name):
  """Upper-case the first character of `name`; the rest is untouched."""
  head, tail = name[:1], name[1:]
  return head.upper() + tail
def dash_to_camelcase(word):
  """Turn a dashed identifier into CamelCase; a leading dash maps to a
  "Negative" prefix (e.g. "-webkit-x" -> "NegativeWebkitX")."""
  if word[0] == "-":
    remainder, prefix = word[1:], "Negative"
  else:
    remainder, prefix = word, ""
  # Empty pieces (from doubled dashes) are preserved as literal dashes.
  pieces = (to_title_case(part) or "-" for part in remainder.split("-"))
  return prefix + "".join(pieces)
def to_snake_case(name):
  """Convert a camelCase/PascalCase name to lower snake_case."""
  # Insert an underscore at every lower-or-digit -> upper boundary, then
  # lower-case the whole string.
  with_underscores = re.sub(r"([a-z0-9])([A-Z])", r"\1_\2", name,
                            count=sys.maxsize)
  return with_underscores.lower()
def to_method_case(config, name):
  """Apply the project's configured method naming convention to `name`."""
  return to_title_case(name) if config.use_title_case_methods else name
def join_arrays(dict, keys):
  """Concatenate dict[key] for each key (in order) that is present in dict."""
  # NOTE: the parameter name shadows the builtin ``dict``; kept for API
  # compatibility with existing callers.
  return [element for key in keys if key in dict for element in dict[key]]
def format_include(config, header, file_name=None):
  """Render an #include target: join `file_name`.h onto `header` if given,
  quote bare paths, and apply snake_case when the project uses it."""
  if file_name is not None:
    header = "{0}/{1}.h".format(header, file_name)
  # Already-quoted (") and system (<) includes are left untouched.
  if header[0] not in "<\"":
    header = "\"" + header + "\""
  if config.use_snake_file_names:
    header = to_snake_case(header)
  return header
def format_domain_include(config, header, file_name):
  """format_include() with the configured protocol file-name prefix applied."""
  prefixed_name = config.protocol.file_name_prefix + file_name
  return format_include(config, header, prefixed_name)
def to_file_name(config, file_name):
  """Map a generated file name onto the project's on-disk convention."""
  if not config.use_snake_file_names:
    return file_name
  # Snake-case projects also prefer the ".cc" extension over ".cpp".
  return to_snake_case(file_name).replace(".cpp", ".cc")
# ---- End of utilities exposed to generator ----
def initialize_jinja_env(jinja_dir, cache_dir, config):
  """
  Create the Jinja2 environment used for template rendering, with the
  bytecode cache rooted at cache_dir and the custom name-casing filters
  installed.  jinja_dir is prepended to sys.path so jinja2 can be found.
  """
  # pylint: disable=F0401
  sys.path.insert(1, os.path.abspath(jinja_dir))
  import jinja2
  jinja_env = jinja2.Environment(
      loader=jinja2.FileSystemLoader(module_path),
      # Bytecode cache is not concurrency-safe unless pre-cached:
      # if pre-cached this is read-only, but writing creates a race condition.
      bytecode_cache=jinja2.FileSystemBytecodeCache(cache_dir),
      keep_trailing_newline=True,  # newline-terminate generated files
      lstrip_blocks=True,  # so can indent control flow tags
      trim_blocks=True)
  jinja_env.filters.update({
      "to_title_case": to_title_case,
      "dash_to_camelcase": dash_to_camelcase,
      "to_method_case": functools.partial(to_method_case, config)})
  jinja_env.add_extension("jinja2.ext.loopcontrols")
  return jinja_env
def create_imported_type_definition(domain_name, type, imported_namespace):
  """Build the C++ type-expansion table for a type imported from another
  generated namespace."""
  # pylint: disable=W0622
  qualified = "%s::%s::API::%s" % (imported_namespace, domain_name, type["id"])
  owned = "std::unique_ptr<%s>" % qualified
  pointer = qualified + "*"
  return {
      "return_type": owned,
      "pass_type": owned,
      "to_raw_type": "%s.get()",
      "to_pass_type": "std::move(%s)",
      "to_rvalue": "std::move(%s)",
      "type": owned,
      "raw_type": qualified,
      "raw_pass_type": pointer,
      "raw_return_type": pointer,
  }
def create_user_type_definition(domain_name, type):
# pylint: disable=W0622
return {
"return_type": "std::unique_ptr<protocol::%s::%s>" % (
|
guptaankita/python-novaclient | novaclient/tests/functional/test_volumes_api.py | Python | apache-2.0 | 3,419 | 0 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import time
import uuid
import six.moves
from novaclient import exceptions
from novaclient.tests.functional import base
def wait_for_delete(test, name, thing, get_func):
    """Delete `thing`, then poll `get_func` (1s apart, up to 60 tries)
    until it raises NotFound; fail the test if it never disappears."""
    thing.delete()
    attempts = 0
    while attempts < 60:
        try:
            thing = get_func(thing.id)
        except exceptions.NotFound:
            return
        time.sleep(1)
        attempts += 1
    test.fail('%s %s still not deleted after 60s' % (name, thing.id))
class TestVolumesAPI(base.ClientTestBase):
    """Functional smoke test of the volumes API against a live endpoint."""
    def test_volumes_snapshots_types_create_get_list_delete(self):
        """Exercise volume, snapshot and volume-type create/get/list/delete,
        interleaving ``servers.list()`` calls to check the client still
        works after hitting the volume endpoints.  Deletes are exercised
        via addCleanup, in reverse registration order so the snapshot is
        gone before its volume is removed.
        """
        # Create a volume
        volume = self.client.volumes.create(1)
        # Make sure we can still list servers after using the volume endpoint
        self.client.servers.list()
        # This cleanup tests volume delete
        self.addCleanup(volume.delete)
        # Wait for the volume to become available
        for x in six.moves.range(60):
            volume = self.client.volumes.get(volume.id)
            if volume.status == 'available':
                break
            elif volume.status == 'error':
                self.fail('Volume %s is in error state' % volume.id)
            time.sleep(1)
        else:
            self.fail('Volume %s not available after 60s' % volume.id)
        # List all volumes
        self.client.volumes.list()
        # Create a volume snapshot
        snapshot = self.client.volume_snapshots.create(volume.id)
        # This cleanup tests volume snapshot delete. The volume
        # can't be deleted until the dependent snapshot is gone
        self.addCleanup(wait_for_delete, self, 'Snapshot', snapshot,
                        self.client.volume_snapshots.get)
        # Wait for the snapshot to become available
        for x in six.moves.range(60):
            snapshot = self.client.volume_snapshots.get(snapshot.id)
            if snapshot.status == 'available':
                break
            elif snapshot.status == 'error':
                self.fail('Snapshot %s is in error state' % snapshot.id)
            time.sleep(1)
        else:
            self.fail('Snapshot %s not available after 60s' % snapshot.id)
        # List snapshots
        self.client.volume_snapshots.list()
        # List servers again to make sure things are still good
        self.client.servers.list()
        # Create a volume type
        # TODO(melwitt): Use a better random name
        name = str(uuid.uuid4())
        volume_type = self.client.volume_types.create(name)
        # This cleanup tests volume type delete
        self.addCleanup(self.client.volume_types.delete, volume_type.id)
        # Get the volume type
        volume_type = self.client.volume_types.get(volume_type.id)
        # List all volume types
        self.client.volume_types.list()
        # One more servers list
        self.client.servers.list()
|
twilio/twilio-python | tests/integration/api/v2010/account/test_short_code.py | Python | mit | 6,758 | 0.003403 | # coding=utf-8
r"""
This code was generated by
\ / _ _ _| _ _
| (_)\/(_)(_|\/| |(/_ v1.0.0
/ /
"""
from tests import IntegrationTestCase
from tests.holodeck import Request
from twilio.base.exceptions import TwilioException
from twilio.http.response import Response
class ShortCodeTestCase(IntegrationTestCase):
    """Integration tests for the 2010-04-01 ShortCodes endpoints (fetch,
    update, list) against the holodeck mock server.

    NOTE(review): stray ``|`` corruption markers inside
    ``test_fetch_request`` (after ``Request(`` and in the middle of the
    short-code SID in the URL) have been removed; the SID pattern now
    matches the intact occurrences elsewhere in this class.
    """

    def test_fetch_request(self):
        """Fetching a short code issues a GET against the instance URL."""
        self.holodeck.mock(Response(500, ''))

        with self.assertRaises(TwilioException):
            self.client.api.v2010.accounts("ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
                                 .short_codes("SCXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX").fetch()

        self.holodeck.assert_has_request(Request(
            'get',
            'https://api.twilio.com/2010-04-01/Accounts/ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/SMS/ShortCodes/SCXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX.json',
        ))

    def test_fetch_response(self):
        """A mocked 200 payload is deserialized into a short code instance."""
        self.holodeck.mock(Response(
            200,
            '''
            {
                "account_sid": "ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
                "api_version": "2010-04-01",
                "date_created": null,
                "date_updated": null,
                "friendly_name": "API_CLUSTER_TEST_SHORT_CODE",
                "short_code": "99990",
                "sid": "SCaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
                "sms_fallback_method": "POST",
                "sms_fallback_url": null,
                "sms_method": "POST",
                "sms_url": null,
                "uri": "/2010-04-01/Accounts/ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/SMS/ShortCodes/SCaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.json"
            }
            '''
        ))

        actual = self.client.api.v2010.accounts("ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
                                      .short_codes("SCXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX").fetch()

        self.assertIsNotNone(actual)

    def test_update_request(self):
        """Updating a short code issues a POST against the instance URL."""
        self.holodeck.mock(Response(500, ''))

        with self.assertRaises(TwilioException):
            self.client.api.v2010.accounts("ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
                                 .short_codes("SCXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX").update()

        self.holodeck.assert_has_request(Request(
            'post',
            'https://api.twilio.com/2010-04-01/Accounts/ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/SMS/ShortCodes/SCXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX.json',
        ))

    def test_update_response(self):
        """A mocked 200 update payload deserializes successfully."""
        self.holodeck.mock(Response(
            200,
            '''
            {
                "account_sid": "ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
                "api_version": "2010-04-01",
                "date_created": null,
                "date_updated": null,
                "friendly_name": "API_CLUSTER_TEST_SHORT_CODE",
                "short_code": "99990",
                "sid": "SCaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
                "sms_fallback_method": "POST",
                "sms_fallback_url": null,
                "sms_method": "POST",
                "sms_url": null,
                "uri": "/2010-04-01/Accounts/ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/SMS/ShortCodes/SCaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.json"
            }
            '''
        ))

        actual = self.client.api.v2010.accounts("ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
                                      .short_codes("SCXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX").update()

        self.assertIsNotNone(actual)

    def test_list_request(self):
        """Listing short codes issues a GET against the list URL."""
        self.holodeck.mock(Response(500, ''))

        with self.assertRaises(TwilioException):
            self.client.api.v2010.accounts("ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
                                 .short_codes.list()

        self.holodeck.assert_has_request(Request(
            'get',
            'https://api.twilio.com/2010-04-01/Accounts/ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/SMS/ShortCodes.json',
        ))

    def test_read_full_response(self):
        """A one-element short code page deserializes successfully."""
        self.holodeck.mock(Response(
            200,
            '''
            {
                "end": 0,
                "first_page_uri": "/2010-04-01/Accounts/ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/SMS/ShortCodes.json?Page=0&PageSize=50",
                "last_page_uri": "/2010-04-01/Accounts/ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/SMS/ShortCodes.json?Page=0&PageSize=50",
                "next_page_uri": null,
                "num_pages": 1,
                "page": 0,
                "page_size": 50,
                "previous_page_uri": null,
                "short_codes": [
                    {
                        "account_sid": "ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
                        "api_version": "2010-04-01",
                        "date_created": null,
                        "date_updated": null,
                        "friendly_name": "API_CLUSTER_TEST_SHORT_CODE",
                        "short_code": "99990",
                        "sid": "SCaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
                        "sms_fallback_method": "POST",
                        "sms_fallback_url": null,
                        "sms_method": "POST",
                        "sms_url": null,
                        "uri": "/2010-04-01/Accounts/ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/SMS/ShortCodes/SCaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.json"
                    }
                ],
                "start": 0,
                "total": 1,
                "uri": "/2010-04-01/Accounts/ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/SMS/ShortCodes.json"
            }
            '''
        ))

        actual = self.client.api.v2010.accounts("ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
                                      .short_codes.list()

        self.assertIsNotNone(actual)

    def test_read_empty_response(self):
        """An empty short code page deserializes successfully."""
        self.holodeck.mock(Response(
            200,
            '''
            {
                "end": 0,
                "first_page_uri": "/2010-04-01/Accounts/ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/SMS/ShortCodes.json?Page=0&PageSize=50",
                "last_page_uri": "/2010-04-01/Accounts/ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/SMS/ShortCodes.json?Page=0&PageSize=50",
                "next_page_uri": null,
                "num_pages": 1,
                "page": 0,
                "page_size": 50,
                "previous_page_uri": null,
                "short_codes": [],
                "start": 0,
                "total": 1,
                "uri": "/2010-04-01/Accounts/ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/SMS/ShortCodes.json"
            }
            '''
        ))

        actual = self.client.api.v2010.accounts("ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
                                      .short_codes.list()

        self.assertIsNotNone(actual)
|
mbdriscoll/asp-old | tests/em_ahc_test.py | Python | bsd-3-clause | 9,721 | 0.012036 | import unittest
import pylab as pl
import matplotlib as mpl
import itertools
import sys
import math
import timeit
import copy
from em import *
def generate_synthetic_data(N):
    """Generate a deterministic 2-D synthetic data set for GMM tests.

    Draws three equally sized groups of N//3 points each (two linearly
    transformed Gaussian clouds plus one shifted cloud) and stacks them.

    N -- requested number of samples; rounded down to a multiple of 3.
    Returns a float32 ndarray of shape (3 * (N // 3), 2).
    """
    np.random.seed(0)  # fixed seed: reproducible data across runs
    C = np.array([[0., -0.7], [3.5, .7]])
    C1 = np.array([[-0.4, 1.7], [0.3, .7]])
    # Use floor division: N/3 yields a float under Python 3, which breaks
    # np.random.randn; // behaves identically for ints on Python 2.
    third = N // 3
    Y = np.r_[
        np.dot(np.random.randn(third, 2), C1),
        np.dot(np.random.randn(third, 2), C),
        np.random.randn(third, 2) + np.array([3, 3]),
    ]
    return Y.astype(np.float32)
class EMTester(object):
    def __init__(self, from_file, variant_param_spaces, device_id, num_subps, names_of_backends):
        """Set up the test harness data set and plotting state.

        from_file -- if truthy, load observations from 'IS1000a.csv';
                     otherwise generate 1000 synthetic 2-D points.
        variant_param_spaces -- ASP variant parameter spaces forwarded to GMM.
        device_id -- compute device index forwarded to GMM.
        num_subps -- number of matplotlib subplots to use.
        names_of_backends -- ASP backend names forwarded to GMM.
        """
        self.results = {}
        self.variant_param_spaces = variant_param_spaces
        self.device_id = device_id
        self.num_subplots = num_subps
        self.names_of_backends = names_of_backends
        # Matplotlib subplot id seed: rows = num_subps/2, 2 columns,
        # starting at subplot index 21.
        self.plot_id = num_subps/2*100 + 21
        if from_file:
            # NOTE(review): assumes IS1000a.csv is in the working directory
            # and contains float32-compatible columns -- confirm.
            self.X = np.ndfromtxt('IS1000a.csv', delimiter=',', dtype=np.float32)
            self.N = self.X.shape[0]
            self.D = self.X.shape[1]
        else:
            N = 1000
            self.X = generate_synthetic_data(N)
            self.N = self.X.shape[0]
            self.D = self.X.shape[1]
    def new_gmm(self, M):
        """Create a fresh GMM with M components over the D-dimensional data."""
        self.M = M
        self.gmm = GMM(self.M, self.D, names_of_backends_to_use=self.names_of_backends, variant_param_spaces=self.variant_param_spaces, device_id=self.device_id)
    def new_gmm_list(self, M, k):
        """Create k independent GMMs of M components each (one per initial
        cluster for agglomerative clustering)."""
        self.M = M
        self.init_num_clusters = k
        self.gmm_list = [GMM(self.M, self.D, names_of_backends_to_use=self.names_of_backends, variant_param_spaces=self.variant_param_spaces, device_id=self.device_id) for i in range(k)]
    def test_speech_ahc(self):
        """Speaker-diarization-style agglomerative clustering.

        Trains one GMM per initial cluster, then repeatedly: rescores all
        observations, reassigns 250-sample segments by majority vote,
        retrains, and merges the pair of GMMs with the best positive BIC
        merge score.  Stops when no merge improves BIC or one GMM remains.
        """
        # Get the events, divide them into an initial k clusters and train each GMM on a cluster
        per_cluster = self.N/self.init_num_clusters
        init_training = zip(self.gmm_list,np.vsplit(self.X, range(per_cluster, self.N, per_cluster)))
        for g, x in init_training:
            g.train(x)
        # Perform hierarchical agglomeration based on BIC scores
        best_BIC_score = 1.0
        while (best_BIC_score > 0 and len(self.gmm_list) > 1):
            print "Num GMMs: %d, last score: %d" % (len(self.gmm_list), best_BIC_score)
            num_clusters = len(self.gmm_list)
            # Resegment data based on likelihood scoring
            likelihoods = self.gmm_list[0].score(self.X)
            for g in self.gmm_list[1:]:
                likelihoods = np.column_stack((likelihoods, g.score(self.X)))
            most_likely = likelihoods.argmax(axis=1)
            # Across 2.5 secs of observations, vote on which cluster they should be associated with
            iter_training = {}
            for i in range(250, self.N, 250):
                votes = np.zeros(num_clusters)
                for j in range(i-250, i):
                    votes[most_likely[j]] += 1
                #print votes.argmax()
                iter_training.setdefault(self.gmm_list[votes.argmax()],[]).append(self.X[i-250:i,:])
            # Handle the trailing partial segment (fewer than 250 samples)
            votes = np.zeros(num_clusters)
            for j in range((self.N/250)*250, self.N):
                votes[most_likely[j]] += 1
            #print votes.argmax()
            iter_training.setdefault(self.gmm_list[votes.argmax()],[]).append(self.X[(self.N/250)*250:self.N,:])
            # Retrain the GMMs on the clusters for which they were voted most likely and
            # make a list of candidates for merging
            iter_bic_list = []
            for g, data_list in iter_training.iteritems():
                cluster_data = data_list[0]
                for d in data_list[1:]:
                    cluster_data = np.concatenate((cluster_data, d))
                cluster_data = np.ascontiguousarray(cluster_data)
                g.train(cluster_data)
                iter_bic_list.append((g,cluster_data))
            # Keep any GMMs that lost all votes in candidate list for merging
            for g in self.gmm_list:
                if g not in iter_training.keys():
                    iter_bic_list.append((g,None))
            # Score all pairs of GMMs using BIC
            best_merged_gmm = None
            best_BIC_score = 0.0
            merged_tuple = None
            for gmm1idx in range(len(iter_bic_list)):
                for gmm2idx in range(gmm1idx+1, len(iter_bic_list)):
                    g1, d1 = iter_bic_list[gmm1idx]
                    g2, d2 = iter_bic_list[gmm2idx]
                    score = 0.0
                    # Score on whichever side(s) actually have data
                    if d1 is not None or d2 is not None:
                        if d1 is not None and d2 is not None:
                            new_gmm, score = compute_distance_BIC(g1, g2, np.concatenate((d1, d2)))
                        elif d1 is not None:
                            new_gmm, score = compute_distance_BIC(g1, g2, d1)
                        else:
                            new_gmm, score = compute_distance_BIC(g1, g2, d2)
                    print "Comparing BIC %d with %d: %f" % (gmm1idx, gmm2idx, score)
                    if score > best_BIC_score:
                        best_merged_gmm = new_gmm
                        merged_tuple = (g1, g2)
                        best_BIC_score = score
            # Merge the winning candidate pair if it's desirable to do so
            if best_BIC_score > 0.0:
                self.gmm_list.remove(merged_tuple[0])
                self.gmm_list.remove(merged_tuple[1])
                self.gmm_list.append(best_merged_gmm)
        print "Final size of each cluster:", [ g.M for g in self.gmm_list]
def test_cytosis_ahc(self):
M_start = self.M
M_end = 0
plot_counter = 2
for M in reversed(range(M_end, M_start)):
print "======================== AHC loop: M = ", M+1, " ==========================="
self.gmm.train(self.X)
#plotting
means = self.gmm.components.means.reshape((self.gmm.M, self.gmm.D))
covars = self.gmm.components.covars.reshape((self.gmm.M, self.gmm.D, self.gmm.D))
Y = self.gmm.predict(self.X)
if(sel | f.plot_id % 10 <= self.num_subplots):
self.results['_'.join(['ASP v',str(self.plot_id-(100*self.num_subplots+11)),'@',str(self.gmm.D),str(self.gmm.M),str(self.N)])] = (str(self.plot_id), copy.deepcopy(means), copy. | deepcopy(covars), copy.deepcopy(Y))
self.plot_id += 1
#find closest components and merge
if M > 0: #don't merge if there is only one component
gmm_list = []
for c1 in range(0, self.gmm.M):
for c2 in range(c1+1, self.gmm.M):
new_component, dist = self.gmm.compute_distance_rissanen(c1, c2)
gmm_list.append((dist, (c1, c2, new_component)))
#print "gmm_list after append: ", gmm_list
#compute minimum distance
min_c1, min_c2, min_component = min(gmm_list, key=lambda gmm: gmm[0])[1]
self.gmm.merge_components(min_c1, min_c2, min_component)
    def time_cytosis_ahc(self):
        """Timing variant of test_cytosis_ahc: identical train/merge loop
        but without recording any plotting data."""
        M_start = self.M
        M_end = 0
        for M in reversed(range(M_end, M_start)):
            print "======================== AHC loop: M = ", M+1, " ==========================="
            self.gmm.train(self.X)
            #find closest components and merge
            if M > 0: #don't merge if there is only one component
                gmm_list = []
                for c1 in range(0, self.gmm.M):
                    for c2 in range(c1+1, self.gmm.M):
                        new_component, dist = self.gmm.compute_distance_rissanen(c1, c2)
                        gmm_list.append((dist, (c1, c2, new_component)))
                #compute minimum distance
                min_c1, min_c2, min_component = min(gmm_list, key=lambda gmm: gmm[0])[1]
                self.gmm.merge_components(min_c1, min_c2, min_component)
def plot(self):
for t, r in self.results.iteritems():
splot = pl.subplot(r[0], title=t)
color_iter = itertools.cycle (['r', 'g', 'b', 'c'])
Y_ = r[3]
for i, (mean, co |
TribeMedia/synapse | synapse/rest/key/v2/remote_key_resource.py | Python | apache-2.0 | 8,864 | 0.000113 | # Copyright 2015, 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from synapse.http.server import request_handler, respond_with_json_bytes
from synapse.http.servlet import parse_integer, parse_json_object_from_request
from synapse.api.errors import SynapseError, Codes
from synapse.crypto.keyring import KeyLookupError
from twisted.web.resource import Resource
from twisted.web.server import NOT_DONE_YET
from twisted.internet import defer
from io import BytesIO
import logging
logger = logging.getLogger(__name__)
class RemoteKey(Resource):
    """HTTP resource for retrieving the TLS certificate and NACL signature
verification keys for a collection of servers. Checks that the reported
X.509 TLS certificate matches the one used in the HTTPS connection. Checks
that the NACL signature for the remote server is valid. Returns a dict of
JSON signed by both the remote server and by this server.
Supports individual GET APIs and a bulk query POST API.
    Requests:
GET /_matrix/key/v2/query/remote.server.example.com HTTP/1.1
GET /_matrix/key/v2/query/remote.server.example.com/a.key.id HTTP/1.1
POST /_matrix/v2/query HTTP/1.1
Content-Type: application/json
{
"server_keys": {
"remote.server.example.com": {
"a.key.id": {
"minimum_valid_until_ts": 1234567890123
}
}
}
}
Response:
HTTP/1.1 200 OK
Content-Type: application/json
{
"server_keys": [
{
"server_name": "remote.server.example.com"
"valid_until_ts": # posix timestamp
"verify_keys": {
"a.key.id": { # The identifier for a key.
key: "" # base64 encoded verification key.
}
}
"old_verify_keys": {
"an.old.key.id": { # The identifier for an old key.
key: "", # base64 encoded key
"expired_ts": 0, # when the key stop being used.
}
}
"tls_fingerprints": [
{ "sha256": # fingerprint }
]
"signatures": {
"remote.server.example.com": {...}
"this.server.example.com": {...}
}
}
]
}
"""
isLeaf = True
    def __init__(self, hs):
        """Pull the keyring, datastore, version string and clock off the
        homeserver object for later use."""
        self.keyring = hs.get_keyring()
        self.store = hs.get_datastore()
        self.version_string = hs.version_string
        self.clock = hs.get_clock()
    def render_GET(self, request):
        # Kick off the async handler; NOT_DONE_YET tells twisted the
        # response will be written later.
        self.async_render_GET(request)
        return NOT_DONE_YET
    @request_handler()
    @defer.inlineCallbacks
    def async_render_GET(self, request):
        """Handle GET /query/<server>[/<key_id>] by translating the URL path
        into the same query dict shape used by the bulk POST API.

        Raises SynapseError 404 for any other path shape.
        """
        if len(request.postpath) == 1:
            # GET .../query/<server>: all keys for that server
            server, = request.postpath
            query = {server: {}}
        elif len(request.postpath) == 2:
            # GET .../query/<server>/<key_id>: a single key, optionally with
            # a minimum_valid_until_ts query parameter
            server, key_id = request.postpath
            minimum_valid_until_ts = parse_integer(
                request, "minimum_valid_until_ts"
            )
            arguments = {}
            if minimum_valid_until_ts is not None:
                arguments["minimum_valid_until_ts"] = minimum_valid_until_ts
            query = {server: {key_id: arguments}}
        else:
            raise SynapseError(
                404, "Not found %r" % request.postpath, Codes.NOT_FOUND
            )
        yield self.query_keys(request, query, query_remote_on_cache_miss=True)
    def render_POST(self, request):
        # Kick off the async handler; NOT_DONE_YET tells twisted the
        # response will be written later.
        self.async_render_POST(request)
        return NOT_DONE_YET
    @request_handler()
    @defer.inlineCallbacks
    def async_render_POST(self, request):
        """Handle the bulk query POST: the JSON body's "server_keys" dict is
        passed straight through to query_keys."""
        content = parse_json_object_from_request(request)

        query = content["server_keys"]

        yield self.query_keys(request, query, query_remote_on_cache_miss=True)
@defer.inlineCallbacks
def query_keys(self, request, query, query_remote_on_cache_miss=False):
logger.info("Handling query for keys %r", query)
store_queries = []
for server_name, key_ids in query.items():
if not key_ids:
key_ids = (None,)
for key_id in key_ids:
store_queries.append((server_name, key_id, None))
cached = yield self.store.get_server_keys_json(store_queries)
json_results = set()
time_now_ms = self.clock.time_msec()
cache_misses = dict()
for (server_name, key_id, from_server), results in cached.items():
results = [
(result["ts_added_ms"], result) for result in results
]
if not results and key_id is not None:
cache_misses.setdefault(server_name, set()).add(key_id)
continue
if key_id is not None:
ts_added_ms, most_recent_result = max(results)
ts_valid_until_ms = most_recent_result["ts_valid_until_ms"]
req_key = query.get(server_name, {}).get(key_id, {})
req_valid_until = req_key.get("minimum_valid_until_ts")
miss = False
if req_valid_until is not None:
if ts_valid_until_ms < req_valid_until:
logger.debug(
"Cached response for %r/%r is older than requested"
": valid_until (%r) < minimum_valid_until (%r)",
server_name, key_id,
ts_valid_until_ms, req_valid_until
)
miss = True
else:
logger.debug(
"Cached response for %r/%r is newer than requested"
": valid_until (%r) >= minimum_valid_until (%r)",
server_name, key_id,
ts_valid_until_ms, req_valid_until
)
elif (ts_added_ms + ts_valid_until_ms) / 2 < time_now_ms:
logger.debug(
"Cached response for %r/%r is too old"
": (added (%r) + valid_until (%r)) / 2 < now (%r)",
server_name, key_id,
ts_added_ms, ts_valid_until_ms, time_now_ms
)
# We more than half way through the lifetime of the
# response. We should fetch a fresh copy.
miss = True
else:
logger.debug(
"Cached response for %r/%r is still valid"
": (added (%r) + valid_until (%r)) | / 2 < now (%r)",
server_name, key_id,
ts_added_ms, ts_valid_until_ms, time_now_ms
)
if miss:
cache_misses.setdefault(server_name, set()).add | (key_id)
json_results.add(bytes(most_recent_result["key_json"]))
else:
for ts_added, result in results:
json_results.add(bytes(result["key_json"]))
if cache_misses and query_remote_on_cache_miss:
for server_name, key_ids in cache_misses.items():
try:
yield self.keyring.get_server_verify_key_v2_direct(
server_name, key_ids
)
except KeyLookupError as e:
logger.info("Failed to fetch key: %s", e)
except:
logger.exception |
graycarl/iamhhb | manage.py | Python | mit | 812 | 0 | #!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
    # Standard Django management entry point for the iamhhb project.
    # NOTE(review): stray "|" corruption markers inside the "# noqa" comment
    # and the ImportError message string have been removed, restoring the
    # stock django-admin manage.py wording.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "iamhhb.settings")
    try:
        from django.core.management import execute_from_command_line
    except ImportError:
        # The above import may fail for some other reason. Ensure that the
        # issue is really that Django is missing to avoid masking other
        # exceptions on Python 2.
        try:
            import django  # noqa
        except ImportError:
            raise ImportError(
                "Couldn't import Django. Are you sure it's installed and "
                "available on your PYTHONPATH environment variable? Did you "
                "forget to activate a virtual environment?"
            )
        raise
    execute_from_command_line(sys.argv)
|
djoproject/pyshell | pyshell/register/profile/command.py | Python | gpl-3.0 | 5,109 | 0 | #!/usr/bin/env python -t
# -*- coding: utf-8 -*-
# Copyright (C) 2016 Jonathan Delvaux <pyshell@djoproject.net>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General | Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# NOTE(review): a stray "|" corruption marker inside the
# pyshell.register.profile.exception module path has been removed.
from pyshell.command.command import MultiCommand
from pyshell.register.profile.default import DefaultProfile
from pyshell.register.profile.exception import RegisterException
from pyshell.utils.raises import raiseIfInvalidKeyList
from pyshell.utils.raises import raiseIfNotInstance
class CommandLoaderProfile(DefaultProfile):
    """Registration profile that collects commands and traversal stops for a
    command loader.

    Commands are stored keyed by their full key tuple; a temporary prefix
    (tempPrefix) can be set to prepend keys during a batch of registrations,
    while ``prefix`` is prepended only when listing content.
    """
    def __init__(self, root_profile):
        """Create an empty profile bound to *root_profile*.

        root_profile -- parent profile handed to DefaultProfile.
        """
        # TODO (issue #90) remove unload_priority/load_priority
        # Why load priority at 120.0 ?
        # because parameter priority is 100.0 and the mltries use by command
        # is created by parameter loader, if command loader is executed
        # before the parameter loader, an error will occur.
        # It will be fixed as soon as the command will have their own
        # manager.
        # Why unload priority at 80.0 ?
        # commands need to be unload before the destruction of the mltries
        # in the environment loader.
        DefaultProfile.__init__(self,
                                root_profile,
                                unload_priority=80.0,
                                load_priority=120.0)

        self.prefix = ()          # key prefix used when listing content
        self.cmdDict = {}         # full key tuple -> MultiCommand
        self.tempPrefix = None    # optional prefix applied at registration time
        self.stopList = set()     # key tuples where tree traversal stops
        self.loadedCommand = None
        self.loadedStopTraversal = None

    def setTempPrefix(self, key_list):
        """Set the temporary prefix prepended to keys in addCmd and
        addStopTraversal.  Raises RegisterException on an invalid key list."""
        raiseIfInvalidKeyList(key_list,
                              RegisterException,
                              self.__class__.__name__,
                              "setTempPrefix")
        self.tempPrefix = key_list

    def unsetTempPrefix(self):
        """Clear the temporary prefix."""
        self.tempPrefix = None

    def getTempPrefix(self):
        """Return the current temporary prefix (or None)."""
        return self.tempPrefix

    def setPrefix(self, key_list):
        """Set the listing prefix.  Raises RegisterException on an invalid
        key list."""
        raiseIfInvalidKeyList(key_list,
                              RegisterException,
                              self.__class__.__name__,
                              "setPrefix")
        self.prefix = key_list

    def getPrefix(self):
        """Return the listing prefix."""
        return self.prefix

    def addStopTraversal(self, key_list):
        """Register a stop-traversal point at key_list (prefixed with the
        temporary prefix if one is set)."""
        raiseIfInvalidKeyList(key_list,
                              RegisterException,
                              self.__class__.__name__,
                              "addStopTraversal")

        if self.tempPrefix is not None:
            stop = list(self.tempPrefix)
            stop.extend(key_list)
        else:
            stop = key_list

        self.stopList.add(tuple(stop))

    def hasStopTraversal(self, key_list):
        """Return True if a stop-traversal point is registered at exactly
        key_list (the temporary prefix is NOT applied here)."""
        raiseIfInvalidKeyList(key_list,
                              RegisterException,
                              self.__class__.__name__,
                              "hasStopTraversal")
        return tuple(key_list) in self.stopList

    def addCmd(self, key_list, cmd):
        """Register MultiCommand *cmd* under key_list (prefixed with the
        temporary prefix if set) and return it.

        Raises RegisterException if the key list or cmd is invalid, or if
        the final key is already registered.
        """
        raiseIfInvalidKeyList(key_list,
                              RegisterException,
                              self.__class__.__name__,
                              "addCmd")

        raiseIfNotInstance(cmd,
                           "cmd",
                           MultiCommand,
                           RegisterException,
                           "addCmd",
                           self.__class__.__name__)

        if self.tempPrefix is not None:
            prefix = list(self.tempPrefix)
            prefix.extend(key_list)
        else:
            prefix = key_list

        final_cmd_key = tuple(prefix)

        if final_cmd_key in self.cmdDict:
            excmsg = ("(CommandLoader) addCmd, the following key already"
                      " exists: '" + str(" ".join(final_cmd_key) + "'"))
            raise RegisterException(excmsg)

        self.cmdDict[final_cmd_key] = cmd

        return cmd

    def hasCmd(self, key_list):
        """Return True if a command is registered at exactly key_list (the
        temporary prefix is NOT applied here)."""
        raiseIfInvalidKeyList(key_list,
                              RegisterException,
                              self.__class__.__name__,
                              "hasCmd")
        return tuple(key_list) in self.cmdDict

    def getContentList(self):
        """Return human-readable strings describing every registered command
        and stop-traversal point, each prefixed with ``prefix``."""
        ret = []

        for cmd_key in self.cmdDict.keys():
            final_cmd_key = list(self.prefix)
            final_cmd_key.extend(cmd_key)
            ret.append(" ".join(final_cmd_key))

        for stop in self.stopList:
            final_stop = list(self.prefix)
            final_stop.extend(stop)

            ret.append("stop traversal @ '%s'" % " ".join(final_stop))

        return ret
|
ton212/TW-Interaction-Pull | main.py | Python | mit | 2,254 | 0.005768 | """
TW Interaction Puller
by @tonsai
License : MIT License
"""
# NOTE(review): stray "|" corruption markers inside the OAuthHandler and
# config import lines have been removed.
from tweepy.streaming import StreamListener
from tweepy import OAuthHandler
from tweepy import Stream
from config import config, pushbullet_token
from pushbullet import PushBullet
import json
class StdOutListener(StreamListener):
    """ A stream listener that prints received tweets to stdout and forwards
    interactions (mentions, retweets, DMs) to PushBullet.
    Relies on module-level ``config`` (with "username") and the PushBullet
    client ``p`` created in the __main__ block.
    """
    def on_status(self, status):
        """ Filter if this tweet mentioned you, then push it to PushBullet """
        if config["username"] in status.text:
            data = { "sender":status.author.screen_name.encode("utf-8"),
                     "text":status.text.encode("utf-8")
                   }
            # "R |" marks a received mention/retweet on stdout
            print "R | @" + data["sender"] + ": " + data["text"]
            self.push(data)
        return True

    def on_direct_message(self, data):
        """ Filter if this DM belongs to you, then push it to PushBullet """
        # NOTE(review): the ``data`` parameter (tweepy status) is shadowed
        # below by the dict built from it.
        if config["username"][1:] in data.direct_message["recipient_screen_name"]:
            data = { "sender": data.direct_message["sender_screen_name"].encode("utf-8"),
                     "recipient": data.direct_message["recipient_screen_name"].encode("utf-8"),
                     "text": data.direct_message['text'].encode("utf-8")
                   }
            # "D |" marks a received direct message on stdout
            print "D | @" + data["sender"] + ": " + data["text"]
            self.push(data)
        return True

    def push(self, data):
        """ Push the given data to PushBullet.
        The title depends on interaction type: DM (has "recipient"),
        retweet (text starts with "RT " and mentions you), or mention.
        """
        if "recipient" in data:
            msg_title = "New DM from @" + data["sender"]
        elif data["text"][:3] == "RT " and config["username"] in data["text"]:
            msg_title = "@" + data["sender"] + " retweeted your tweet."
        else:
            msg_title = "New mention from @" + data["sender"]
        msg_body = data["text"]
        p.push_note(msg_title, msg_body)
        return True
if __name__ == '__main__':
    # Wire up the listener, the PushBullet client and Twitter OAuth, then
    # consume the authenticated user's stream (blocks until interrupted).
    l = StdOutListener()
    p = PushBullet(pushbullet_token)
    auth = OAuthHandler(config["consumer_key"], config["consumer_secret"])
    auth.set_access_token(config["access_token"], config["access_token_secret"])
    stream = Stream(auth, l)
    stream.userstream()
|
DarthMaulware/EquationGroupLeaks | Leak #5 - Lost In Translation/windows/Resources/Dsz/PyScripts/Lib/dsz/mca/file/cmd/delete/errors.py | Python | unlicense | 1,652 | 0.00908 | # uncompyle6 version 2.9.10
# Python bytecode 2.7 (62211)
# Decompiled from: Python 3.6.0b2 (default, Oct 11 2016, 05:27:10)
# [GCC 6.2.0 20161005]
# Embedded file name: errors.py
import mcl.status
# Status codes for the file-delete command, offset from the framework base.
# NOTE(review): stray "|" corruption markers inside ERR_STAT_FAILED and the
# ERR_ENUM_FAILED message string have been removed.
ERR_SUCCESS = mcl.status.MCL_SUCCESS
ERR_INVALID_PARAM = mcl.status.framework.ERR_START
ERR_GET_FULL_PATH_FAILED = mcl.status.framework.ERR_START + 1
ERR_ENUM_FAILED = mcl.status.framework.ERR_START + 2
ERR_MARSHAL_FAILED = mcl.status.framework.ERR_START + 3
ERR_FILE_IS_A_DIR = mcl.status.framework.ERR_START + 4
ERR_DELETE_FAILED = mcl.status.framework.ERR_START + 5
ERR_API_FAILED = mcl.status.framework.ERR_START + 6
ERR_STAT_FAILED = mcl.status.framework.ERR_START + 7
ERR_DEL_PATH_UNC_AFTER_REBOOT = mcl.status.framework.ERR_START + 8
ERR_DONE_MAX_ENTRIES = mcl.status.framework.ERR_START + 9
ERR_NO_MAX_SPECIFIED = mcl.status.framework.ERR_START + 10
ERR_SEND_FAILED = mcl.status.framework.ERR_START + 11

# Human-readable message for each error code.
errorStrings = {ERR_INVALID_PARAM: 'Invalid parameter(s)',
 ERR_GET_FULL_PATH_FAILED: 'Failed to get full path for deletion',
 ERR_ENUM_FAILED: 'Failed to enumerate directory for matching files',
 ERR_MARSHAL_FAILED: 'Marshaling of deletion data failed',
 ERR_FILE_IS_A_DIR: 'Given file is a directory -- refusing deletion',
 ERR_DELETE_FAILED: 'Deletion failed',
 ERR_API_FAILED: 'Failed to get delete api',
 ERR_STAT_FAILED: 'Unable to check file',
 ERR_DEL_PATH_UNC_AFTER_REBOOT: 'Cannot do delete-after-reboot action with network paths.',
 ERR_DONE_MAX_ENTRIES: 'Delete completed due to exceeding maximum entries',
 ERR_NO_MAX_SPECIFIED: 'No Max Entries was given',
 ERR_SEND_FAILED: 'Failed to send back data'
 }
0vercl0k/rp | src/third_party/capstone/bindings/python/capstone/mips_const.py | Python | mit | 17,669 | 0 | # For Capstone Engine. AUTO-GENERATED FILE, DO NOT EDIT [mips_const.py]
MIPS_OP_INVALID = 0
MIPS_OP_REG = 1
MIPS_OP_IMM = 2
MIPS_OP_MEM = 3
MIPS_REG_INVALID = 0
MIPS_REG_PC = 1
MIPS_REG_0 = 2
MIPS_REG_1 = 3
MIPS_REG_2 = 4
MIPS_REG_3 = 5
MIPS_REG_4 = 6
MIPS_REG_5 = 7
MIPS_REG_6 = 8
MIPS_REG_7 = 9
MIPS_REG_8 = 10
MIPS_REG_9 = 11
MIPS_REG_10 = 12
MIPS_REG_11 = 13
MIPS_REG_12 = 14
MIPS_REG_13 = 15
MIPS_REG_14 = 16
MIPS_REG_15 = 17
MIPS_REG_16 = 18
MIPS_REG_17 = 19
MIPS_REG_18 = 20
MIPS_REG_19 = 21
MIPS_REG_20 = 22
MIPS_REG_21 = 23
MIPS_REG_22 = 24
MIPS_REG_23 = 25
MIPS_REG_24 = 26
MIPS_REG_25 = 27
MIPS_REG_26 = 28
MIPS_REG_27 = 29
MIPS_REG_28 = 30
MIPS_REG_29 = 31
MIPS_REG_30 = 32
MIPS_REG_31 = 33
MIPS_REG_DSPCCOND = 34
MIPS_REG_DSPCARRY = 35
MIPS_REG_DSPEFI = 36
MIPS_REG_DSPOUTFLAG = 37
MIPS_REG_DSPOUTFLAG16_19 = 38
MIPS_REG_DSPOUTFLAG20 = 39
MIPS_REG_DSPOUTFLAG21 = 40
MIPS_REG_DSPOUTFLAG22 = 41
MIPS_REG_DSPOUTFLAG23 = 42
MIPS_REG_DSPPOS = 43
MIPS_REG_DSPSCOUNT = 44
MIPS_REG_AC0 = 45
MIPS_REG_AC1 = 46
MIPS_REG_AC2 = 47
MIPS_REG_AC3 = 48
MIPS_REG_CC0 = 49
MIPS_REG_CC1 = 50
MIPS_REG_CC2 = 51
MIPS_REG_CC3 = 52
MIPS_REG_CC4 = 53
MIPS_REG_CC5 = 54
MIPS_REG_CC6 = 55
MIPS_REG_CC7 = 56
MIPS_REG_F0 = 57
MIPS_REG_F1 = 58
MIPS_REG_F2 = 59
MIPS_REG_F3 = 60
MIPS_REG_F4 = 61
MIPS_REG_F5 = 62
MIPS_REG_F6 = 63
MIPS_REG_F7 = 64
MIPS_REG_F8 = 65
MIPS_REG_F9 = 66
MIPS_REG_F10 = 67
MIPS_REG_F11 = 68
MIPS_REG_F12 = 69
MIPS_REG_F13 = 70
MIPS_REG_F14 = 71
MIPS_REG_F15 = 72
MIPS_REG_F16 = 73
MIPS_REG_F17 = 74
MIPS_REG_F18 = 75
MIPS_REG_F19 = 76
MIPS_REG_F20 = 77
MIPS_REG_F21 = 78
MIPS_REG_F22 = 79
MIPS_REG_F23 = 80
MIPS_REG_F24 = 81
MIPS_REG_F25 = 82
MIPS_REG_F26 = 83
MIPS_REG_F27 = 84
MIPS_REG_F28 = 85
MIPS_REG_F29 = 86
MIPS_REG_F30 = 87
MIPS_REG_F31 = 88
MIPS_REG_FCC0 = 89
MIPS_REG_FCC1 = 90
MIPS_REG_FCC2 = 91
MIPS_REG_FCC3 = 92
MIPS_REG_FCC4 = 93
MIPS_REG_FCC5 = 94
MIPS_REG_FCC6 = 95
MIPS_REG_FCC7 = 96
MIPS_REG_W0 = 97
MIPS_REG_W1 = 98
MIPS_REG_W2 = 99
MIPS_REG_W3 = 100
MIPS_REG_W4 = 101
MIPS_REG_W5 = 102
MIPS_REG_W6 = 103
MIPS_REG_W7 = 104
MIPS_REG_W8 = 105
MIPS_REG_W9 = 106
MIPS_REG_W10 = 107
MIPS_REG_W11 = 108
MIPS_REG_W12 = 109
MIPS_REG_W13 = 110
MIPS_REG_W14 = 111
MIPS_REG_W15 = 112
MIPS_REG_W16 = 113
MIPS_REG_W17 = 114
MIPS_REG_W18 = 115
MIPS_REG_W19 = 116
MIPS_REG_W20 = 117
MIPS_REG_W21 = 118
MIPS_REG_W22 = 119
MIPS_REG_W23 = 120
MIPS_REG_W24 = 121
MIPS_REG_W25 = 122
MIPS_REG_W26 = 123
MIPS_REG_W27 = 124
MIPS_REG_W28 = 125
MIPS_REG_W29 = 126
MIPS_REG_W30 = 127
MIPS_REG_W31 = 128
MIPS_REG_HI = 129
MIPS_REG_LO = 130
MIPS_REG_P0 = 131
MIPS_REG_P1 = 132
MIPS_REG_P2 = 133
MIPS_REG_MPL0 = 134
MIPS_REG_MPL1 = 135
MIPS_REG_MPL2 = 136
MIPS_REG_ENDING = 137
MIPS_REG_ZERO = MIPS_REG_0
MIPS_REG_AT = MIPS_REG_1
MIPS_REG_V0 = MIPS_REG_2
MIPS_REG_V1 = MIPS_REG_3
MIPS_REG_A0 = MIPS_REG_4
MIPS_REG_A1 = MIPS_REG_5
MIPS_REG_A2 = MIPS_REG_6
MIPS_REG_A3 = MIPS_REG_7
MIPS_REG_T0 = MIPS_REG_8
MIPS_REG_T1 = MIPS_REG_9
MIPS_REG_T2 = MIPS_REG_10
MIPS_REG_T3 = MIPS_REG_11
MIPS_REG_T4 = MIPS_REG_12
MIPS_REG_T5 = MIPS_REG_13
MIPS_REG_T6 = MIPS_REG_14
MIPS_REG_T7 = MIPS_REG_15
MIPS_REG_S0 = MIPS_REG_16
MIPS_REG_S1 = MIPS_REG_17
MIPS_REG_S2 = MIPS_REG_18
MIPS_REG_S3 = MIPS_REG_19
MIPS_REG_S4 = MIPS_REG_20
MIPS_REG_S5 = MIPS_REG_21
MIPS_REG_S6 = MIPS_REG_22
MIPS_REG_S7 = MIPS_REG_23
MIPS_REG_T8 = MIPS_REG_24
MIPS_REG_T9 = MIPS_REG_25
MIPS_REG_K0 = MIPS_REG_26
MIPS_REG_K1 = MIPS_REG_27
MIPS_REG_GP = MIPS_REG_28
MIPS_REG_SP = MIPS_REG_29
MIPS_REG_FP = MIPS_REG_30
MIPS_REG_S8 = MIPS_REG_30
MIPS_REG_RA = MIPS_REG_31
MIPS_REG_HI0 = MIPS_REG_AC0
MIPS_REG_HI1 = MIPS_REG_AC1
MIPS_REG_HI2 = MIPS_REG_AC2
MIPS_REG_HI3 = MIPS_REG_AC3
MIPS_REG_LO0 = MIPS_REG_HI0
MIPS_REG_LO1 = MIPS_REG_HI1
MIPS_REG_LO2 = MIPS_REG_HI2
MIPS_REG_LO3 = MIPS_REG_HI3
MIPS_INS_INVALID = 0
MIPS_INS_ABSQ_S = 1
MIPS_INS_ADD = 2
MIPS_INS_ADDIUPC = 3
MIPS_INS_ADDIUR1SP = 4
MIPS_INS_ADDIUR2 = 5
MIPS_INS_ADDIUS5 = 6
MIPS_INS_ADDIUSP = 7
MIPS_INS_ADDQH = 8
MIPS_INS_ADDQH_R = 9
MIPS_INS_ADDQ = 10
MIPS_INS_ADDQ_S = 11
MIPS_INS_ADDSC = 12
MIPS_INS_ADDS_A = 13
MIPS_INS_ADDS_S = 14
MIPS_INS_ADDS_U = 15
MIPS_INS_ADDU16 = 16
MIPS_INS_ADDUH = 17
MIPS_INS_ADDUH_R = 18
MIPS_INS_ADDU = 19
MIPS_INS_ADDU_S = 20
MIPS_INS_ADDVI = 21
MIPS_INS_ADDV = 22
MIPS_INS_ADDWC = 23
MIPS_INS_ADD_A = 24
MIPS_INS_ADDI = 25
MIPS_INS_ADDIU = 26
MIPS_INS_ALIGN = 27
MIPS_INS_ALUIPC = 28
MIPS_INS_AND = 29
MIPS_INS_AND16 = 30
MIPS_INS_ANDI16 = 31
MIPS_INS_ANDI = 32
MIPS_INS_APPEND = 33
MIPS_INS_ASUB_S = 34
MIPS_INS_ASUB_U = 35
MIPS_INS_AUI = 36
MIPS_INS_AUIPC = 37
MIPS_INS_AVER_S = 38
MIPS_INS_AVER_U = 39
MIPS_INS_AVE_S = 40
MIPS_INS_AVE_U = 41
MIPS_INS_B16 = 42
MIPS_INS_BADDU = 43
MIPS_INS_BAL = 44
MIPS_INS_BALC = 45
MIPS_INS_BALIGN = 46
MIPS_INS_BBIT0 = 47
MIPS_INS_BBIT032 = 48
MIPS_INS_BBIT1 = 49
MIPS_INS_BBIT132 = 50
MIPS_INS_BC = 51
MIPS_INS_BC0F = 52
MIPS_INS_BC0FL = 53
MIPS_INS_BC0T = 54
MIPS_INS_BC0TL = 55
MIPS_INS_BC1EQZ = 56
MIPS_INS_BC1F = 57
MIPS_INS_BC1FL = 58
MIPS_INS_BC1NEZ = 59
MIPS_INS_BC1T = 60
MIPS_INS_BC1TL = 61
MIPS_INS_BC2EQZ = 62
MIPS_INS_BC2F = 63
MIPS_INS_BC2FL = 64
MIPS_INS_BC2NEZ = 65
MIPS_INS_BC2T = 66
MIPS_INS_BC2TL = 67
MIPS_INS_BC3F = 68
MIPS_INS_BC3FL = 69
MIPS_INS_BC3T = 70
MIPS_INS_BC3TL = 71
MIPS_INS_BCLRI = 72
MIPS_INS_BCLR = 73
MIPS_INS_BEQ = 74
MIPS_INS_BEQC = 75
MIPS_INS_BEQL = 76
MIPS_INS_BEQZ16 = 77
MIPS_INS_BEQZALC = 78
MIPS_INS_BEQZC = 79
MIPS_INS_BGEC = 80
MIPS_INS_BGEUC = 81
MIPS_INS_BGEZ = 82
MIPS_INS_BGEZAL = 83
MIPS_INS_BGEZALC = 84
MIPS_INS_BGEZALL = 85
MIPS_INS_BGEZALS = 86
MIPS_INS_BGEZC = 87
MIPS_INS_BGEZL = 88
MIPS_INS_BGTZ = 89
MIPS_INS_BGTZALC = 90
MIPS_INS_BGTZC = 91
MIPS_INS_BGTZL = 92
MIPS_INS_BINSLI = 93
MIPS_INS_BINSL = 94
MIPS_INS_BINSRI = 95
MIPS_INS_BINSR = 96
MIPS_INS_BITREV = 97
MIPS_INS_BITSWAP = 98
MIPS_INS_BLEZ = 99
MIPS_INS_BLEZALC = 100
MIPS_INS_BLEZC = 101
MIPS_INS_BLEZL = 102
MIPS_INS_BLTC = 103
MIPS_INS_BLTUC = 104
MIPS_INS_BLTZ = 105
MIPS_INS_BLTZAL = 106
MIPS_INS_BLTZALC = 107
MIPS_INS_BLTZALL = 108
MIPS_INS_BLTZALS = 109
MIPS_INS_BLTZC = 110
MIPS_INS_BLTZL = 111
MIPS_INS_BMNZI = 112
MIPS_INS_BMNZ = 113
MIPS_INS_BMZI = 114
MIPS_INS_BMZ = 115
MIPS_INS_BNE = 116
MIPS_INS_BNEC = 117
MIPS_INS_BNEGI = 118
MIPS_INS_BNEG = 119
MIPS_INS_BNEL = 120
MIPS_INS_BNEZ16 = 121
MIPS_INS_BNEZALC = 122
MIPS_INS_BNEZC = 123
MIPS_INS_BNVC = 124
MIPS_INS_BNZ = 125
MIPS_INS_BOVC = 126
MIPS_INS_BPOSGE32 = 127
MIPS_INS_BREAK = 128
MIPS_INS_BREAK16 = 129
MIPS_INS_BSELI = 130
MIPS_INS_BSEL = 131
MIPS_INS_BSETI = 132
MIPS_INS_BSET = 133
MIPS_INS_BZ = 134
MIPS_INS_BEQZ = 135
MIPS_INS_B = 136
MIPS_INS_BNEZ = 137
MIPS_INS_BTEQZ = 138
MIPS_INS_BTNEZ = 139
MIPS_INS_CACHE = 140
MIPS_INS_CEIL = 141
MIPS_INS_CEQI = 142
MIPS_INS_CEQ = 143
MIPS_INS_CFC1 = 144
MIPS_INS_CFCMSA = 145
MIPS_INS_CINS = 146
MIPS_INS_CINS32 = 147
MIPS_INS_CLASS = 148
MIPS_INS_CLEI_S = 149
MIPS_INS_CLEI_U = 150
MIPS_INS_CLE_S = 151
MIPS_INS_CLE_U = 152
MIPS_INS_CLO = 153
MIPS_INS_CLTI_S = 154
MIPS_INS_CLTI_U = 155
MIPS_INS_CLT_S = 156
MIPS_INS_CLT_U = 157
MIPS_INS_CLZ = 158
MIPS_INS_CMPGDU = 159
MIPS_INS_CMPGU = 160
MIPS_INS_CMPU = 161
MIPS_INS_CMP = 162
MIPS_INS_COPY_S = 163
MIPS_INS_COPY_U = 164
MIPS_INS_CTC1 = 165
MIPS_INS_CTCMSA = 166
MIPS_INS_CVT = 167
MIPS_INS_C = 168
MIPS_INS_CMPI = 169
MIPS_INS_DADD = 170
MIPS_INS_DADDI = 171
MIPS_INS_DADDIU = 172
MIPS_INS_DADDU = 173
MIPS_INS_DAHI = 174
MIPS_INS_DALIGN = 175
MIPS_INS_DATI = 176
MIPS_INS_DAUI = 177  # NOTE(review): removed a stray "|" corruption marker
MIPS_INS_DBITSWAP = 178
MIPS_INS_DCLO = 179
MIPS_INS_DCLZ = 180
MIPS_INS_DDIV = 181
MIPS_INS_DDIVU = 182
MIPS_INS_DERET = 183
MIPS_INS_DEXT = 184
MIPS_INS_DEXTM = 185
MIPS_INS_DEXTU = 186
MIPS_INS_DI = 187
MIPS_INS_DINS = 188
MIPS_INS_DINSM = 189
MIPS_INS_DINSU = 190
MIPS_INS_DIV = 191
MIPS_INS_DIVU = 192
MIPS_INS_DIV_S = 193
MIPS_INS_DIV_U = 194
MIPS_INS_DLSA = 195
MIPS_INS_DMFC0 = 196  # NOTE(review): removed a stray trailing "|" corruption marker
MIPS_INS_DMFC1 = 197
MIPS_INS_DMFC2 = 198
MIPS_INS_DMOD = 199
MIPS_INS_DMODU = 200
MIPS_INS_DMTC0 = 201
MIPS_INS_DMTC1 = 202
MIPS_INS_DMTC2 = 203
MIPS_INS_DMUH = 204
MIPS_INS_DMUHU = 205
MIPS_INS_DMUL = 206
MIPS_INS_DMULT = 207
MIPS_INS_DMULTU = 208
MIPS_INS_DMULU = 209
MIPS_INS_DOTP_S = 210
MIPS_INS_DOTP_U = 211
MIPS_INS_DPADD_S = 212
MIPS_INS_DPADD_U = 213
MIPS_INS_DPAQX_SA = 214
MIPS_INS_DPAQX_S = 21 |
feikname/Scripts | python/openspades_wiki_tools.py | Python | mit | 3,906 | 0.001024 | #
# openspades_wiki_tools.py - v0.0.0 (not a release!)
# MIT License
#
from __future__ import print_function
import os
import platform
import sys
"""
This function gathers all settings present in the OpenSpades user resource
directory configuration file (SPConfig.cfg) and generates a markdown string to
be used in the wiki.
If cfg_file=None, it searches automatically for the file.
See also: https://github.com/yvt/openspades/wiki/User-Resource-Directory
"""
def cfg_file_to_markdown(cfg_file=None):
    """Print a markdown bullet list of all settings found in SPConfig.cfg.

    Reads the OpenSpades configuration file and prints one " * name" line
    per setting, rendering entries listed in ``cfg_link_list`` as
    intra-page markdown links.

    :param cfg_file: path to an SPConfig.cfg file; when ``None`` the
        platform-specific default location is used (see get_spconfig_path).
    """
    # Config variables that should be rendered as links to their section.
    cfg_link_list = ["cg_Minimap_Player_Color"]
    if cfg_file is None:
        cfg_file = get_spconfig_path()
    cfg_list = []
    # "with" guarantees the file is closed even if parsing raises.
    with open(cfg_file, "r") as sp_cfg:
        for line in sp_cfg:
            stripped = line.lstrip()
            # Skip comments and blank lines.
            if stripped.startswith("#") or stripped.rstrip() == "":
                continue
            # Everything before the first ':' is the setting name.
            cfg_list.append(stripped.split(":", 1)[0])
    # Output the markdown list.
    for cfg in cfg_list:
        if cfg in cfg_link_list:
            print(" * [{0}](#{1})".format(cfg, cfg.lower()))
        else:
            print(" * {0}".format(cfg))
def get_spconfig_path():
    """Return the platform-specific path of OpenSpades' SPConfig.cfg.

    Windows:  %APPDATA%\\OpenSpades\\Resources\\SPConfig.cfg
    Linux:    $XDG_DATA_HOME/openspades/Resources/SPConfig.cfg
              (defaults to ~/.local/share when $XDG_DATA_HOME is unset)
    Darwin:   ~/Library/Application Support/OpenSpades/Resources/SPConfig.cfg

    Returns ``None`` on any other platform.
    :raises RuntimeError: on Windows when %APPDATA% is not defined.
    """
    system = platform.system()
    if system == "Windows":
        # The original raised NotImplementedError here, which made the
        # complete implementation below unreachable dead code.
        appdata = os.environ.get("APPDATA")
        if appdata is None:
            raise RuntimeError("%APPDATA% not defined! (wtf?)")
        return os.path.join(appdata, "OpenSpades", "Resources",
                            "SPConfig.cfg")
    elif system == "Linux":
        xdg = os.environ.get("XDG_DATA_HOME")
        if xdg is None:
            # Default to $HOME/.local/share, as defined by the XDG specs.
            xdg = os.path.join(os.path.expanduser('~'), ".local", "share")
        return os.path.join(xdg, "openspades", "Resources", "SPConfig.cfg")
    elif system == "Darwin":
        # Include the file name so all platforms return a *file* path
        # (the original returned only the Resources directory here).
        return os.path.join(os.path.expanduser('~'), "Library",
                            "Application Support", "OpenSpades",
                            "Resources", "SPConfig.cfg")
    return None
# TODO: Colors
def display_help():
    """Print usage information for this script to stdout."""
    # NOTE: this is a runtime string shown to the user; keep the action
    # names in sync with the dispatch code at module level.
    help_text = """openspades_wiki_tool.py - v0.0.0 (not a release!)
Usage: python openspades_wiki_tools.py [action] [action_arguments]
Available actions:
config_to_markdown [FILE]: Output the markdown index list of an SPConfig.cfg
note: The [FILE] argument is optional.
It's recommended to redirect the output to a file (e.g. tools.py > output.md)
"""
    print(help_text)
# --- command-line dispatch (runs on import; this file is a script) ---
NO_ARGUMENTS = (len(sys.argv) == 1) # First argument is the filename
INVALID_ACTION = True
try:
    INVALID_ACTION = (sys.argv[1] not in ["config_to_markdown"])
except IndexError:
    # No action argument at all; NO_ARGUMENTS handles that case below.
    pass
if NO_ARGUMENTS:
    display_help()
elif INVALID_ACTION:
    print("Invalid action selected! Run this script with no arguments to see"
          " the help text")
else:
    if sys.argv[1] == "config_to_markdown":
        if len(sys.argv) == 3:
            cfg_file_to_markdown(sys.argv[2])
        elif len(sys.argv) > 3:
            # NOTE(review): "Your provided" is a typo in the runtime message.
            raise RuntimeError("Your provided more than 2 arguments to the"
                               " config_to_markdown action!")
        else:
            # No file given: use the platform default location.
            cfg_file_to_markdown()
|
flavoso/gerencex | gerencex/core/tests/test_hoursbalance_model.py | Python | gpl-3.0 | 5,457 | 0.002201 | import datetime
import pytz
from django.utils import timezone
from django.contrib.auth.models import User
from django.test import TestCase
from gerencex.core.models import HoursBalance, Timing, Office
from gerencex.core.time_calculations import DateData
class HoursBalanceModelTest(TestCase):
    """Tests for the HoursBalance model and its balance-propagation signals."""

    @classmethod
    def setUpTestData(cls):
        cls.user = User.objects.create_user('testuser', 'test@user.com', 'senha123')

    def test_balances(self):
        r1 = HoursBalance.objects.create(
            date=datetime.date(2016, 8, 18),
            user=self.user,
            credit=datetime.timedelta(hours=6).seconds,
            debit=datetime.timedelta(hours=7).seconds,
        )
        # Test creation
        self.assertTrue(HoursBalance.objects.exists())
        # First balance is calculated without a previous balance (see the
        # total_balance_handler function at signals.py)
        self.assertEqual(r1.balance, int(datetime.timedelta(hours=-1).total_seconds()))
        # Second balance takes the first balance into account (see the
        # total_balance_handler function at signals.py)
        r2 = HoursBalance.objects.create(
            date=datetime.date(2016, 8, 19),
            user=self.user,
            credit=datetime.timedelta(hours=6).seconds,
            debit=datetime.timedelta(hours=7).seconds,
        )
        self.assertEqual(r2.balance, int(datetime.timedelta(hours=-2).total_seconds()))
        # Change in first credit or debit must change the second balance (see
        # the next_balance_handler function at signals.py)
        r1.credit = datetime.timedelta(hours=7).seconds
        r1.save()
        r2 = HoursBalance.objects.get(pk=2)
        self.assertEqual(r2.balance, int(datetime.timedelta(hours=-1).total_seconds()))
class CreditTriggerTest(TestCase):
    """
    The user credit is always registered at HourBalance via signal, when a checkout occurs.
    See the 'credit_calculation' function, at signals.py
    """
    @classmethod
    def setUpTestData(cls):
        # Office with a 6-hour regular workday; the user is attached to it
        # implicitly via the UserDetail default.
        Office.objects.create(name='Nenhuma lotação',
                              initials='NL',
                              regular_work_hours=datetime.timedelta(hours=6))
        User.objects.create_user('testuser', 'test@user.com', 'senha123')
        cls.user = User.objects.get(username='testuser')
    def test_credit_triggers(self):
        # Let's record a check in...
        t1 = Timing.objects.create(
            user=self.user,
            date_time=timezone.make_aware(datetime.datetime(2016, 10, 3, 12, 0, 0, 0)),
            checkin=True
        )
        # ...and a checkout
        t2 = Timing.objects.create(
            user=self.user,
            date_time=timezone.make_aware(datetime.datetime(2016, 10, 3, 13, 0, 0, 0)),
            checkin=False
        )
        # Let's record a balance line at HoursBalance
        date = datetime.date(2016, 10, 3)
        new_credit = DateData(self.user, date).credit().seconds
        new_debit = DateData(self.user, date).debit().seconds
        HoursBalance.objects.create(
            date=date,
            user=self.user,
            credit=new_credit,
            debit=new_debit
        )
        # Let's change t2 (checkout record)
        t2.date_time += datetime.timedelta(hours=1)
        t2.save()
        # The balance must have been recalculated via django signal (signals.py)
        # The office tolerances are added on top of the worked interval.
        checkout_tolerance = self.user.userdetail.office.checkout_tolerance
        checkin_tolerance = self.user.userdetail.office.checkin_tolerance
        tolerance = checkout_tolerance + checkin_tolerance
        reference = datetime.timedelta(hours=2).seconds + tolerance.seconds
        line = HoursBalance.objects.first()
        credit = line.credit
        self.assertEqual(reference, credit)
        # Let's change t1 (checkin record)
        t1.date_time += datetime.timedelta(hours=1)
        t1.save()
        # The balance must have been recalculated via signal
        modified_reference = datetime.timedelta(hours=1).seconds + tolerance.seconds
        modified_balance_line = HoursBalance.objects.first()
        modified_credit = modified_balance_line.credit
        self.assertEqual(modified_reference, modified_credit)
# TODO: Escrever o teste depois que já houver view para produzir o balanço da divisão e do usuário
class RestdayDebitTriggerTest(TestCase):
    """
    When we record a Restday whose date is prior to the date of the Balance,
    the balances must be recalculated for all users.
    """

    @classmethod
    def setUpTestData(cls):
        Office.objects.create(name='Diacomp 1', initials='diacomp1')
        Office.objects.create(name='Diacomp 2', initials='diacomp2')
        cls.diacomp1 = Office.objects.get(initials='diacomp1')
        cls.diacomp2 = Office.objects.get(initials='diacomp2')
        cls.diacomp1.hours_control_start_date = datetime.date(2016, 9, 1)
        cls.diacomp1.save()
        cls.diacomp2.hours_control_start_date = datetime.date(2016, 10, 1)
        # Fixed: the original saved diacomp1 again here, so diacomp2's
        # start date was never persisted.
        cls.diacomp2.save()
        User.objects.create_user('testuser1', 'test1@user.com', 'senha123')
        User.objects.create_user('testuser2', 'test2@user.com', 'senha123')
        # Fixed: the original looked up username='testuser' (which does not
        # exist) for both attributes, which would raise User.DoesNotExist.
        cls.user1 = User.objects.get(username='testuser1')
        cls.user2 = User.objects.get(username='testuser2')

    # TODO: implement test_debit_trigger once the division/user balance
    # views exist.
def activate_timezone():
    """Activate the America/Sao_Paulo timezone for the current thread (test helper)."""
    return timezone.activate(pytz.timezone('America/Sao_Paulo'))
|
philipperemy/tensorflow-phased-lstm | helpers/__init__.py | Python | mit | 35 | 0.028571 | from .file_ | logger import FileLog | ger |
artas360/pythran | pythran/analyses/cfg.py | Python | bsd-3-clause | 4,991 | 0 | """ Computes the Control Flow Graph of a function. """
from pythran.passmanager import FunctionAnalysis
import ast
import networkx as nx
class CFG(FunctionAnalysis):
    """
    Computes the Control Flow Graph of a function.

    The processing of a node yields a pair containing
    * the OUT nodes, to be linked with the IN nodes of the successor
    * the RAISE nodes, nodes that stop the control flow (exception/break/...)
    """

    def __init__(self):
        # Directed graph: an edge u -> v means control may flow from u to v.
        self.result = nx.DiGraph()
        super(CFG, self).__init__()

    def visit_FunctionDef(self, node):
        # the function itself is the entry point
        self.result.add_node(node)
        currs = (node,)
        for n in node.body:
            self.result.add_node(n)
            for curr in currs:
                self.result.add_edge(curr, n)
            currs, _ = self.visit(n)
        # add an edge to None for nodes that end the control flow
        # without a return
        self.result.add_node(None)
        for curr in currs:
            self.result.add_edge(curr, None)

    def visit_Pass(self, node):
        """OUT = node, RAISES = ()"""
        return (node,), ()

    # All these nodes have the same behavior as pass
    visit_Assign = visit_AugAssign = visit_Import = visit_Pass
    visit_Expr = visit_Print = visit_ImportFrom = visit_Pass
    visit_Yield = visit_Delete = visit_Pass

    def visit_Return(self, node):
        """OUT = (), RAISES = ()"""
        return (), ()

    def visit_For(self, node):
        """
        OUT = (node,) + last body statements
        RAISES = body's that are not break or continue
        """
        currs = (node,)
        break_currs = (node,)
        raises = ()
        # handle body
        for n in node.body:
            self.result.add_node(n)
            for curr in currs:
                self.result.add_edge(curr, n)
            currs, nraises = self.visit(n)
            for nraise in nraises:
                if type(nraise) is ast.Break:
                    # break exits the loop: it becomes an OUT of the loop.
                    break_currs += (nraise,)
                elif type(nraise) is ast.Continue:
                    # continue jumps back to the loop header.
                    self.result.add_edge(nraise, node)
                else:
                    raises += (nraise,)
        # add the backward loop
        for curr in currs:
            self.result.add_edge(curr, node)
        # the else statement if needed
        if node.orelse:
            for n in node.orelse:
                self.result.add_node(n)
                for curr in currs:
                    self.result.add_edge(curr, n)
                currs, nraises = self.visit(n)
        return break_currs + currs, raises

    visit_While = visit_For

    def visit_If(self, node):
        """
        OUT = true branch U false branch
        RAISES = true branch U false branch
        """
        currs = (node,)
        raises = ()
        # true branch
        for n in node.body:
            self.result.add_node(n)
            for curr in currs:
                self.result.add_edge(curr, n)
            currs, nraises = self.visit(n)
            raises += nraises
        tcurrs = currs
        # false branch
        currs = (node,)
        for n in node.orelse:
            self.result.add_node(n)
            for curr in currs:
                self.result.add_edge(curr, n)
            currs, nraises = self.visit(n)
            raises += nraises
        return tcurrs + currs, raises

    def visit_Raise(self, node):
        """OUT = (), RAISES = (node)"""
        return (), (node,)

    visit_Break = visit_Continue = visit_Raise

    def visit_Assert(self, node):
        """OUT = RAISES = (node)"""
        return (node,), (node,)

    def visit_TryExcept(self, node):
        """
        OUT = body's U handler's
        RAISES = handler's

        This equation is not as good as it could be, but we would need
        type information to be more accurate.
        """
        currs = (node,)
        raises = ()
        for handler in node.handlers:
            self.result.add_node(handler)
        for n in node.body:
            self.result.add_node(n)
            for curr in currs:
                # Fixed: this line was corrupted in the source
                # ("self.resu | lt..."); it matches the identical edge-adding
                # pattern used by every other visit method.
                self.result.add_edge(curr, n)
            currs, nraises = self.visit(n)
            for nraise in nraises:
                if type(nraise) is ast.Raise:
                    # a raise in the body may jump to any handler
                    for handler in node.handlers:
                        self.result.add_edge(nraise, handler)
                else:
                    raises += (nraise,)
        for handler in node.handlers:
            ncurrs, nraises = self.visit(handler)
            currs += ncurrs
            raises += nraises
        return currs, raises

    def visit_ExceptHandler(self, node):
        """OUT = body's, RAISES = body's"""
        currs = (node,)
        raises = ()
        for n in node.body:
            self.result.add_node(n)
            for curr in currs:
                self.result.add_edge(curr, n)
            currs, nraises = self.visit(n)
            raises += nraises
        return currs, raises
|
gmimano/commcaretest | corehq/apps/api/serializers.py | Python | bsd-3-clause | 4,533 | 0.00375 |
# Standard library imports
from io import BytesIO
# Django & Tastypie imports
from django.utils.encoding import force_unicode
from tastypie.bundle import Bundle
from tastypie.serializers import Serializer, get_type_string
# External imports
import defusedxml.lxml as lxml
from lxml.etree import Element, tostring, LxmlError
class CommCareCaseSerializer(Serializer):
    '''
    A custom serializer that emits XML that matches a case's
    definition according to CaseXML rather than the
    automated Tastypie equivalent of the JSON output
    '''
    # NOTE(review): this module is Python 2 only (iteritems, unicode,
    # force_unicode) — confirm before porting.

    def case_to_etree(self, case):
        '''
        Encapsulates the version passed to `CommCareCase.to_xml` and
        the temporary hack of re-parsing it. TODO: expose a direct etree
        encoding in casexml?
        '''
        return lxml.parse(BytesIO(case.to_xml('2.0'))).getroot()

    def bundle_to_etree(self, bundle):
        '''
        A new override point we have added - how to convert a single-object bundle to XML.
        The list endpoint will re-use this. TODO: PR against tastypie to expose this hook?
        '''
        return self.case_to_etree(bundle.obj)

    def to_etree(self, data, options=None, name=None, depth=0):
        '''
        Exact duplicate of tastypie.serializers.Serializer.to_etree with modification because
        it does not expose sufficient APIs to customize just at the bundle level while reusing
        all this same envelope code.
        '''
        # Lists/tuples become an <objects> (or named) container element.
        if isinstance(data, (list, tuple)):
            element = Element(name or 'objects')
            if name:
                element = Element(name)
                element.set('type', 'list')
            else:
                element = Element('objects')
            for item in data:
                element.append(self.to_etree(item, options, depth=depth+1))
        # Dicts become <response> at the root, <object type="hash"> below it.
        elif isinstance(data, dict):
            if depth == 0:
                element = Element(name or 'response')
            else:
                element = Element(name or 'object')
                element.set('type', 'hash')
            for (key, value) in data.iteritems():
                element.append(self.to_etree(value, options, name=key, depth=depth+1))
        elif isinstance(data, Bundle):
            element = self.bundle_to_etree(data) # <--------------- this is the part that is changed from https://github.com/toastdriven/django-tastypie/blob/master/tastypie/serializers.py
        # Related-field handling mirrors the upstream tastypie logic.
        elif hasattr(data, 'dehydrated_type'):
            if getattr(data, 'dehydrated_type', None) == 'related' and data.is_m2m == False:
                if data.full:
                    return self.to_etree(data.fk_resource, options, name, depth+1)
                else:
                    return self.to_etree(data.value, options, name, depth+1)
            elif getattr(data, 'dehydrated_type', None) == 'related' and data.is_m2m == True:
                if data.full:
                    element = Element(name or 'objects')
                    for bundle in data.m2m_bundles:
                        element.append(self.to_etree(bundle, options, bundle.resource_name, depth+1))
                else:
                    element = Element(name or 'objects')
                    for value in data.value:
                        element.append(self.to_etree(value, options, name, depth=depth+1))
            else:
                return self.to_etree(data.value, options, name)
        # Scalars: emit a <value> element annotated with its simple type.
        else:
            element = Element(name or 'value')
            simple_data = self.to_simple(data, options)
            data_type = get_type_string(simple_data)
            if data_type != 'string':
                element.set('type', get_type_string(simple_data))
            if data_type != 'null':
                if isinstance(simple_data, unicode):
                    element.text = simple_data
                else:
                    element.text = force_unicode(simple_data)
        return element
class CustomXMLSerializer(Serializer):
    """XML serializer that hoists a child <id> element into an ``id``
    attribute on its parent element."""

    def to_etree(self, data, options=None, name=None, depth=0):
        # This line was corrupted in the source ("| etree = ..."); restored
        # from the standard tastypie override pattern.
        etree = super(CustomXMLSerializer, self).to_etree(data, options, name, depth)
        # ``id_element`` avoids shadowing the ``id`` builtin.
        id_element = etree.find('id')
        if id_element is not None:
            etree.attrib['id'] = id_element.findtext('.')
            etree.remove(id_element)
        return etree
class XFormInstanceSerializer(Serializer):
    """Serializer that returns an XFormInstance bundle's raw XML directly."""

    def to_xml(self, data, options=None):
        # Single-object bundles carry their original XML payload; everything
        # else (lists, dicts) falls back to the default XML serialization.
        if isinstance(data, Bundle):
            return data.obj.get_xml()
        else:
            return super(XFormInstanceSerializer, self).to_xml(data, options=options)
|
danceasarxx/pyfunk | pyfunk/monads/maybe.py | Python | gpl-3.0 | 1,877 | 0.000533 | import functools
import pyfunk.combinators as comb
from pyfunk.monads import Monad
@comb.curry
def or_else(x, mb):
    """
    Extract the value held by a Maybe, falling back to *x* for Nothing.
    @sig or_else :: b -> Maybe a -> b
    """
    return mb._value if not mb.nothing() else x
@comb.curry
def ftry(fn):
    """
    Wraps the result of a try/except operation in a Maybe: a normal return
    value becomes Maybe.of(result); any exception yields Maybe.of(None).
    @sig ftry :: Maybe m => (* -> a) -> (* -> m a)
    """
    # (The original docstring said "@sig cata" — a copy-paste error.)
    @functools.wraps(fn)
    def safe(*args, **kwargs):
        try:
            return Maybe.of(fn(*args, **kwargs))
        # NOTE(review): bare except also swallows KeyboardInterrupt/SystemExit;
        # kept for behavior compatibility, but consider `except Exception`.
        except:
            return Maybe.of(None)
    return safe
@comb.curry
def cata(f, g, mb):
    """
    Catamorphism: calls f (with no arguments) if nothing exists, and g on
    the wrapped value if one does.
    @sig cata :: Maybe m => (_ -> b) -> (a -> b) -> m a -> b
    """
    return f() if mb.nothing() else g(mb._value)
@comb.curry
def maybify(nullify, f):
    """
    Creates a function that returns a Maybe. Also accepts a nullifier that
    maps falsy values to None. One can use misc.F if there is no need to
    nullify.
    @sig maybify :: (a -> Bool) -> (a -> b) -> (a -> Maybe b)
    """
    def wrap(result):
        # Collapse nullified results to Nothing, otherwise wrap the value.
        return Maybe.of(None) if nullify(result) else Maybe.of(result)
    return comb.compose(wrap, f)
class Maybe(Monad):
    """Monad wrapping a possibly-absent value; a wrapped None is Nothing."""

    def nothing(self):
        """
        Checks if the value of this Maybe is None.
        @sig nothing :: Maybe m => _ -> Bool
        """
        return self._value is None

    def fmap(self, fn):
        """
        Transforms the value of the Maybe monad using the given function;
        Nothing propagates unchanged.
        @sig fmap :: Maybe m => (a -> b) -> Maybe b
        """
        return (Maybe.of(None) if self.nothing()
                else Maybe.of(fn(self._value)))

    def join(self):
        """
        Lifts a Maybe monad out of another (flattens one level of nesting).
        @sig join :: Maybe i => i (i a) -> i a
        """
        return (Maybe.of(None) if self.nothing()
                else self._value)
ludi1001/IonMedicationSystem | mongobacked/mongobacked/wsgi.py | Python | gpl-2.0 | 397 | 0.002519 | """
WSGI config for mongobacked project.
It exposes the WSGI callable as a modu | le-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "mongobacke | d.settings")
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
|
wger-project/wger | wger/nutrition/views/bmi.py | Python | agpl-3.0 | 7,575 | 0 | # -*- coding: utf-8 -*-
# This file is part of wger Workout Manager.
#
# wger Workout Manager is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# wger Workout Manager is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Workout Manager. If not, see <http://www.gnu.org/licenses/>.
# Standard Library
import json
import logging
# Django
from django.contrib.auth.decorators import login_required
from django.h | ttp import HttpResponse
from django.shortcuts i | mport render
# wger
from wger.nutrition.forms import BmiForm
from wger.utils import helpers
logger = logging.getLogger(__name__)
"""
BMI views
"""
@login_required
def view(request):
    """
    Render the BMI calculator detail page, pre-filling the form with the
    user's stored height and weight.
    """
    context = {}
    form_data = {
        'height': request.user.userprofile.height,
        'weight': request.user.userprofile.weight
    }
    context['form'] = BmiForm(initial=form_data)
    return render(request, 'bmi/form.html', context)
@login_required
def calculate(request):
    """
    Calculates the BMI from the posted form and returns it as JSON,
    persisting the submitted weight on the user profile.
    """
    # NOTE(review): when the form is invalid, ``data`` stays an empty list,
    # so the response body is not valid JSON — confirm this is intended.
    data = []
    form = BmiForm(request.POST, instance=request.user.userprofile)
    if form.is_valid():
        form.save()
        # Create a new weight entry as needed
        request.user.userprofile.user_bodyweight(form.cleaned_data['weight'])
        bmi = request.user.userprofile.calculate_bmi()
        result = {
            'bmi': '{0:.2f}'.format(bmi),
            'weight': form.cleaned_data['weight'],
            'height': request.user.userprofile.height,
        }
        # DecimalJsonEncoder handles the Decimal weight field.
        data = json.dumps(result, cls=helpers.DecimalJsonEncoder)
    # Return the results to the client
    return HttpResponse(data, 'application/json')
def chart_data(request):
    """
    Returns the data used to render the BMI chart as a JSON response.

    The individual values are taken from
    * http://apps.who.int/bmi/index.jsp?introPage=intro_3.html
    * https://de.wikipedia.org/wiki/Body-Mass-Index
    """
    # Each band: (category key, weight at height 150, weight at height 200).
    if request.user.userprofile.use_metric:
        bands = [
            ('filler', 30, 30),
            ('severe_thinness', 35.978, 63.960),
            ('moderate_thinness', 38.228, 67.960),
            ('mild_thinness', 41.603, 73.960),
            ('normal_range', 56.228, 99.960),
            ('pre_obese', 67.478, 119.960),
            ('obese_class_1', 78.728, 139.960),
            ('obese_class_2', 89.978, 159.960),
            ('obese_class_3', 90, 190),
        ]
    else:
        bands = [
            ('filler', 66.139, 66.139),
            ('severe_thinness', 79.317, 141.008),
            ('moderate_thinness', 84.277, 149.826),
            ('mild_thinness', 91.718, 163.054),
            ('normal_range', 123.960, 220.374),
            ('pre_obese', 148.762, 264.467),
            ('obese_class_1', 173.564, 308.559),
            ('obese_class_2', 198.366, 352.651),
            ('obese_class_3', 198.416, 352.740),
        ]
    # Expand each band into the two {key, height, weight} points the chart
    # expects, preserving the original key/height ordering.
    data = json.dumps([
        {'key': key, 'height': height, 'weight': weight}
        for key, weight_150, weight_200 in bands
        for height, weight in ((150, weight_150), (200, weight_200))
    ])
    # Return the results to the client
    return HttpResponse(data, 'application/json')
|
awlange/brainsparks | src/sandbox/cost.py | Python | mit | 1,017 | 0 | import src.sandbox.linalg as linalg
import numpy as np
class Cost(object):
    """Registry of cost (loss) functions and their derivatives.

    ``quadratic``/``d_quadratic`` operate on the project's linalg vectors;
    ``np_quadratic``/``np_d_quadratic`` are the NumPy equivalents.
    """

    def __init__(self):
        pass

    @staticmethod
    def get(name):
        """Return the cost function registered under *name*, or None."""
        if name == "quadratic":
            return Cost.quadratic
        if name == "np_quadratic":
            return Cost.np_quadratic
        return None

    @staticmethod
    def get_d(name):
        """Return the cost derivative registered under *name*, or None."""
        if name == "quadratic":
            # Restored from corrupted source ("Cost.d_quadrat | ic").
            return Cost.d_quadratic
        if name == "np_quadratic":
            return Cost.np_d_quadratic
        return None

    @staticmethod
    def quadratic(y, a):
        """
        Cost for a single training data (weighted squared distance).
        """
        return linalg.vsqdistw(a, y)

    @staticmethod
    def d_quadratic(y, a):
        """
        Cost derivative for a single training data.
        """
        return linalg.vminw(a, y)

    @staticmethod
    def np_quadratic(y, a):
        """Half the mean squared L2 distance between rows of *a* and *y*."""
        return np.mean(np.square(np.linalg.norm(a - y, axis=1, ord=2))) * 0.5

    @staticmethod
    def np_d_quadratic(y, a):
        """Derivative of np_quadratic w.r.t. *a*, averaged over len(y)."""
        return (a - y) / len(y)
|
GbalsaC/bitnamiP | venv/lib/python2.7/site-packages/sympy/utilities/iterables.py | Python | agpl-3.0 | 34,793 | 0.001782 | from sympy.core import Basic, C
from sympy.core.compatibility import is_sequence, iterable #logically, they belong here
import random
def flatten(iterable, levels=None, cls=None):
    """
    Recursively denest iterable containers.

    >>> from sympy.utilities.iterables import flatten
    >>> flatten([1, 2, 3])
    [1, 2, 3]
    >>> flatten([1, 2, [3]])
    [1, 2, 3]
    >>> flatten([1, [2, 3], [4, 5]])
    [1, 2, 3, 4, 5]
    >>> flatten([1.0, 2, (1, None)])
    [1.0, 2, 1, None]

    If you want to denest only a specified number of levels of
    nested containers, then set ``levels`` flag to the desired
    number of levels::

    >>> ls = [[(-2, -1), (1, 2)], [(0, 0)]]
    >>> flatten(ls, levels=1)
    [(-2, -1), (1, 2), (0, 0)]

    If cls argument is specified, it will only flatten instances of that
    class, for example:

    >>> from sympy.core import Basic
    >>> class MyOp(Basic):
    ...     pass
    ...
    >>> flatten([MyOp(1, MyOp(2, 3))], cls=MyOp)
    [1, 2, 3]

    adapted from http://kogs-www.informatik.uni-hamburg.de/~meine/python_tricks
    """
    # levels counts down by one per recursion level; 0 stops flattening.
    if levels is not None:
        if not levels:
            return iterable
        elif levels > 0:
            levels -= 1
        else:
            raise ValueError("expected non-negative number of levels, got %s" % levels)
    if cls is None:
        # NOTE: ``basestring`` makes this Python 2 only; strings are iterable
        # but must not be denested character by character.
        reducible = lambda x: hasattr(x, "__iter__") and not isinstance(x, basestring)
    else:
        reducible = lambda x: isinstance(x, cls)
    result = []
    for el in iterable:
        if reducible(el):
            # Basic-like objects are flattened through their .args.
            if hasattr(el, 'args'):
                el = el.args
            result.extend(flatten(el, levels=levels, cls=cls))
        else:
            result.append(el)
    return result
def group(container, multiple=True):
    """
    Splits a container into a list of lists of equal, adjacent elements.

    >>> from sympy.utilities.iterables import group
    >>> group([1, 1, 1, 2, 2, 3])
    [[1, 1, 1], [2, 2], [3]]
    >>> group([1, 1, 1, 2, 2, 3], multiple=False)
    [(1, 3), (2, 2), (3, 1)]
    """
    if not container:
        return []
    runs = []
    run = [container[0]]
    for elem in container[1:]:
        if elem == run[-1]:
            # Same as the previous element: extend the current run.
            run.append(elem)
        else:
            # A new value starts a new run.
            runs.append(run)
            run = [elem]
    runs.append(run)
    if multiple:
        return runs
    # Compact form: (value, run length) pairs.
    return [(run[0], len(run)) for run in runs]
def postorder_traversal(node):
    """
    Do a postorder traversal of a tree.

    This generator recursively yields nodes that it has visited in a postorder
    fashion. That is, it descends through the tree depth-first to yield all of
    a node's children's postorder traversal before yielding the node itself.

    Parameters
    ----------
    node : sympy expression
        The expression to traverse.

    Yields
    ------
    subtree : sympy expression
        All of the subtrees in the tree.

    Examples
    --------
    >>> from sympy import symbols
    >>> from sympy.utilities.iterables import postorder_traversal
    >>> from sympy.abc import x, y, z
    >>> set(postorder_traversal((x+y)*z)) == set([z, y, x, x + y, z*(x + y)])
    True
    """
    # (The two lines below were corrupted with leading "|" markers in the
    # source; restored to match preorder_traversal's recursion pattern.)
    if isinstance(node, Basic):
        for arg in node.args:
            for subtree in postorder_traversal(arg):
                yield subtree
    elif iterable(node):
        for item in node:
            for subtree in postorder_traversal(item):
                yield subtree
    yield node
class preorder_traversal(object):
    """
    Do a pre-order traversal of a tree.

    This iterator recursively yields nodes that it has visited in a pre-order
    fashion. That is, it yields the current node then descends through the tree
    breadth-first to yield all of a node's children's pre-order traversal.

    Parameters
    ----------
    node : sympy expression
        The expression to traverse.

    Yields
    ------
    subtree : sympy expression
        All of the subtrees in the tree.

    Examples
    --------
    >>> from sympy import symbols
    >>> from sympy.utilities.iterables import preorder_traversal
    >>> from sympy.abc import x, y, z
    >>> set(preorder_traversal((x+y)*z)) == set([z, x + y, z*(x + y), x, y])
    True
    """
    def __init__(self, node):
        # When set, the children of the most recently yielded node are skipped.
        self._skip_flag = False
        self._pt = self._preorder_traversal(node)
    def _preorder_traversal(self, node):
        # Yield the node itself first (pre-order), then recurse into children.
        yield node
        if self._skip_flag:
            self._skip_flag = False
            return
        if isinstance(node, Basic):
            for arg in node.args:
                for subtree in self._preorder_traversal(arg):
                    yield subtree
        elif iterable(node):
            for item in node:
                for subtree in self._preorder_traversal(item):
                    yield subtree
    def skip(self):
        """
        Skip yielding current node's (last yielded node's) subtrees.

        Examples
        --------
        >>> from sympy import symbols
        >>> from sympy.utilities.iterables import preorder_traversal
        >>> from sympy.abc import x, y, z
        >>> pt = preorder_traversal((x+y*z)*z)
        >>> for i in pt:
        ...     print i
        ...     if i == x+y*z:
        ...             pt.skip()
        z*(x + y*z)
        z
        x + y*z
        """
        self._skip_flag = True
    def next(self):
        # Python 2 iterator protocol (no __next__ defined here).
        return self._pt.next()
    def __iter__(self):
        return self
def interactive_traversal(expr):
"""Traverse a tree asking a user which branch to choose. """
from sympy.printing import pprint
RED, BRED = '\033[0;31m', '\033[1;31m'
GREEN, BGREEN = '\033[0;32m', '\033[1;32m'
YELLOW, BYELLOW = '\033[0;33m', '\033[1;33m'
BLUE, BBLUE = '\033[0;34m', '\033[1;34m'
MAGENTA, BMAGENTA = '\033[0;35m', '\033[1;35m'
CYAN, BCYAN = '\033[0;36m', '\033[1;36m'
END = '\033[0m'
def cprint(*args):
print "".join(map(str, args)) + END
def _interactive_traversal(expr, stage):
if stage > 0:
print
cprint("Current expression (stage ", BYELLOW, stage, END, "):")
print BCYAN
pprint(expr)
print END
if isinstance(expr, Basic):
if expr.is_Add:
args = expr.as_ordered_terms()
elif expr.is_Mul:
args = expr.as_ordered_factors()
else:
args = expr.args
elif hasattr(expr, "__iter__"):
args = list(expr)
else:
return expr
n_args = len(args)
if not n_args:
return expr
for i, arg in enumerate(args):
cprint(GREEN, "[", BGREEN, i, GREEN, "] ", BLUE, type(arg), END)
pprint(arg)
print
if n_args == 1:
choices = '0'
else:
choices = '0-%d' % (n_args-1)
try:
choice = raw_input("Your choice [%s,f,l,r,d,?]: " % choices)
except EOFError:
result = expr
print
else:
if choice == '?':
cprint(RED, "%s - select subexpression with the given index" % choices)
cprint(RED, "f - select the first subexpression")
cprint(RED, "l - select the last subexpression")
cprint(RED, "r - select a random subexpression")
cprint(RED, "d - done\n")
result = _interactive_traversal(expr, stage)
elif choice in ['d', '']:
result = expr
elif choice == 'f':
result = _interactive_traversal(args[0], stage+1)
elif choice == 'l':
result = _interactive_traversal(args[-1], stage+1)
elif choice == 'r':
result = _interactive_traversal(random.choice(args), stage+1)
else:
try:
choice = int(choice)
except ValueError:
cprint(BRED, "Choice must be a number in %s range\n" % choices)
result = _interactive_traversal(expr, stage |
whiskeylover/idreamoftoast | toast/config.py | Python | apache-2.0 | 1,878 | 0.009052 | """ config.py """
import os
from flask import Flask
from peewee import MySQLDatabase, SqliteDatabase
#-------------------------------------------------------------------------------
# Environment
#-------------------------------------------------------------------------------
# Database name used by the production MySQL backend.
DB = 'idreamoftoast'
# Presence of TOAST_PRODUCTION selects production mode; unset = development.
ENV = os.environ.get('TOAST_PRODUCTION', None)
HOST = os.environ.get('TOAST_HOST', None)
USER = os.environ.get('TOAST_USER', None)
PASSWD = os.environ.get('TOAST_PASSWD', None)
# Directory for the Flask log file (trailing slash expected).
LOG_PATH = os.environ.get('TOAST_LOG_PATH', './')
#-------------------------------------------------------------------------------
# Config Methods
#-------------------------------------------------------------------------------
def get_app():
    """Create the Flask app configured for the current environment.

    Production (TOAST_PRODUCTION set): plain app plus file-based WARNING
    logging under LOG_PATH.
    Development: serves static files from ./public and maps / to index.html.
    """
    app = None
    # If env is set, we are in production!
    if ENV:
        app = Flask(__name__)
        import logging
        file_handler = logging.FileHandler(LOG_PATH + 'flask.log')
        file_handler.setLevel(logging.WARNING)
        file_handler.setFormatter(logging.Formatter(
            '%(asctime)s %(levelname)s: %(message)s '
            '[in %(pathname)s:%(lineno)d]'
        ))
        app.logger.addHandler(file_handler)
    else:
        # Development settings here!
        app = Flask(__name__, static_folder='public', static_url_path='')
        @app.route("/")
        def root():
            return app.send_static_file('index.html')
    return app
def get_database():
    """Return the peewee database backend for the current environment."""
    if not ENV:
        # Development settings: local SQLite file.
        return SqliteDatabase('toast.db')  # , threadlocals=True)
    # Production: MySQL with credentials from the TOAST_* env variables.
    if not (HOST or USER or PASSWD):
        import sys
        print('Environment variables NOT set!')
        sys.exit()
    return MySQLDatabase(DB, host=HOST, user=USER, passwd=PASSWD)
|
xiaomogui/lulu | lulu/lulu/python/linkAMQ/ConnectAMQ.py | Python | apache-2.0 | 4,337 | 0.005063 | __author__ = 'Administrator'
#coding=utf-8
import threading
from stompy.simple import Client
# 单例模式
class ConnectAMQSinleton(object):
    """Thread-safe singleton wrapper around a stompy STOMP client
    connected to ActiveMQ. Use ``getInstance()`` rather than constructing
    the class directly."""

    __instance = None
    __lock = threading.Lock()  # guards lazy creation of the singleton
    __stompClient = None
    # Default queue subscribed to.
    __subscribe = '/queue/amf_pcap_data'

    def __init__(self):
        'disable the __init__ method'

    def __new__(cls):
        return object.__new__(cls)

    @staticmethod
    def getInstance():
        """Return the shared instance, creating it on first use."""
        if not ConnectAMQSinleton.__instance:
            # Fixed: the original called __lock.acquire(0) — a non-blocking
            # acquire whose result was ignored — and then release()d a lock
            # it might not hold. A "with" block does a correct blocking
            # acquire/release around the double-checked creation.
            with ConnectAMQSinleton.__lock:
                if not ConnectAMQSinleton.__instance:
                    ConnectAMQSinleton.__instance = object.__new__(ConnectAMQSinleton)
                    object.__init__(ConnectAMQSinleton.__instance)
        return ConnectAMQSinleton.__instance

    def connect(self, hostname='192.168.1.166', port=61613):
        """Connect to the JMS server via STOMP (default port 61613 — note
        ActiveMQ's STOMP port, not the OpenWire port 61616)."""
        ConnectAMQSinleton.__stompClient = Client(hostname, port)
        ConnectAMQSinleton.__stompClient.connect()

    def disConnect(self):
        """Unsubscribe from the current queue and close the connection."""
        if ConnectAMQSinleton.__stompClient is not None:
            ConnectAMQSinleton.__stompClient.unsubscribe(ConnectAMQSinleton.__subscribe)
            ConnectAMQSinleton.__stompClient.disconnect()

    def send(self, data):
        """Send *data* to the subscribed queue."""
        ConnectAMQSinleton.__stompClient.put(data, destination=ConnectAMQSinleton.__subscribe)

    def receive(self):
        """Block until a message arrives on the queue; ack it and return its body."""
        # ack="client" so the message is acknowledged explicitly below.
        ConnectAMQSinleton.__stompClient.subscribe(ConnectAMQSinleton.__subscribe, ack="client")
        message = ConnectAMQSinleton.__stompClient.get()
        receiveData = message.body
        ConnectAMQSinleton.__stompClient.ack(message)
        return receiveData

    def getSubscribe(self):
        """Return the queue currently subscribed to."""
        return ConnectAMQSinleton.__subscribe

    def setSubscribe(self, subscribe):
        """Set the queue to subscribe to."""
        ConnectAMQSinleton.__subscribe = subscribe
class ConnectAMQ:
__stompClient = None
# 默认订阅amf_pcap_dataamf_pcap_data队列
__subscribe = '/queue/amf_pcap_data'
def __init__(self):
'init'
def connect(self, hostname='192.168.1.166', port=61613):
# 通过 | simple方式连接JMS服务器
# 指定hostname和port(tips:ActiveMQ支持多种协议连接stomp协议的默认端口为61613,这里不要写为61616)
ConnectAMQ.__stompClient = Client(hostname, port)
#stomp = Client()#如果是ActiveMQ和ActiveMQ客户端(本程序)在同一台机器可使用默认值:hostname="localhost",port=61613
# 连接服务器
ConnectAMQ.__stompClient.connect()
def disConnect(self):
if ConnectAMQ.__stompClient != None:
# 退订
ConnectAMQ.__stompClient.unsubscribe(ConnectAMQ.__subscribe)
# 关闭连接
ConnectAMQ.__stompClient.disconnect()
def send(self, data):
# 发送消息到指定的queue
ConnectAMQ.__stompClient.put(data, destination=ConnectAMQ.__subscribe)
def receive(self):
# 从指定的queue订阅消息。ack参数指定为"client",不然可能出现一个问题(具体忘了,以后补充),ack默认值为"auto"
ConnectAMQ.__stompClient.subscribe(ConnectAMQ.__subscribe,ack="client")
# 等待接收ActiveMQ推送的消息
message = ConnectAMQ.__stompClient.get()
# 消息的主体
receiveData = message.body
ConnectAMQ.__stompClient.ack(message)
return receiveData
# get方法
def getSubscribe(self):
return ConnectAMQ.__subscribe
# set方法
def setSubscribe(self, subscribe):
ConnectAMQ.__subscribe = subscribe |
robalar/A2_Project | solver/__init__.py | Python | mit | 117 | 0.008547 | from solver.core import *
from solver.functions im | port *
from solver.calculus | import *
from solver.equations import * |
adrianolinux/lamp | lamp.py | Python | gpl-2.0 | 3,207 | 0.006548 | """
LAMP
"""
import numpy as np
tol = 1.e-6 # zero tolerance
def pdist(x):
"""
Pairwise distance between pairs of objects
TODO: find a fast function
"""
n, d = x.shape
dist = np.zeros((n, n))
for i in range(n):
for j in range(n):
dist[i][j] = np.linalg.norm(x[i] - x[j])
return dist
def project(x, xs, ys):
"""
Projection
"""
assert (type(x) is n | p.ndarray) and (type(xs) is np.ndarray) and (type(ys) is np.ndarray), \
"*** ERROR (Force-Scheme): project input must be numpy.array type."
ninst, dim = x.shape # number os instances, data di | mension
k, a = xs.shape # number os sample instances
p = ys.shape[1] # visual space dimension
assert dim == a, "*** LAMP Error: x and xs dimensions must be egual."
Y = np.zeros((ninst, p))
for pt in range(ninst):
# computes alphas
alpha = np.zeros(k)
for i in range(k):
# verify if the point to be projectec is a control point
# avoids division by zero
if np.linalg.norm(xs[i] - x[pt]) < tol:
alpha[i] = np.finfo(float).max
else:
alpha[i] = 1 / np.linalg.norm(xs[i] - x[pt])**2
# computes x~ and y~ (eq 3)
xtilde = np.zeros(dim)
ytilde = np.zeros(p)
for i in range(k):
xtilde += alpha[i] * xs[i]
ytilde += alpha[i] * ys[i]
xtilde /= np.sum(alpha)
ytilde /= np.sum(alpha)
A = np.zeros((k, dim))
B = np.zeros((k, p))
xhat = np.zeros((k, dim))
yhat = np.zeros((k, p))
# computation of x^ and y^ (eq 6)
for i in range(k):
xhat[i] = xs[i] - xtilde
yhat[i] = ys[i] - ytilde
A[i] = np.sqrt(alpha[i]) * xhat[i]
B[i] = np.sqrt(alpha[i]) * yhat[i]
U, D, V = np.linalg.svd(np.dot(A.T, B)) # (eq 7)
# VV is the matrix V filled with zeros
VV = np.zeros((dim, p)) # size of U = dim, by SVD
for i in range(p): # size of V = p, by SVD
VV[i,range(p)] = V[i]
M = np.dot(U, VV) # (eq 7)
Y[pt] = np.dot(x[pt] - xtilde, M) + ytilde # (eq 8)
return Y
def plot(y, t):
import matplotlib.pyplot as mpl
mpl.scatter(y.T[0], y.T[1], c = t)
mpl.show()
def test():
import time, sys, force
print "Loading data set... ",
sys.stdout.flush()
data = np.loadtxt("iris.data", delimiter=",")
print "Done."
n, d = data.shape
k = int(np.ceil(np.sqrt(n)))
x = data[:, range(d-1)]
t = data[:, d-1]
sample_idx = np.random.permutation(n)
sample_idx = sample_idx[range(k)]
xs = x[sample_idx, :]
# force
start_time = time.time()
print "Projecting samples... ",
sys.stdout.flush()
ys = force.project(xs)
print "Done. Elapsed time:", time.time() - start_time, "s."
# lamp
start_time = time.time()
print "Projecting... ",
sys.stdout.flush()
y = project(x, xs, ys)
print "Done. Elapsed time:", time.time() - start_time, "s."
plot(y, t)
if __name__ == "__main__":
print "Running test..."
test()
|
iosonofabio/singlet | singlet/counts_table/counts_table_sparse.py | Python | mit | 10,070 | 0.000894 | # vim: fdm=indent
# author: Fabio Zanini
# date: 09/08/17
# content: Sparse table of gene counts
# Modules
import numpy as np
import pandas as pd
# Classes / functions
class CountsTableSparse(pd.SparseDataFrame):
'''Sparse table of gene expression counts
- Rows are features, e.g. ge | nes.
- Columns are samples.
'''
_metadata = [
'name',
'_spikeins',
'_otherfeatures',
'_normalized',
'pseudocount',
'dataset',
]
_spikeins = ()
_otherfeatures = ()
_normalized = False
pseudocount = 0.1
dataset = None
@property
def _constructor(self):
return CountsTableSparse
@classmethod
def from_tablename(cls, tablename):
| '''Instantiate a CountsTable from its name in the config file.
Args:
tablename (string): name of the counts table in the config file.
Returns:
CountsTable: the counts table.
'''
from ..config import config
from ..io import parse_counts_table_sparse
self = cls(parse_counts_table_sparse({'countsname': tablename}))
self.name = tablename
config_table = config['io']['count_tables'][tablename]
self._spikeins = config_table.get('spikeins', [])
self._otherfeatures = config_table.get('other', [])
self._normalized = config_table['normalized']
return self
@classmethod
def from_datasetname(cls, datasetname):
'''Instantiate a CountsTable from its name in the config file.
Args:
datasetename (string): name of the dataset in the config file.
Returns:
CountsTableSparse: the counts table.
'''
from ..config import config
from ..io import parse_counts_table_sparse
self = cls(parse_counts_table_sparse({'datasetname': datasetname}))
self.name = datasetname
config_table = config['io']['datasets'][datasetname]['counts_table']
self._spikeins = config_table.get('spikeins', [])
self._otherfeatures = config_table.get('other', [])
self._normalized = config_table['normalized']
return self
def to_npz(self, filename):
'''Save to numpy compressed file format'''
from .io.npz import to_counts_table_sparse
to_counts_table_sparse(self, filename)
def exclude_features(self, spikeins=True, other=True, inplace=False,
errors='raise'):
'''Get a slice that excludes secondary features.
Args:
spikeins (bool): Whether to exclude spike-ins
other (bool): Whether to exclude other features, e.g. unmapped reads
inplace (bool): Whether to drop those features in place.
errors (string): Whether to raise an exception if the features
to be excluded are already not present. Must be 'ignore'
or 'raise'.
Returns:
CountsTable: a slice of self without those features.
'''
drop = []
if spikeins:
drop.extend(self._spikeins)
if other:
drop.extend(self._otherfeatures)
out = self.drop(drop, axis=0, inplace=inplace, errors=errors)
if inplace and (self.dataset is not None):
self.dataset._featuresheet.drop(drop, inplace=True, errors=errors)
return out
def get_spikeins(self):
'''Get spike-in features
Returns:
CountsTable: a slice of self with only spike-ins.
'''
return self.loc[self._spikeins]
def get_other_features(self):
'''Get other features
Returns:
CountsTable: a slice of self with only other features (e.g.
unmapped).
'''
return self.loc[self._otherfeatures]
def log(self, base=10):
'''Take the pseudocounted log of the counts.
Args:
base (float): Base of the log transform
Returns:
A transformed CountsTableSparse with zeros at the zero-count items.
'''
from scipy.sparse import coo_matrix
coo = self.to_coo()
coobase = np.log(self.pseudocount) * coo_matrix((np.ones(coo.nnz), (coo.row, coo.col)), shape=coo.shape)
coolog = ((coo / self.pseudocount).log1p() + coobase) / np.log(base)
# NOTE: the entries that should be log(pseudocount) are zeros now
clog = CountsTableSparse(
coolog,
index=self.index,
columns=self.columns,
dtype=float,
default_fill_value=0)
return clog
def unlog(self, base=10):
'''Reverse the pseudocounted log of the counts.
Args:
base (float): Base of the log transform
Returns:
A transformed CountsTableSparse.
'''
from scipy.sparse import coo_matrix
coo = self.to_coo()
coobase = np.log(self.pseudocount) * coo_matrix((np.ones(coo.nnz), (coo.row, coo.col)), shape=coo.shape)
cooexp = (coo * np.log(base) - coobase).expm1() * self.pseudocount
cexp = CountsTableSparse(
cooexp,
index=self.index,
columns=self.columns,
dtype=float,
default_fill_value=0)
return cexp
def normalize(
self,
method='counts_per_million',
include_spikeins=False,
**kwargs):
'''Normalize counts and return new CountsTable.
Args:
method (string or function): The method to use for normalization.
One of 'counts_per_million', 'counts_per_thousand_spikeins',
'counts_per_thousand_features'm 'counts_per_million_column'.
If this argument is a function, its signature depends on the
inplace argument. It must take the CountsTable as input and
return the normalized one as output. You can end your function
by self[:] = <normalized counts>.
include_spikeins (bool): Whether to include spike-ins in the
normalization and result.
inplace (bool): Whether to modify the CountsTable in place or
return a new one.
Returns:
A new, normalized CountsTableSparse.
NOTE: if method == 'counts_per_million_column', you have to use an
additional keyword argument called 'column' that specifies the column
of the samplesheet containing the normalization baseline. For instance,
if your samplesheet has a column called 'total_counts' that you want to
use for normalization, call:
CountsTableSparse.normalize(
method='counts_per_million_column',
column='total_counts')
This requires the count table to be linked to a Dataset.
'''
import copy
if method == 'counts_per_million':
counts = self.exclude_features(spikeins=(not include_spikeins), other=True)
norm = counts.sum(axis=0)
counts_norm = 1e6 * counts / norm
elif method == 'counts_per_thousand_spikeins':
counts = self.exclude_features(spikeins=(not include_spikeins), other=True)
norm = self.get_spikeins().sum(axis=0)
counts_norm = 1e3 * counts / norm
elif method == 'counts_per_thousand_features':
if 'features' not in kwargs:
raise ValueError('Set features=<list of normalization features>')
counts = self.exclude_features(spikeins=(not include_spikeins), other=True)
norm = self.loc[kwargs['features']].sum(axis=0)
counts_norm = 1e3 * counts / norm
elif method == 'counts_per_million_column':
if 'column' not in kwargs:
raise ValueError('Specify a samplesheet column with column=<mycolumn>')
counts = self.exclude_features(spikeins=(not include_spikeins), other=True)
norm = self.dataset[kwargs['column']].values
counts_norm = 1e6 * counts / norm
elif callable(method):
cou |
PyPSA/PyPSA | pypsa/io.py | Python | mit | 43,000 | 0.00707 |
## Copyright 2015-2021 PyPSA Developers
## You can find the list of PyPSA Developers at
## https://pypsa.readthedocs.io/en/latest/developers.html
## PyPSA is released under the open source MIT License, see
## https://github.com/PyPSA/PyPSA/blob/master/LICENSE.txt
"""Functions for importing and exporting data.
"""
__author__ = "PyPSA Developers, see https://pypsa.readthedocs.io/en/latest/developers.html"
__copyright__ = ("Copyright 2015-2021 PyPSA Developers, see https://pypsa.readthedocs.io/en/latest/developers.html, "
"MIT License")
import logging
logger = logging.getLogger(__name__)
import os
from textwrap import dedent
from glob import glob
import pandas as pd
import numpy as np
import math
try:
import xarray as xr
has_xarray = True
except ImportError:
has_xarray = False
class ImpExper(object):
ds = None
def __enter__(self):
if self.ds is not None:
self.ds = self.ds.__enter__()
return self
def __exit__(self, exc_type, exc_val, exc_tb):
if exc_type is None:
self.finish()
if self.ds is not None:
self.ds.__exit__(exc_type, exc_val, exc_tb)
def finish(self):
pass
class Exporter(ImpExper):
def remove_static(self, list_name):
pass
def remove_series(self, list_name, attr):
pass
class Importer(ImpExper):
pass
class ImporterCSV(Importer):
def __init__(self, csv_folder_name, encoding):
self.csv_folder_name = csv_folder_name
self.encoding = encoding
assert os.path.isdir(csv_folder_name), "Directory {} does not exist.".format(csv_fol | der_name)
def get_attributes(self):
fn = os.path.join(self.csv_folder_name, "netw | ork.csv")
if not os.path.isfile(fn): return None
return dict(pd.read_csv(fn, encoding=self.encoding).iloc[0])
def get_snapshots(self):
fn = os.path.join(self.csv_folder_name, "snapshots.csv")
if not os.path.isfile(fn): return None
df = pd.read_csv(fn, index_col=0, encoding=self.encoding, parse_dates=True)
# backwards-compatibility: level "snapshot" was rename to "timestep"
if "snapshot" in df:
df["snapshot"] = pd.to_datetime(df.snapshot)
if "timestep" in df:
df["timestep"] = pd.to_datetime(df.timestep)
return df
def get_investment_periods(self):
fn = os.path.join(self.csv_folder_name, "investment_periods.csv")
if not os.path.isfile(fn): return None
return pd.read_csv(fn, index_col=0, encoding=self.encoding)
def get_static(self, list_name):
fn = os.path.join(self.csv_folder_name, list_name + ".csv")
return (pd.read_csv(fn, index_col=0, encoding=self.encoding)
if os.path.isfile(fn) else None)
def get_series(self, list_name):
for fn in os.listdir(self.csv_folder_name):
if fn.startswith(list_name+"-") and fn.endswith(".csv"):
attr = fn[len(list_name)+1:-4]
df = pd.read_csv(os.path.join(self.csv_folder_name, fn),
index_col=0, encoding=self.encoding, parse_dates=True)
yield attr, df
class ExporterCSV(Exporter):
def __init__(self, csv_folder_name, encoding):
self.csv_folder_name = csv_folder_name
self.encoding = encoding
#make sure directory exists
if not os.path.isdir(csv_folder_name):
logger.warning("Directory {} does not exist, creating it"
.format(csv_folder_name))
os.mkdir(csv_folder_name)
def save_attributes(self, attrs):
name = attrs.pop('name')
df = pd.DataFrame(attrs, index=pd.Index([name], name='name'))
fn = os.path.join(self.csv_folder_name, "network.csv")
df.to_csv(fn, encoding=self.encoding)
def save_snapshots(self, snapshots):
fn = os.path.join(self.csv_folder_name, "snapshots.csv")
snapshots.to_csv(fn, encoding=self.encoding)
def save_investment_periods(self, investment_periods):
fn = os.path.join(self.csv_folder_name, "investment_periods.csv")
investment_periods.to_csv(fn, encoding=self.encoding)
def save_static(self, list_name, df):
fn = os.path.join(self.csv_folder_name, list_name + ".csv")
df.to_csv(fn, encoding=self.encoding)
def save_series(self, list_name, attr, df):
fn = os.path.join(self.csv_folder_name, list_name + "-" + attr + ".csv")
df.to_csv(fn, encoding=self.encoding)
def remove_static(self, list_name):
fns = glob(os.path.join(self.csv_folder_name, list_name) + "*.csv")
if fns:
for fn in fns: os.unlink(fn)
logger.warning("Stale csv file(s) {} removed".format(', '.join(fns)))
def remove_series(self, list_name, attr):
fn = os.path.join(self.csv_folder_name, list_name + "-" + attr + ".csv")
if os.path.exists(fn):
os.unlink(fn)
class ImporterHDF5(Importer):
def __init__(self, path):
self.ds = pd.HDFStore(path, mode='r')
self.index = {}
def get_attributes(self):
return dict(self.ds["/network"].reset_index().iloc[0])
def get_snapshots(self):
return self.ds["/snapshots"] if "/snapshots" in self.ds else None
def get_investment_periods(self):
return self.ds["/investment_periods"] if "/investment_periods" in self.ds else None
def get_static(self, list_name):
if "/" + list_name not in self.ds:
return None
if self.pypsa_version is None or self.pypsa_version < [0, 13, 1]:
df = self.ds["/" + list_name]
else:
df = self.ds["/" + list_name].set_index('name')
self.index[list_name] = df.index
return df
def get_series(self, list_name):
for tab in self.ds:
if tab.startswith('/' + list_name + '_t/'):
attr = tab[len('/' + list_name + '_t/'):]
df = self.ds[tab]
if self.pypsa_version is not None and self.pypsa_version > [0, 13, 0]:
df.columns = self.index[list_name][df.columns]
yield attr, df
class ExporterHDF5(Exporter):
def __init__(self, path, **kwargs):
self.ds = pd.HDFStore(path, mode='w', **kwargs)
self.index = {}
def save_attributes(self, attrs):
name = attrs.pop('name')
self.ds.put('/network',
pd.DataFrame(attrs, index=pd.Index([name], name='name')),
format='table', index=False)
def save_snapshots(self, snapshots):
self.ds.put('/snapshots', snapshots, format='table', index=False)
def save_investment_periods(self, investment_periods):
self.ds.put('/investment_periods', investment_periods, format='table', index=False)
def save_static(self, list_name, df):
df.index.name = 'name'
self.index[list_name] = df.index
df = df.reset_index()
self.ds.put('/' + list_name, df, format='table', index=False)
def save_series(self, list_name, attr, df):
df.columns = self.index[list_name].get_indexer(df.columns)
self.ds.put('/' + list_name + '_t/' + attr, df, format='table', index=False)
if has_xarray:
class ImporterNetCDF(Importer):
def __init__(self, path):
self.path = path
if isinstance(path, str):
self.ds = xr.open_dataset(path)
else:
self.ds = path
def __enter__(self):
if isinstance(self.path, str):
super(ImporterNetCDF, self).__init__()
return self
def __exit__(self, exc_type, exc_val, exc_tb):
if isinstance(self.path, str):
super(ImporterNetCDF, self).__exit__(exc_type, exc_val, exc_tb)
def get_attributes(self):
return {attr[len('network_'):]: val
for attr, val in self.ds.attrs.items()
if attr.startswith('network_')}
def get_snapshots(self):
return self.get_static('snapshots', 'snapshots')
def get_investment_periods |
fretboardfreak/space | tests/lib/cmdline/commands/test_quit.py | Python | apache-2.0 | 839 | 0 | # Copyright 2015 Curtis Sand
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you ma | y not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either | express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import lib.cmdline.commands as commands
from .test_base import BaseCommandTest
class QuitTest(BaseCommandTest):
def setUp(self):
super().setUp()
self.command_class = commands.Quit
self.alias_commands = ['do_q', 'do_EOF', 'do_exit']
|
GbalsaC/bitnamiP | venv/lib/python2.7/site-packages/pylint/test/input/func_tokenize_error.py | Python | agpl-3.0 | 173 | 0.00578 | "" | "A module that is accepted by Python but rejected by tokenize.
The problem is the trailing line continuation at the end of the line,
which produces a TokenError. | """
""\
|
gamechanger/hydra | copy_stragglers.py | Python | bsd-2-clause | 4,159 | 0.003126 | #! /usr/bin/env python
import argparse
from compare_collections import MismatchLogger
from faster_ordered_dict import FasterOrderedDict
import gevent
import gevent.monkey
from gevent.pool import Pool
from pymongo.read_preferences import ReadPreference
import time
import utils
log = utils.get_logger(__name__)
POOL_SIZE = 20
class Stats(object):
def __init__(self):
self.start_time = time.time()
self.processed = 0
self.not_found = 0
self.total = None
def log(self):
log.info("%d / %d processed | %d not found", stats.processed, stats.total, stats.not_found)
def copy_document_worker(query_doc, source_collection, dest_collection, stats):
"""
greenlet function that copies a document identified by the query document
there is a *very* narrow race condition where the document might be deleted from the source
between our find() and save(); that seems an acceptable risk
"""
docs = [doc for doc in source_collection.find(query_doc)]
assert len(docs) <= 1
if len(docs) == 0:
# if the document has been deleted from the source, we assume that the oplog applier
# will delete from the destination in the future
stats.not_found += 1
stats.processed += 1
else:
# we have the document, so copy it
dest_collection.save(docs[0])
stats.processed +=1
def stats_worker(stats):
"""
prints stats periodically
"""
while True:
gevent.sleep(3)
stats.log()
if __name__ == '__main__':
utils.tune_gc()
gevent.monkey.patch_socket()
parser = argparse.ArgumentParser(description='Through stdin, reads JSON documents containing _ids and shark keys for mismatching documents and re-copies those documents.')
parser.add_argument(
'--source', type=str, required=True, metavar='URL',
help='source to read from; e.g. localhost:27017/prod_maestro.emails')
parser.add_argument(
'--dest', type=str, required=True, metavar='URL',
help='destination to copy to; e.g. localhost:27017/destination_db.emails')
parser.add_argument(
'--mismatches-file', type=str, default=None, required=True, metavar='FILENAME',
help='read ids to copy from this file, which is generated by compare_collections. | py' | )
args = parser.parse_args()
# connect to source and destination
source = utils.parse_mongo_url(args.source)
source_client = utils.mongo_connect(source,
ensure_direct=True,
maxPoolSize=POOL_SIZE,
read_preference=ReadPreference.SECONDARY_PREFERRED,
document_class=FasterOrderedDict)
source_collection = source_client[source['db']][source['collection']]
if not source_client.is_mongos or source_client.is_primary:
raise Exception("source must be a mongos instance or a primary")
dest = utils.parse_mongo_url(args.dest)
dest_client = utils.mongo_connect(dest,
maxPoolSize=POOL_SIZE,
document_class=FasterOrderedDict)
dest_collection = dest_client[dest['db']][dest['collection']]
if source == dest:
raise ValueError("source and destination cannot be the same!")
# periodically print stats
stats = Stats()
stats_greenlet = gevent.spawn(stats_worker, stats)
# copy documents!
pool = Pool(POOL_SIZE)
with open(args.mismatches_file) as mismatches_file:
lines = mismatches_file.readlines() # copy everything into memory -- hopefully that isn't huge
stats.total = len(lines)
for line in lines:
query_doc = {'_id': MismatchLogger.decode_mismatch_id(line)}
pool.spawn(copy_document_worker,
query_doc=query_doc,
source_collection=source_collection,
dest_collection=dest_collection,
stats=stats)
# wait for everythng to finish
gevent.sleep()
pool.join()
stats_greenlet.kill()
stats.log()
log.info('done')
|
dowlingw/pelican-attachments | __init__.py | Python | gpl-3.0 | 1,859 | 0.009145 | import os
import shutil
import six
from logging import warning, info
from pelican import signals
from pelican.contents import Static
from pelican.utils import copy, process_translations, mkdir_p
METADATA_KEY = 'attachments'
def get_generators(generators):
return AttachmentGenerator
def register():
signals.get_generators.connect(get_generators)
class AttachmentGenerator(object):
def __init__(self, context, settings, path, theme, output_path, *null):
self.staticfiles = []
self.output_path = output_path
self.context = context
self.sett | ings = settings
self.siteurl = settings.get('SITEURL')
def gener | ate_context(self):
for article in self.context['articles']:
if METADATA_KEY in article.metadata:
files = article.metadata[METADATA_KEY].split(', ')
for file in files:
self._emitfile(article,file)
def _emitfile(self, article, filename):
file_path = os.path.join( os.path.dirname(article.get_relative_source_path()), filename )
src_path = os.path.join( os.path.dirname(article.source_path), filename )
file_relurl = os.path.join( article.url, filename )
metadata = { 'save_as': file_relurl }
sc = Static(
content=None,
metadata=metadata,
settings=self.settings,
source_path=src_path )
sc.override_url = file_relurl
self.staticfiles.append(sc)
self.context['filenames'][file_path] = sc
def generate_output(self, writer):
for sc in self.staticfiles:
output_file = os.path.join(self.output_path, sc.save_as )
try:
os.makedirs(os.path.dirname(output_file) )
except:
pass
shutil.copy(sc.source_path, output_file)
|
goshow-jp/Kraken | Python/kraken/ui/HAppkit_Editors/editor_widgets/color_editor.py | Python | bsd-3-clause | 2,883 | 0.005203 | from PySide import QtGui
from ..fe import FE
from ..widget_factory import EditorFactory
from ..base_editor import BaseValueEditor
from ..core.undo_redo_manager import UndoRedoManager
class ColorEditor(BaseValueEditor):
def __init__(self, valueController, parent=None):
super(ColorEditor, self).__init__(valueController, pa | rent=parent)
| hbox = QtGui.QHBoxLayout()
self._editors = []
self._qgraphcsScene = QtGui.QGraphicsScene(self)
self._qgraphcsView = QtGui.QGraphicsView(self)
self._qgraphcsView.setScene(self._qgraphcsScene)
self._qgraphcsView.setFixedSize(100, 20)
self._qgraphcsView.setSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed)
hbox.addWidget(self._qgraphcsView, 1)
hbox.addStretch(0)
hbox.setContentsMargins(0, 0, 0, 0)
self.setLayout(hbox)
self.setSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Fixed)
self.updateEditorValue()
self.setEditable( valueController.isEditable() )
def setEditable(self, editable):
self.setEnabled(editable)
def getEditorValue(self):
scalarKLType = FE.getInstance().types().Scalar
colorKLType = FE.getInstance().types().Color
return colorKLType(
scalarKLType(self.__color.redF()),
scalarKLType(self.__color.greenF()),
scalarKLType(self.__color.blueF())
)
def setEditorValue(self, value):
self.__color = QtGui.QColor(value.r*255, value.g*255, value.b*255)
self._qgraphcsView.setBackgroundBrush(self.__color)
self._qgraphcsView.update()
def mousePressEvent(self, event):
if self._valueController.isEditable():
self.__backupColor = self.__color
self.beginInteraction()
dialog = QtGui.QColorDialog(self.__color, self)
dialog.currentColorChanged.connect(self.__onColorChanged)
dialog.accepted.connect(self.__onAccepted)
dialog.rejected.connect(self.__onCanceled)
dialog.setModal(True)
dialog.show()
def __onColorChanged(self, qcolor):
self.__color = QtGui.QColor(qcolor.redF()*255, qcolor.greenF()*255, qcolor.blueF()*255)
self._qgraphcsView.setBackgroundBrush(self.__color)
self._setValueToController()
def __onAccepted(self):
self.endInteraction()
def __onCanceled(self):
self.endInteraction()
undoManager = UndoRedoManager.getInstance()
if undoManager and undoManager.canUndo():
undoManager.undo()
else:
self.__onColorChanged(self.__backupColor)
@classmethod
def canDisplay(cls, valueController):
if valueController.getDataType() == 'Color':
return True
return False
EditorFactory.registerEditorClass(ColorEditor)
|
simgunz/timekpr | src/original/gui/client/timekpr-client.py | Python | gpl-3.0 | 9,751 | 0.004 | #!/usr/bin/env python
import gtk
import gobject
import os
from time import strftime, sleep
import datetime
from timekprpam import *
from timekprcommon import *
import locale
import gettext
import sys
APP_NAME = "timekpr"
#Translation stuff
#Get the local directory
local_path = '/usr/share/locale'
locale.setlocale(locale.LC_ALL, '')
gettext.bindtextdomain(APP_NAME, local_path)
gettext.textdomain(APP_NAME)
_ = gettext.gettext
class TimekprClient:
def __init__(self):
self.VAR = getvariables(False)
self.checkInterval = 60
self.tray = gtk.StatusIcon()
self.red = self.VAR['TIMEKPRSHARED'] + '/timekpr32x32.png'
self.green = self.VAR['TIMEKPRSHARED'] + '/padlock-green.png'
self.tray.set_from_file(self.red)
self.tray.set_tooltip('Timekpr-client')
self.tray.set_visible(True)
self.tray.connect('activate', self.on_activate)
self.tray.connect('popup-menu', self.on_popup_menu)
self.username = os.getenv('USER')
self.timefile = self.VAR['TIMEKPRWORK'] + '/' + self.username + '.time'
self.allowfile = self.VAR['TIMEKPRWORK'] + '/' + self.username + '.allow'
self.conffile = self.VAR['TIMEKPRDIR'] + '/' + self.username
self.limits, self.bfrom, self.bto = readusersettings(self.username, self.conffile)
self.timer = None
self.checkLimits()
#Add a gobject loop to check limits:
self.timer = gobject.timeout_add(self.checkInterval * 1000, self.checkLimits)
#Add a notifier every 15 minutes
gobject.timeout_add(15 * 60 * 1000, self.pnotifier)
self.click = False
self.lastNotified = datetime.datetime.now()
def fractSec(self, s):
m, s = divmod(s, 60)
h, m = divmod(m, 60)
return h, m, s
'''
Returns the number of seconds a user has left, False if user.time does not exist
'''
def gettime(self, tfile):
if not isfile(tfile):
return False
t = open(tfile)
time = int(t.readline())
t.close()
return time
'''
Returns current time
'''
def now(self):
return datetime.datetime.now()
def timeofbto(self, index):
y = datetime.date.today().year
m = datetime.date.today().month
d = datetime.date.today().day
h = self.bto[index]
date = datetime.date(y, m, d)
if h == 24:
h = 0
date = date + datetime.timedelta(days=1)
dt = datetime.datetime(date.year, date.month, date.day, h, 0, 0)
return dt
'''
Detect and return the desktop environment user is using
'''
def get_de(self):
if "KDE_FULL_SESSION" in os.environ or "KDE_MULTIHEAD" in os.environ:
return "KDE"
elif "GNOME_DESKTOP_SESSION_ID" in os.environ or "GNOME_KEYRING_SOCKET" in os.environ:
return "GNOME"
elif getcmdoutput("xprop -root _DT_SAVE_MODE").strip().endswith(' = "xfce4"'):
return "XFCE"
'''
Returns the version of KDE in use (4 if KDE4, or 3 for everything else)
'''
def kde_version(self):
version = getcmdoutput('echo $KDE_SESSION_VERSION')
if version == "\n":
return 3
else:
return int(version)
'''
Left click on tray icon
'''
def on_activate(self, data):
self.click = True
self.pnotifier()
'''
Right click on tray icon
Should we add a menu to this action?
'''
def on_popup_menu(self, status, button, time):
self.click = True
| self.pnotifier()
'''
Run every checkInterval seconds, check if user has run out of time
'''
def checkLimits(self):
# Re-read settings in case they changed
self.limits, sel | f.bfrom, self.bto = readusersettings(self.username, self.conffile)
index = int(strftime("%w"))
# If the user is not a restricted user, set the tray icon to green padlock
if not isrestricteduser(self.username, self.limits[index]):
self.tray.set_from_file(self.green)
return
else:
self.tray.set_from_file(self.red)
# In case timefile does not exist yet
if not self.gettime(self.timefile):
return True
time = self.gettime(self.timefile)
if isearly(self.bfrom, self.allowfile):
self.notifier(_('You are early, you will be logged out in LESS than 2 minutes'))
if islate(self.bto, self.allowfile):
self.notifier(_('You are late, you will be logged out in LESS than 2 minutes'))
if ispasttime(self.limits, time):
self.notifier(_('Your time is up, you will be logged out in LESS than 2 minutes'))
return True
'''
Returns a formated string with the time left for a user
'''
def timeleftstring(self, h, m, s):
# Hour Plurality
if h == 1:
hplr = "hour"
elif h > 1:
hplr = "hours"
# Minute Plurality
if m ==1:
mplr = "minute"
elif m > 1:
mplr = "minutes"
# Second Plurality
if s == 1:
splr = "second"
elif s > 1:
splr = "seconds"
# Printing Correctly
if h >= 1 and m >= 1 and s >= 1:
message = _('You have %(hour)s %(hplr)s, %(min)s %(mplr)s and %(sec)s %(splr)s left') % {'hour': h, 'hplr': hplr, 'min': m, 'mplr': mplr, 'sec': s, 'splr': splr}
elif h >= 1 and m >= 1 and s == 0:
message = _('You have %(hour)s %(hplr)s, %(min)s %(mplr)s left') % {'hour': h, 'hplr': hplr, 'min': m, 'mplr': mplr,}
elif h >= 1 and m == 0 and s >= 1:
message = _('You have %(hour)s %(hplr)s and %(sec)s %(splr)s left') % {'hour': h, 'hplr': hplr, 'sec': s, 'splr': splr}
elif h >= 1 and m == 0 and s == 0:
message = _('You have %(hour)s %(hplr)s left') % {'hour': h, 'hplr': hplr,}
if h == 0 and m >= 1 and s >= 1:
message = _('You have %(min)s %(mplr)s and %(sec)s %(splr)s left') % {'min': m, 'mplr': mplr, 'sec': s, 'splr': splr}
elif h == 0 and m >= 1 and s == 0:
message = _('You have %(min)s %(mplr)s left') % {'min': m, 'mplr': mplr,}
elif h == 0 and m == 0 and s >= 1:
message = _('You have %(sec)s %(splr)s left') % {'sec': s, 'splr': splr}
return message
'''
Periodic notifier, gives notifications to the user.
Runs every 15 minutes, as long as time left > 15 minutes
'''
def pnotifier(self):
if not self.gettime(self.timefile):
return True
index = int(strftime("%w"))
# How much time if left?
usedtime = self.gettime(self.timefile)
timeleft = self.limits[index] - usedtime
timeuntil = self.timeofbto(index) - self.now()
tuntil = timeuntil.seconds
# What is less?
if timeleft <= tuntil:
left = timeleft
else:
left = tuntil
# If the time is up, notifications is taken care of by checkLimits
if left <= 0:
return True
h, m, s = self.fractSec(left)
message = self.timeleftstring(h, m, s)
self.notifier(message)
# if time left is less than 8 minutes, notify every second minute
if left < 480 and not self.click:
gobject.timeout_add(2 * 60 * 1000, self.pnotifier)
return False
# if time left is less than 20 minutes, notify every 5 minutes
if left < 1200 and not self.click:
gobject.timeout_add(5 * 60 * 1000, self.pnotifier)
return False
self.click = False
return True
'''
Actual notifier
'''
def notifier(self, message):
index = int(strftime("%w"))
# Don't notify an unrestricted user
if not isrestricteduser(self.username, self.limits[index]):
return
title = "Timekpr"
# Don't notify if we just gave a notification
if (datetime.datetime.now() - self.lastNotified).seconds < 5:
return
# Gnome and XFCE can user n |
zubie7a/Algorithms | CodeSignal/Arcade/Intro/Level_03/02_Common_Character_Count.py | Python | mit | 637 | 0.00157 | # https://app.codesignal.com/arcade/intro/level-3/JKKuHJknZNj4YGL32
from collections impor | t Counter
def commonCharacterCount(s1, s2):
# When counter is initialized with a Str | ing, since its a
# sequence it will store counts for each of its characters.
c1 = Counter(s1)
c2 = Counter(s2)
common = 0
# Given two strings, find the amount of common characters
# between both. This is the min count of a given character
# ocurring at both. If it doesn't occur, then value is 0
# and min will be 0.
for item in c2.items():
key = item[0]
common += min(c2[key], c1[key])
return common
|
PaulWay/insights-core | insights/tests/test_extractors.py | Python | apache-2.0 | 3,827 | 0.001307 | import os
import unittest
import tempfile
import subprocess
import shlex
import gzip
import zipfile
from insights.core import archives
from insights.core.specs import SpecMapper
from . import insights_heartbeat
class TestTarExtractor(unittest.TestCase):
def test_from_buffer_with_directory(self):
arc_path = insights_heartbeat()
tmp_dir = tempfile.mkdtemp()
command = "tar -a -x -f %s -C %s" % (arc_path, tmp_dir)
subprocess.call(shlex.split(command))
with archives.TarExtractor() as tar_ex:
with archives.TarExtractor() as dir_ex:
tar_tf = tar_ex.from_path(arc_path)
tar_all_files = tar_tf.getnames()
dir_tf = dir_ex.from_path(tmp_dir)
dir_all_files = dir_tf.getnames()
self.assertEqual(len(tar_all_files), len(dir_all_files))
for tar_path in tar_all_files:
dir_path = os.path.join(dir_tf.tar_file.path,
os.path.relpath(tar_path, tar_tf.tar_file.path))
if not os.path.isdir(tar_path):
tar_content = tar_tf.extractfile(tar_path)
dir_content = dir_tf.extractfile(dir_path)
self.assertEqual(tar_content, dir_content)
command = "rm -rf %s"
subprocess.call(shlex.split(command % tmp_dir))
subprocess.call(shlex.split(command % arc_path))
def test__assert_type_gzip_tar(self):
arc_path = insights_heartbeat()
with archives.TarExtractor() as tar_ex:
tar_ex._assert_type(arc_path, False)
sel | f.assertIn(tar_ex.content_type, archives.TarEx | tractor.TAR_FLAGS)
subprocess.call(shlex.split("rm -rf %s" % arc_path))
def test__assert_type_gzip_no_tar(self):
tmp_dir = tempfile.mkdtemp()
archive_path = os.path.join(tmp_dir, "file.log.gz")
with gzip.open(archive_path, 'wb') as f:
f.write("testing contents")
with archives.TarExtractor() as tar_ex:
with self.assertRaises(archives.InvalidArchive) as cm:
tar_ex._assert_type(archive_path, False)
self.assertEqual(cm.exception.msg, "No compressed tar archive")
class TestZipFileExtractor(unittest.TestCase):
def test_with_zip(self):
tmp_dir = tempfile.mkdtemp()
arc_path = insights_heartbeat()
command = "tar -a -x -f %s -C %s" % (arc_path, tmp_dir)
subprocess.call(shlex.split(command))
try:
os.unlink("/tmp/test.zip")
except:
pass
# stolen from zipfile.py:main
def _add_to_zip(zf, path, zippath):
if os.path.isfile(path):
zf.write(path, zippath, zipfile.ZIP_DEFLATED)
elif os.path.isdir(path):
if zippath:
zf.write(path, zippath)
for nm in os.listdir(path):
_add_to_zip(zf, os.path.join(path, nm), os.path.join(zippath, nm))
# else: ignore
with zipfile.ZipFile("/tmp/test.zip", "w") as zf:
_add_to_zip(zf, tmp_dir, os.path.basename(tmp_dir))
try:
with archives.ZipExtractor() as ex:
ex.from_path("/tmp/test.zip")
self.assertFalse("foo" in ex.getnames())
self.assertTrue(any(f.endswith("/sys/kernel/kexec_crash_size") for f in ex.getnames()))
spec_mapper = SpecMapper(ex)
self.assertEquals(spec_mapper.get_content("hostname"), ["insights-heartbeat-9cd6f607-6b28-44ef-8481-62b0e7773614"])
finally:
os.unlink("/tmp/test.zip")
subprocess.call(shlex.split("rm -rf %s" % tmp_dir))
subprocess.call(shlex.split("rm -rf %s" % arc_path))
|
fulfilio/nereid-webshop-elastic-search | website.py | Python | bsd-3-clause | 2,063 | 0 | # -*- coding: utf-8 -*-
'''
website
'''
from trytond.pool import Pool, PoolMeta
from nereid import request, route, render_template
from pagination import ElasticPagination
__metaclass__ = PoolMeta
__all__ = ['Website']
class Website:
"Nereid Website"
__name__ = 'nereid.website'
@classmethod
def auto_complete(cls, phrase):
"""
This is a downstream implementation which uses elasticsearch to return
results for a query.
"""
Product = Pool().get('product.product')
config = Pool().get('elasticsearch.configuration')(1)
if not config.get_es_connection(timeout=5):
# NO ES fallback to default search
return super(Website, cls).auto_complete(phrase)
return Product._es_autocomplete(phrase)
@classmethod
@route('/search')
def quick_search(cls):
"""
This version of quick_search uses elasticsearch to build
search results for searches from the website.
"""
Product = Pool().get('product.product')
config = Pool().get('elasticsearch.configuration')(1)
| if not config.get_es_connection(timeout=5):
# NO ES fallback to default search
return super(Website, cls).quick_search()
page = request.args.get('page', 1, type=int)
phrase = request.ar | gs.get('q', '')
logger = Pool().get('elasticsearch.configuration').get_logger()
search_obj = Product._quick_search_es(phrase)
products = ElasticPagination(
Product.__name__, search_obj, page, Product.per_page
)
if products:
logger.info(
"Search for %s yielded in %d results." %
(phrase, products.count)
)
else:
logger.info(
"Search for %s yielded no results from elasticsearch." % phrase
)
return render_template(
'search-results.jinja',
products=products,
facets=products.result_set.facets
)
|
ptcrypto/p2pool-adaptive | p2pool/bitcoin/networks/titcoin.py | Python | gpl-3.0 | 1,188 | 0.004209 | import os
import platform
from twisted.internet import defer
from .. import data, helper
from p2pool.util import pack
P2P_PREFIX = '25174c22'.decode('hex')
P2P_PORT = 8698
ADDRESS_VERSION = 0
RPC_PORT = 8697
RPC_CHECK = defer.inlineCallbacks(lambda bitcoind: defer.returnValue(
(yield helper.check_genesis_block(bitcoind, '00000000bb82b1cbe86b5fe62967c13ff2e8cdabf68adeea2038289771c3491f')) and
not (yield bitcoind.rpc_getinfo())['testnet']
))
SUBSIDY_FUNC = lambda height: 69*69000000 >> (height + 1)//500000
POW_FUNC = data.hash256
BLOCK_PERIOD = | 60 # s
SYMBOL = 'TIT'
CONF_FILE_FUNC = lambda: os.path.join(os.path.join(os.environ['APPDATA'], 'titcoin') if platform.system() == 'Windows' else os.path.expanduser('~/Library/Application Support/titcoin/') if platform.system() == 'Darwin' else os.path.expanduser('~/.titcoin'), 'titcoin.conf')
BLOCK_EXPLORER_URL_PREFIX = 'https://blockexperts.com/tit/hash/'
ADDRESS_EXPLORER_URL_PREFIX = 'http://blockexperts.com/tit/address/'
TX_EXPLORER_URL_PREFIX = 'http://blockexperts.com/ti | t/tx/'
SANE_TARGET_RANGE = (2**256//2**32//1000000 - 1, 2**256//2**32 - 1)
DUMB_SCRYPT_DIFF = 1
DUST_THRESHOLD = 0.001e8
|
jleni/QRL | src/qrl/core/ChainManager.py | Python | mit | 23,847 | 0.003103 | # coding=utf-8
# Distributed under the MIT software license, see the accompanying
# file LICENSE or http://www.opensource.org/licenses/mit-license.php.
import threading
from typing import Optional, Tuple
from pyqrllib.pyqrllib import bin2hstr
from pyqryptonight.pyqryptonight import StringToUInt256, UInt256ToString
from qrl.core import config, BlockHeader
from qrl.core.AddressState import AddressState
from qrl.core.Block import Block
from qrl.core.BlockMetadata import BlockMetadata
from qrl.core.DifficultyTracker import DifficultyTracker
from qrl.core.GenesisBlock import GenesisBlock
from qrl.core.PoWValidator import PoWValidator
from qrl.core.txs.Transaction import Transaction
from qrl.core.txs.CoinBase import CoinBase
from qrl.core.TransactionPool import TransactionPool
from qrl.core.misc import logger
from qrl.crypto.Qryptonight import Qryptonight
from qrl.generated import qrl_pb2, qrlstateinfo_pb2
class ChainManager:
def __init__(self, state):
self._state = state
self.tx_pool = TransactionPool(None)
self._last_block = Block.deserialize(GenesisBlock().serialize())
self.current_difficulty = StringToUInt256(str(config.user.genesis_difficulty))
self.trigger_miner = False
self.lock = threading.RLock()
@property
def height(self):
with self.lock:
if not self._last_block:
return -1
return self._last_block.block_number
@property
def last_block(self) -> Block:
with self.lock:
return self._last_block
@property
def total_coin_supply(self):
with self.lock:
return self._state.total_coin_supply
def get_block_datapoint(self, headerhash):
with self.lock:
return self._state.get_block_datapoint(headerhash)
def get_cumulative_difficulty(self):
with self.lock:
last_block_metadata = self._state.get_block_metadata(self._last_block.headerhash)
return last_block_metadata.cumulative_difficulty
def get_block_by_number(self, block_number) -> Optional[Block]:
with self.lock:
return self._state.get_block_by_number(block_number)
def get_block_header_hash_by_number(self, block_number) -> Optional[bytes]:
with self.lock:
return self._state.get_block_header_hash_by_number(block_number)
def get_block(self, header_hash: bytes) -> Optional[Block]:
with self.lock:
return self._state.get_block(header_hash)
def get_address_balance(self, address: bytes) -> int:
with self.lock:
return self._state.get_address_balance(address)
def get_address_is_used(self, address: bytes) -> bool:
with self.lock:
return self._state.get_address_is_used(address)
def get_address_state(self, address: bytes) -> AddressState:
with self.lock:
return self._state.get_address_state(address)
def get_all_address_state(self):
with self.lock:
return self._state.get_all_address_state()
def get_tx_metadata(self, transaction_hash) -> list:
with self.lock:
return self._state.get_tx_metadata(transaction_hash)
def get_last_transactions(self):
with self.lock:
return self._state.get_last_txs()
def get_unconfirmed_transaction(self, transaction_hash) -> list:
with self.lock:
for tx_set in self.tx_pool.transactions:
tx = tx_set[1].transaction
if tx.txhash == transaction_hash:
return [tx, tx_set[1].timestamp]
if transaction_hash in self.tx_pool.pending_tx_pool_hash:
for tx_set in self.tx_pool.pending_tx_pool:
tx = tx_set[1].transaction
if tx.txhash == transaction_hash:
return [tx, tx_set[1].timestamp]
return []
def get_block_metadata(self, header_hash: bytes) -> Optional[BlockMetadata]:
with self.lock:
return self._state.get_block_metadata(header_hash)
def get_blockheader_and_metadata(self, block_number=0) -> Tuple:
with self.lock:
block_number = block_number or self.height # if both are non-zero, then block_number takes priority
result = (None, None)
block = self.get_block_by_number(block_number)
if block:
blockheader = block.blockheader
blockmetadata = self.get_block_metadata(blockheader.headerhash)
result = (blockheader, blockmetadata)
return result
def get_block_to_mine(self, miner, wallet_address) -> list:
with miner.lock: # Trying to acquire miner.lock to make sure pre_block_logic is not running
with self.lock:
last_block = self.last_block
last_block_metadata = self.get_block_metadata(last_block.headerhash)
return miner.get_block_to_mine(wallet_address,
self.tx_pool,
last_block,
last_block_metadata.block_difficulty)
def get_measurement(self, block_timestamp, parent_headerhash, parent_metadata: BlockMetadata):
with self.lock:
return self._state.get_measurement(block_timestamp, parent_headerhash, parent_metadata)
def get_block_size_limit(self, block: Block):
with self.lock:
return self._state.get_block_size_limit(block)
def get_block_is_duplicate(self, block: Block) -> bool:
with self.lock:
return self._state.get_block(block.headerhash) is not None
def validate_mining_non | ce(self, blockheader: BlockHeader, enable_logging=True):
with self.lock:
parent_metadata = self.get_block_metadata(blockheader.prev_headerhash)
parent_block = self._state.get_block(blockheader.prev_headerhash)
measurement = self.get_measurement(blockheader.timestamp, blockheader.prev_headerhash, parent_metadata)
diff, target | = DifficultyTracker.get(
measurement=measurement,
parent_difficulty=parent_metadata.block_difficulty)
if enable_logging:
logger.debug('-----------------START--------------------')
logger.debug('Validate #%s', blockheader.block_number)
logger.debug('block.timestamp %s', blockheader.timestamp)
logger.debug('parent_block.timestamp %s', parent_block.timestamp)
logger.debug('parent_block.difficulty %s', UInt256ToString(parent_metadata.block_difficulty))
logger.debug('diff %s', UInt256ToString(diff))
logger.debug('target %s', bin2hstr(target))
logger.debug('-------------------END--------------------')
if not PoWValidator().verify_input(blockheader.mining_blob, target):
if enable_logging:
logger.warning("PoW verification failed")
qn = Qryptonight()
tmp_hash = qn.hash(blockheader.mining_blob)
logger.warning("{}".format(bin2hstr(tmp_hash)))
logger.debug('%s', blockheader.to_json())
return False
return True
def get_headerhashes(self, start_blocknumber):
with self.lock:
start_blocknumber = max(0, start_blocknumber)
end_blocknumber = min(self._last_block.block_number,
start_blocknumber + 2 * config.dev.reorg_limit)
total_expected_headerhash = end_blocknumber - start_blocknumber + 1
node_header_hash = qrl_pb2.NodeHeaderHash()
node_header_hash.block_number = start_blocknumber
block = self._state.get_block_by_number(end_blocknumber)
block_headerhash = block.headerhash
node_header_hash.headerhashes.append(block_headerhash)
end_blocknumber -= 1
while end_blocknu |
saisankargochhayat/algo_quest | leetcode/64.MinSumPath/soln.py | Python | apache-2.0 | 951 | 0.025237 | class Solution:
def minPathSum(self, grid: List[List[int]]) -> int:
row = len(grid)
col = len(grid[0])
dp = [[0]*col for i in range(row)]
minPath = 0
return self.findPath(grid, row-1, col-1, dp)
def findPath(self, grid, i, j, dp):
#print(i,j, minPath)
if dp[i][j]:
return dp[i][j]
if i == 0 and j == 0:
dp[i][j] = grid[i][j]
return grid[i][j]
elif i == 0:
dp[i][j] = | grid[i][j] + self.findPath(grid, i,max(0,j-1), dp)
return dp[i][j] #element to left
elif j == 0:
return self.findPath(grid, max(0,i-1),j, dp)+grid[i][j] #element on top
el | se:
a = self.findPath(grid, i,max(0,j-1), dp)+grid[i][j] #element to left
b = self.findPath(grid, max(0,i-1),j, dp)+grid[i][j] #element on top
dp[i][j] = min(a,b)
return dp[i][j]
|
enthought/depsolver | depsolver/solver/tests/scenarios/rules_generator.py | Python | bsd-3-clause | 5,572 | 0.00341 | import os.path as op
import tempita
from depsolver._package_utils \
import \
parse_package_full_name
from depsolver.compat \
import \
OrderedDict
from depsolver.package \
import \
PackageInfo
from depsolver.requirement \
import \
Requirement
from depsolver.solver.rules_generator \
import \
RulesGenerator
from depsolver.bundled.traitlets \
import \
HasTraits, Instance
from depsolver.solver.tests.scenarios.common \
import \
COMMON_IMPORTS, BaseScenario, packages_list_to_php_json, \
job_to_php_constraints, run_php_scenarios
DATA = op.join(op.dirname(__file__), "data", "rules_generator")
P = PackageInfo.from_string
R = Requirement.from_string
TEMPLATE = """\
<?php
require {{bootstrap_path}};
{{common_imports}}
$loader = new ArrayLoader();
/* Remote repository definition */
$remote_repo_json = '
{{remote_repo_json_string}}
';
$packages = JsonFile::parseJson($remote_repo_json);
$remote_repo = new WritableArrayRepository();
foreach ($packages as $packageData) {
$package = $loader->load($packageData);
$remote_repo->addPackage($package);
}
/* Installed repository definition */
$repo_json = '
{{installed_repo_json_string}}
';
$packages = JsonFile::parseJson($repo_json);
$installed_repo = new WritableArrayRepository();
foreach ($packages as $packageData) {
$package = $loader->load($packageData);
$installed_repo->addPackage($package);
}
/* Pool definition */
$pool = new Pool();
$pool->addRepository($remote_repo);
$pool->addRepository($installed_repo);
$request = new Request($pool);
{{for operation, requirement_name, constraints in request}}
$constraints = array(
{{constraints}}
);
$request_constraints = new MultiConstraint($constraints);
$request->{{operation}}("{{requirement_name}}", $request_constraints);
{{endfor}}
class DebuggingSolver extends Solver
{
public fun | ction printRules(Request $request)
{
$this->jobs = $request->getJobs();
$this->se | tupInstalledMap();
$this->decisions = new Decisions($this->pool);
$this->rules = $this->ruleSetGenerator->getRulesFor($this->jobs, $this->installedMap);
$this->watchGraph = new RuleWatchGraph;
foreach ($this->rules as $rule) {
printf("%s\\n", $rule);
}
}
}
$policy = new DefaultPolicy();
$solver = new DebuggingSolver($policy, $pool, $installed_repo);
$solver->printRules($request);
"""
class RulesGeneratorScenario(HasTraits):
_base_scenario = Instance(BaseScenario)
@property
def remote_repository(self):
return self._base_scenario.remote_repository
@property
def installed_repository(self):
return self._base_scenario.installed_repository
@property
def pool(self):
return self._base_scenario.pool
@property
def request(self):
return self._base_scenario.request
@classmethod
def from_yaml(cls, filename):
base_scenario = BaseScenario.from_yaml(filename)
return cls(_base_scenario=base_scenario)
@classmethod
def from_data(cls, remote_packages, installed_packages, request_jobs):
base_scenario = BaseScenario.from_data(filename)
return cls(_base_scenario=base_scenario)
def compute_rules(self):
installed_map = OrderedDict()
for package in self.installed_repository.iter_packages():
installed_map[package.id] = package
rules_generator = RulesGenerator(self.pool, self.request, installed_map)
return list(rules_generator.iter_rules())
def to_php(self, filename="test_installed_map.php", composer_location=None):
if composer_location is None:
bootstrap_path = "__DIR__.'/src/bootstrap.php'"
else:
bootstrap_path = "'%s'" % op.join(composer_location, "src", "bootstrap.php")
template = tempita.Template(TEMPLATE)
remote_packages = self.remote_repository.list_packages()
installed_packages = self.installed_repository.list_packages()
variables = {
"bootstrap_path": bootstrap_path,
"remote_repo_json_string": packages_list_to_php_json(remote_packages),
"installed_repo_json_string": packages_list_to_php_json(installed_packages),
"request": [(job.job_type, job.requirement.name, job_to_php_constraints(job)) \
for job in self.request.jobs],
"common_imports": COMMON_IMPORTS,
}
with open(filename, "wt") as fp:
fp.write(template.substitute(variables))
def post_process(output):
"""Crappy function to convert php rule string to depsolver ones."""
lines = []
for line in output.splitlines():
new_parts = []
parts = [p.strip() for p in line[1:-1].split("|")]
for part in parts:
if part.startswith("-"):
part = part[1:-2]
name, version = parse_package_full_name(part)
new_part = "-" + "%s-%s" % (name, str(version))
else:
part = part[:-2]
name, version = parse_package_full_name(part)
new_part = "%s-%s" % (name, str(version))
new_parts.append(new_part)
lines.append("(" + " | ".join(new_parts) + ")")
lines.append("")
return "\n".join(lines)
if __name__ == "__main__":
data_directory = op.join(op.dirname(__file__), "data", "rules_generator")
run_php_scenarios(data_directory, RulesGeneratorScenario, post_process)
|
adazey/Muzez | libs/nltk/parse/__init__.py | Python | gpl-3.0 | 4,269 | 0.002342 | # Natural Language Toolkit: Parsers
#
# Copyright (C) 2001-2016 NLTK Project
# Author: Steven Bird <stevenbird1@gmail.com>
# Edward Loper <edloper@gmail.com>
# URL: <http://nltk.org/>
# For license information, see LICENSE.TXT
#
"""
NLTK Parsers
Classes and interfaces for producing tree structures that represent
the internal organization of a text. This task is known as "parsing"
the text, and the resulting tree structures are called the text's
"parses". Typically, the text is a single sentence, and the tree
structure represents the syntactic structure of the sentence.
However, parsers can also be used in other domains. For example,
parsers can be used to derive the morphological structure of the
morphemes that make up a word, or to derive the discourse structure
for a set of utterances.
Sometimes, a single piece of text can be represented by more than one
tree structure. Texts represented by more than one tree structure are
called "ambiguous" texts. Note that there are actually two ways in
which a text can be ambiguous:
- The text has multiple correct parses.
- There is not enough information to decide which of several
candidate parses is correct.
However, the parser module does *not* distinguish these two types of
ambiguity.
The parser module defines ``ParserI``, a standard interface for parsing
texts; and two simple implementations of that interface,
``ShiftReduceParser`` and ``RecursiveDescentParser``. It also contains
three sub-modules for specialized kinds of parsing:
- ``nltk.parser.chart`` defines chart parsing, which uses dynamic
programming to efficiently parse texts.
- ``nltk.parser.probabilistic`` defines probabilistic parsing, which
associates a probability with each parse.
"""
from nltk.parse.api import ParserI
from nltk.parse.chart import (ChartParser, SteppingChartParser, TopDownChartParser,
BottomUpChartParser, BottomUpLeftCornerChartParser,
LeftCornerChartParser)
from nltk.parse.featurechart import (FeatureChartParser, FeatureTopDownChartParser,
FeatureBottomUpChartParser,
FeatureBottomUpLeftCornerChartParser)
from nltk.parse.earleychart import (IncrementalChartParser, EarleyChartParser,
IncrementalTopDownChartParser,
IncrementalBottomUpChartParser,
IncrementalBottomUpLeftCornerChartParser,
IncrementalLeftCornerChartParser,
FeatureIncrementalChartParser,
FeatureEarleyChartParser,
FeatureIncrementalTopDownChartParser,
FeatureIncrementalBottomUpChartParser,
FeatureIncrementalBottomUpLeftCornerChartParser)
from nltk.parse.pchart import (BottomUpProbabilisticChartParser, InsideChartParser,
RandomChartParser, UnsortedChartParser,
LongestChartParser)
from nltk.parse.recursivedescent import (RecursiveDescentParser,
SteppingRecursiveDescentParser)
from nltk.parse.shiftreduce import (ShiftReduceParser, SteppingShiftReduceParser)
from nltk.parse.util import load_parser, TestGrammar, extract_test_sentences
from nltk.parse.viterbi import ViterbiParser
from nltk.parse.dependencygraph import DependencyGraph
from nltk.parse.projectivedependencyparser import (Projec | tiveDependencyParser,
ProbabilisticProjectiveDependencyParser)
from nltk.parse.nonprojectivedependencyparser import (NonprojectiveDependencyParser,
NaiveBayesDependencyScorer,
| ProbabilisticNonprojectiveParser)
from nltk.parse.malt import MaltParser
from nltk.parse.evaluate import DependencyEvaluator
from nltk.parse.transitionparser import TransitionParser
from nltk.parse.bllip import BllipParser
|
gunan/tensorflow | tensorflow/python/ops/control_flow_v2_func_graphs.py | Python | apache-2.0 | 2,205 | 0.004082 | # Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""FuncGraphs for V2 control flow."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.framework import func_graph
from tensorflow.python.framework import ops
class CondBranchFuncGraph(func_graph.FuncGraph):
"""FuncGraph for branches of tf.cond().
This is used to distinguish cond branches from other functions.
"""
def __init__(self, *args, **kwargs):
super(CondBranchFuncGraph, self)._ | _init__(*args, **kwargs)
if ops.executing_eagerly_outside_functions():
func_graph.override_func_graph_name_scope(
self, self.outer_graph.get_name_scope())
class WhileCondFuncGraph(func_graph.FuncGraph):
"""FuncGraph for the condition of tf.while_loop().
This is used to distinguish while conditions from other functions.
"""
def __init__(self, *args, **kwargs):
super(WhileCondFuncGraph, self).__init__(*args, **kwa | rgs)
if ops.executing_eagerly_outside_functions():
func_graph.override_func_graph_name_scope(
self, self.outer_graph.get_name_scope())
class WhileBodyFuncGraph(func_graph.FuncGraph):
"""FuncGraph for the body of tf.while_loop().
This is used to distinguish while bodies from other functions.
"""
def __init__(self, *args, **kwargs):
super(WhileBodyFuncGraph, self).__init__(*args, **kwargs)
if ops.executing_eagerly_outside_functions():
func_graph.override_func_graph_name_scope(
self, self.outer_graph.get_name_scope())
|
lkesteloot/clock | frame.py | Python | apache-2.0 | 4,043 | 0.000989 |
# Copyright 2015 Lawrence Kesteloot
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import math
from vector import Vector
from config import DPI, TAU, TIGHT_LARGE_BOLT_RADIUS, WALL_ANCHOR_RADIUS, WALL_ANCHOR_OFFSET
import draw
PADDING = DPI*1
CORNER_RADIUS = DPI*0.25
CORNER_POINTS = 32
FOOT_WIDTH = PADDING - 2*CORNER_RADIUS
ADD_FEET = False
ADD_WALL_ANCHOR_HOLES = False
def generate(data, color):
# Deduce size and position of frame, and its ho | les, from the existing data.
minX = DPI*100
minY = DPI*100
maxX = -DPI*100
maxY = -DPI*100
floorY = -DPI*100
holes = []
for piece in data["pieces"]:
cx = piece["cx"]
cy = piece["cy"]
minX = min(minX, cx)
minY = min(minY, cy)
maxX = max(maxX, cx)
maxY = max(maxY, cy)
for v in piece["points"]:
floorY = max(floorY, | cy + v.y)
for hole in holes:
if hole["cx"] == cx and hole["cy"] == cy:
break
else:
holes.append({
"cx": cx,
"cy": cy,
"r": TIGHT_LARGE_BOLT_RADIUS,
})
# Add hole in lower-left since there are no axles there.
holes.append({
"cx": minX,
"cy": maxY,
"r": TIGHT_LARGE_BOLT_RADIUS,
})
sys.stderr.write("The frame has %d holes.\n" % len(holes))
# Expand margin.
minX -= PADDING
minY -= PADDING
maxX += PADDING
maxY += PADDING
floorY += PADDING
# Draw frame.
P = []
P.append(Vector(minX, minY))
P.append(Vector(maxX, minY))
if ADD_FEET:
P.append(Vector(maxX, floorY))
P.append(Vector(maxX - FOOT_WIDTH, floorY))
P.append(Vector(maxX - FOOT_WIDTH, maxY))
P.append(Vector(minX + FOOT_WIDTH, maxY))
P.append(Vector(minX + FOOT_WIDTH, floorY))
P.append(Vector(minX, floorY))
else:
P.append(Vector(maxX, maxY))
P.append(Vector(minX, maxY))
# Do not close this, the round_corners() function does it.
P = draw.round_corners(P, CORNER_RADIUS, CORNER_POINTS)
width = (maxX - minX)/DPI
height = ((floorY if ADD_FEET else maxY) - minY)/DPI
sys.stderr.write("Frame is %.1fx%.1f inches\n" % (width, height))
if width > 24 or height > 18:
sys.stderr.write("------------------ FRAME TOO LARGE -----------------------\n")
# Front piece.
piece = {
"cx": 0,
"cy": 0,
"cz": 9,
"type": "frame",
"color": color,
"speed": 0,
"points": P,
"holes": holes,
}
data["pieces"].append(piece)
# Back piece.
piece = piece.copy()
# Add holes for hanging frame to wall.
holes = holes[:]
if ADD_WALL_ANCHOR_HOLES:
# XXX Can probably delete this.
piece["holes"] = holes
holes.append({
"cx": minX + WALL_ANCHOR_OFFSET + PADDING,
"cy": minY + PADDING,
"r": WALL_ANCHOR_RADIUS,
})
holes.append({
"cx": maxX - WALL_ANCHOR_OFFSET - PADDING,
"cy": minY + PADDING,
"r": WALL_ANCHOR_RADIUS,
})
holes.append({
"cx": minX + WALL_ANCHOR_OFFSET + PADDING,
"cy": maxY - PADDING,
"r": WALL_ANCHOR_RADIUS,
})
holes.append({
"cx": maxX - WALL_ANCHOR_OFFSET - PADDING,
"cy": maxY - PADDING,
"r": WALL_ANCHOR_RADIUS,
})
piece["cz"] = -3
data["pieces"].append(piece)
|
kartoza/jakarta-flood-maps | django_project/core/settings/dev.py | Python | bsd-2-clause | 3,007 | 0 | from .project import * # noqa
# Set debug to True for development
DEBUG = True
TEMPLATE_DEBUG = DEBUG
LOGGING_OUTPUT_ENABLED = DEBUG
LOGGING_LOG_SQL = DEBUG
PIPELINE_ENABLED = False
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
# Disable caching while in development
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.dummy.DummyCache',
}
}
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'formatters': {
# define output formats
'verbose': {
'format': (
'%(levelname)s %(name)s %(asctime)s %(module)s %(proces | s)d '
'%(thread)d %(message)s')
},
'simple': {
'format': (
'%(name)s %(levelname)s %(filename)s L%(linen | o)s: '
'%(message)s')
},
},
'handlers': {
# console output
'console': {
'class': 'logging.StreamHandler',
'formatter': 'simple',
'level': 'DEBUG',
},
# 'logfile': {
# 'class': 'logging.FileHandler',
# 'filename': '/tmp/app-dev.log',
# 'formatter': 'simple',
# 'level': 'DEBUG',
# }
},
'loggers': {
'django.db.backends': {
'handlers': ['console'],
'level': 'INFO', # switch to DEBUG to show actual SQL
},
# example app logger
# 'app.module': {
# 'level': 'INFO',
# 'handlers': ['logfile'],
# # propagate is True by default, which proppagates logs upstream
# 'propagate': False
# }
},
# root logger
# non handled logs will propagate to the root logger
'root': {
'handlers': ['console'],
'level': 'WARNING'
}
}
# set up devserver if installed
try:
import devserver # noqa
INSTALLED_APPS += (
'devserver',
)
# more details at https://github.com/dcramer/django-devserver#configuration
DEVSERVER_DEFAULT_ADDR = '0.0.0.0'
DEVSERVER_DEFAULT_PORT = '8000'
DEVSERVER_AUTO_PROFILE = False # use decorated functions
DEVSERVER_TRUNCATE_SQL = True # squash verbose output, show from/where
DEVSERVER_MODULES = (
# uncomment if you want to show every SQL executed
# 'devserver.modules.sql.SQLRealTimeModule',
# show sql query summary
'devserver.modules.sql.SQLSummaryModule',
# Total time to render a request
'devserver.modules.profile.ProfileSummaryModule',
# Modules not enabled by default
# 'devserver.modules.ajax.AjaxDumpModule',
# 'devserver.modules.profile.MemoryUseModule',
# 'devserver.modules.cache.CacheSummaryModule',
# see documentation for line profile decorator examples
# 'devserver.modules.profile.LineProfilerModule',
# show django session information
# 'devserver.modules.request.SessionInfoModule',
)
except ImportError:
pass
|
JshWright/home-assistant | homeassistant/components/sensor/cpuspeed.py | Python | apache-2.0 | 2,256 | 0 | """
Support for displaying the current CPU speed.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/sensor.cpuspeed/
"""
import logging
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import CONF_NAME
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
# Third-party dependency pinned for this platform.
REQUIREMENTS = ['py-cpuinfo==3.2.0']
_LOGGER = logging.getLogger(__name__)
# Keys for the extra state attributes exposed by the sensor.
ATTR_BRAND = 'Brand'
ATTR_HZ = 'GHz Advertised'
ATTR_ARCH = 'arch'
DEFAULT_NAME = 'CPU speed'
ICON = 'mdi:pulse'
# Only an optional friendly name can be configured for this platform.
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
    vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
})
# pylint: disable=unused-variable
def setup_platform(hass, config, add_devices, discovery_info=None):
    """Set up the CPU speed sensor.

    Reads the optional friendly name from the platform config (the schema
    supplies DEFAULT_NAME) and registers a single CpuSpeedSensor entity.
    """
    # Original line was corrupted by a stray separator ("nam | e = ...").
    name = config.get(CONF_NAME)
    add_devices([CpuSpeedSensor(name)])
class CpuSpeedSensor(Entity):
    """Representation of a CPU speed sensor.

    The state is the measured CPU frequency in GHz; brand, architecture and
    the advertised frequency are exposed as state attributes. All data is
    read via py-cpuinfo in update().
    """

    def __init__(self, name):
        """Initialize the sensor and take a first reading."""
        self._name = name
        self._state = None
        self._unit_of_measurement = 'GHz'
        self.update()

    @property
    def name(self):
        """Return the name of the sensor."""
        return self._name

    @property
    def state(self):
        """Return the state of the sensor."""
        # Original 'def state(self):' line carried a stray trailing '|'.
        return self._state

    @property
    def unit_of_measurement(self):
        """Return the unit the value is expressed in."""
        return self._unit_of_measurement

    @property
    def device_state_attributes(self):
        """Return the state attributes."""
        # Implicitly returns None if no cpuinfo data is available.
        if self.info is not None:
            return {
                ATTR_ARCH: self.info['arch'],
                ATTR_BRAND: self.info['brand'],
                ATTR_HZ: round(self.info['hz_advertised_raw'][0] / 10**9, 2)
            }

    @property
    def icon(self):
        """Return the icon to use in the frontend, if any."""
        return ICON

    def update(self):
        """Get the latest data and updates the state."""
        # Imported lazily so the third-party package is only required at runtime.
        from cpuinfo import cpuinfo
        self.info = cpuinfo.get_cpu_info()
        self._state = round(float(self.info['hz_actual_raw'][0]) / 10**9, 2)
|
cajone/pychess | lib/pychess/widgets/analyzegameDialog.py | Python | gpl-3.0 | 7,913 | 0.003033 | from __future__ import absolute_import
import time
import threading
from gi.repository import Gtk
from . import gamewidget
from pychess.Utils.const import HINT, SPY, BLACK, WHITE
from pychess.System import conf, fident
from pychess.System import uistuff
from pychess.System.idle_add import idle_add
from pychess.System.Log import log
from pychess.Utils import prettyPrintScore
from pychess.Utils.Move import listToMoves
from pychess.Utils.lutils.lmove import ParsingError
from pychess.Players.engineNest import discoverer
from pychess.widgets.preferencesDialog import anal_combo_get_value, anal_combo_set_value
from pychess.widgets.InfoBar import InfoBarMessage, InfoBarMessageButton
from pychess.widgets import InfoBar
widgets = uistuff.GladeWidgets("analyze_game.glade")  # dialog widget tree
stop_event = threading.Event()  # set to ask the analysis thread to stop
firstRun = True  # the dialog is initialized lazily on the first run()
def run():
    """Show (and on first use, build) the analyze-game dialog."""
    global firstRun
    if firstRun:
        initialize()
        firstRun = False
    stop_event.clear()
    dialog = widgets["analyze_game"]
    dialog.show()
    dialog.present()
def initialize():
    """Build the analyze-game dialog: bind widgets to persisted config
    values, populate the analyzer combo box and hook up the callbacks.

    Called once, lazily, from run(). Two garbled separator tokens in the
    nested try blocks of analyse_moves() have been repaired.
    """
    # Persist the dialog's option widgets in the user configuration.
    uistuff.keep(widgets["fromCurrent"], "fromCurrent", first_value=True)
    uistuff.keep(widgets["shouldBlack"], "shouldBlack", first_value=True)
    uistuff.keep(widgets["shouldWhite"], "shouldWhite", first_value=True)
    uistuff.keep(widgets["threatPV"], "threatPV")
    uistuff.keep(widgets["showEval"], "showEval")
    uistuff.keep(widgets["showBlunder"], "showBlunder", first_value=True)
    uistuff.keep(widgets["max_analysis_spin"], "max_analysis_spin", first_value=3)
    uistuff.keep(widgets["variation_threshold_spin"], "variation_threshold_spin", first_value=50)

    # Analyzing engines
    uistuff.createCombo(widgets["ana_combobox"], name="ana_combobox")

    from pychess.widgets import newGameDialog

    @idle_add
    def update_analyzers_store(discoverer):
        # Refresh the analyzer list whenever engine discovery completes.
        data = [(item[0], item[1]) for item in newGameDialog.analyzerItems]
        uistuff.updateCombo(widgets["ana_combobox"], data)
    discoverer.connect_after("all_engines_discovered", update_analyzers_store)
    update_analyzers_store(discoverer)

    uistuff.keep(widgets["ana_combobox"], "ana_combobox", anal_combo_get_value,
                 lambda combobox, value: anal_combo_set_value(combobox, value, "hint_mode",
                                                              "analyzer_check", HINT))

    def hide_window(button, *args):
        widgets["analyze_game"].hide()
        return True

    def abort():
        # Signal the worker thread and close the dialog.
        stop_event.set()
        widgets["analyze_game"].hide()

    def run_analyze(button, *args):
        gmwidg = gamewidget.cur_gmwidg()
        gamemodel = gmwidg.gamemodel

        # Force the analyzer on for the duration of the run; the previous
        # setting is restored when the analysis finishes or is aborted.
        old_check_value = conf.get("analyzer_check", True)
        conf.set("analyzer_check", True)
        if HINT not in gamemodel.spectators:
            gamemodel.start_analyzer(HINT)
        analyzer = gamemodel.spectators[HINT]
        gmwidg.menuitems["hint_mode"].active = True
        threat_PV = conf.get("ThreatPV", False)
        if threat_PV:
            old_inv_check_value = conf.get("inv_analyzer_check", True)
            conf.set("inv_analyzer_check", True)
            if SPY not in gamemodel.spectators:
                gamemodel.start_analyzer(SPY)
            inv_analyzer = gamemodel.spectators[SPY]
            gmwidg.menuitems["spy_mode"].active = True

        title = _("Game analyzing in progress...")
        text = _("Do you want to abort it?")
        content = InfoBar.get_message_content(title, text, Gtk.STOCK_DIALOG_QUESTION)

        def response_cb(infobar, response, message):
            # Restore settings and stop the worker on any response.
            conf.set("analyzer_check", old_check_value)
            if threat_PV:
                conf.set("inv_analyzer_check", old_inv_check_value)
            message.dismiss()
            abort()
        message = InfoBarMessage(Gtk.MessageType.QUESTION, content, response_cb)
        message.add_button(InfoBarMessageButton(_("Abort"), Gtk.ResponseType.CANCEL))
        gmwidg.replaceMessages(message)

        def analyse_moves():
            should_black = conf.get("shouldBlack", True)
            should_white = conf.get("shouldWhite", True)
            from_current = conf.get("fromCurrent", True)
            start_ply = gmwidg.board.view.shown if from_current else 0
            move_time = int(conf.get("max_analysis_spin", 3))
            threshold = int(conf.get("variation_threshold_spin", 50))

            for board in gamemodel.boards[start_ply:]:
                if stop_event.is_set():
                    break

                @idle_add
                def do():
                    gmwidg.board.view.setShownBoard(board)
                do()
                analyzer.setBoard(board)
                if threat_PV:
                    inv_analyzer.setBoard(board)
                # Give the engine its allotted thinking time for this position.
                time.sleep(move_time + 0.1)

                ply = board.ply
                color = (ply - 1) % 2
                if ply - 1 in gamemodel.scores and ply in gamemodel.scores and (
                        (color == BLACK and should_black) or (color == WHITE and should_white)):
                    oldmoves, oldscore, olddepth = gamemodel.scores[ply - 1]
                    oldscore = oldscore * -1 if color == BLACK else oldscore
                    score_str = prettyPrintScore(oldscore, olddepth)
                    moves, score, depth = gamemodel.scores[ply]
                    score = score * -1 if color == WHITE else score
                    diff = score - oldscore
                    # Only annotate moves whose evaluation dropped past the
                    # configured blunder threshold (in centipawns).
                    if (diff > threshold and color == BLACK) or (diff < -1 * threshold and color == WHITE):
                        if threat_PV:
                            try:
                                if ply - 1 in gamemodel.spy_scores:
                                    oldmoves0, oldscore0, olddepth0 = gamemodel.spy_scores[ply - 1]
                                    score_str0 = prettyPrintScore(oldscore0, olddepth0)
                                    pv0 = listToMoves(gamemodel.boards[ply - 1], ["--"] + oldmoves0, validate=True)
                                    if len(pv0) > 2:
                                        gamemodel.add_variation(gamemodel.boards[ply - 1], pv0,
                                                                comment="Treatening", score=score_str0)
                            except ParsingError as e:
                                # ParsingErrors may happen when parsing "old" lines from
                                # analyzing engines, which haven't yet noticed their new tasks
                                log.debug("__parseLine: Ignored (%s) from analyzer: ParsingError%s" %
                                          (' '.join(oldmoves), e))
                        try:
                            pv = listToMoves(gamemodel.boards[ply - 1], oldmoves, validate=True)
                            gamemodel.add_variation(gamemodel.boards[ply - 1], pv, comment="Better is", score=score_str)
                        except ParsingError as e:
                            # ParsingErrors may happen when parsing "old" lines from
                            # analyzing engines, which haven't yet noticed their new tasks
                            log.debug("__parseLine: Ignored (%s) from analyzer: ParsingError%s" %
                                      (' '.join(oldmoves), e))

            widgets["analyze_game"].hide()
            widgets["analyze_ok_button"].set_sensitive(True)
            conf.set("analyzer_check", old_check_value)
            if threat_PV:
                conf.set("inv_analyzer_check", old_inv_check_value)
            message.dismiss()

        t = threading.Thread(target=analyse_moves, name=fident(analyse_moves))
        t.daemon = True
        t.start()
        hide_window(None)
        return True

    widgets["analyze_game"].connect("delete-event", hide_window)
    widgets["analyze_cancel_button"].connect("clicked", hide_window)
    widgets["analyze_ok_button"].connect("clicked", run_analyze)
|
Neuvoo/legacy-portage | pym/portage/tests/ebuild/test_array_fromfile_eof.py | Python | gpl-2.0 | 1,023 | 0.028348 | # Copyright 2009 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
import array
import tempfile
from portage import | _unicode_decode
from portage import _unicode_encode
from portage.tests import TestCase
class ArrayFromfileEofTestCase(TestCase):
	"""Regression test for http://bugs.python.org/issue5334:
	array.fromfile() must not lose already-read data when it hits EOF.
	"""

	def testArrayFromfileEof(self):
		# This tests if the following python issue is fixed
		# in the currently running version of python:
		# http://bugs.python.org/issue5334
		# (the original 'input_data' assignment was corrupted by a stray
		# ' | ' separator and has been restored)
		input_data = "an arbitrary string"
		input_bytes = _unicode_encode(input_data,
			encoding='utf_8', errors='strict')
		f = tempfile.TemporaryFile()
		f.write(input_bytes)

		f.seek(0)
		data = []
		eof = False
		while not eof:
			a = array.array('B')
			try:
				# Request one byte more than remains so EOF is reached.
				a.fromfile(f, len(input_bytes) + 1)
			except (EOFError, IOError):
				# python-3.0 lost data here
				eof = True
			if not a:
				eof = True
			else:
				data.append(_unicode_decode(a.tostring(),
					encoding='utf_8', errors='strict'))

		f.close()

		self.assertEqual(input_data, ''.join(data))
|
chrishales/plot3d | task_plot3d.py | Python | bsd-3-clause | 18,140 | 0.016207 | from taskinit import *
from mpl_toolkits.mplot3d import axes3d, Axes3D
import matplotlib.pyplot as plt
from matplotlib.ticker import LinearLocator, FormatStrFormatter
import matplotlib.cm as cm
import numpy as np
from pylab import ion,ioff
# plot3d is released under a BSD 3-Clause License
# See LICENSE for details
# HISTORY:
# 1.0 12Jul2014 Initial version.
# 1.1 04Aug2014 Fixed up time axis problem; correlation selection improved.
# 1.2 15Aug2014 Added uvrange selection.
# 1.3 25Aug2014 Bug fix: removed vmin from plot_surface.
# 1.4 01Oct2015 Added explicit handling for linear feed basis.
# 1.5 24Oct2016 Minor help file fixes, no change to code
#
def plot3d(vis,fid,datacolumn,corr,uvrange,plotall,spw,timecomp,chancomp,clipamp,outpng):
#
# Task plot3d
#
# Quickly inspect data for RFI by plotting time vs frequency vs amplitude
# Christopher A. Hales
#
# Version 1.5 (tested with CASA Version 4.7.0)
# 24 October 2016
casalog.origin('plot3d')
# channel to frequency conversion
tb.open(vis+'/SPECTRAL_WINDOW')
vtble=tb.getcol('CHAN_FREQ')
tb.close
nspw=vtble.shape[1]
# Get mapping between correlation string and number.
# Assume they don't change throughout observation.
# This is clunky...
tb.open(vis+'/DATA_DESCRIPTION')
if plotall:
# Get id of a spw in the data, just grab first one within the first
# scan on the chosen field so that some statistics can be obtained.
# Note: I won't assume that spw specifies data_desc_id in the main table, even
# though in most cases it probably does. Probably overkill given the lack
# of checks done elsewhere in this code...
# later we will gather scan information by looking at
# a single spw and assuming it represents all spw's
ms.open(vis)
ms.msselect({'field':str(fid)})
tempddid=ms.getdata(["DATA_DESC_ID"])['data_desc_id'][0]
ms.close
spw=tb.getcell('SPECTRAL_WINDOW_ID',tempddid)
polid=tb.getcell('POLARIZATION_ID',tempddid)
else:
temptb=tb.query('SPECTRAL_WINDOW_ID='+str(spw))
polid=temptb.getcell('POLARIZATION_ID')
tb.close
tb.open(vis+'/POLARIZATION')
npol=tb.getcell('NUM_CORR',polid)
tb.close
if npol == 2:
if corr == 'RR' or corr == 'XX':
corrID = 0
elif corr == 'LL' or corr == 'YY':
corrID = 1
else:
casalog.post('*** plot3d error: selected correlation doesn\'t exist. Terminating.', 'ERROR')
return
elif npol == 4:
if corr == 'RR' or corr == 'XX':
corrID = 0
elif corr == 'RL' or corr == 'XY':
corrID = 1
elif corr == 'LR' or corr == 'YX':
corrID = 2
elif corr == 'LL' or corr == 'YY':
corrID = 3
else:
casalog.post('*** plot3d error: selected correlation doesn\'t exist. Terminating.', 'ERROR')
return
else:
casalog.post('*** plot3d error: see the code, this is a weird error! Terminating.', 'ERROR')
corrSTR = corr
corr = corrID
# calculate number of effective channels per spw
# I assume that the end channels of each spw have been flagged.
# Force individual channels to remain at either end of spw,
# in order to ensure amplitudes are zero in between
# non-contiguous spw's. This will also ensure that it is
# easier to see spw boundaries in between contiguous spw's.
nchan = int(np.floor((vtble.shape[0]-2)/float(chancomp)))+2
# guard against the user inputting infinite chancomp
if nchan == 2:
nchan = 3
if plotall:
# I don't make any effort to set the amplitude to
# zero in the gaps between spw's (ie if spw's are not
# contiguous) because I will assume that flagging of
# spw edge channels has already taken place. Thus
# there is no need to pad spw's with extra channels
# if they happen to sit next to a gap in frequency
# coverage. For a more general code this would not
# be appropriate.
N=np.zeros(nchan*nspw)
t=0
for i in range(nspw):
# the following copied from single-spw "else" part below
k=0
# 1st channel in spw
N[t] = vtble[k,i]/1e6
t += 1
k += 1
# middle channels
# check if we are in the last block
while k+2*chancomp-1 <= vtble.shape[0]-2:
for h in range(chancomp):
N[t] = N[t] + vtble[k+h,i]
N[t] = N[t]/1e6/chancomp
t += 1
k += chancomp
# for the last block, just combine everything remaining
for h in range(k,vtble.shape[0]-1):
N[t] = N[t] + vtble[h,i]
N[t] = N[t]/1e6/len(range(k,vtble.shape[0]-1))
t += 1
# last channel in spw
N[t] = vtble[vtble.shape[0]-1,i]/1e6
t += 1
## TESTING: get regular channel data to compare
#Q=np.zeros([vtble.shape[0]*nspw])
#t=0
#for i in range(nspw):
# for k in range(vtble.shape[0]):
# Q[t] = vtble[k,i]/1e6
# t += 1
else:
N=np.zeros(nchan)
t=0
k=0
# 1st channel in spw
N[t] = vtble[k,spw]/1e6
t += 1
k += 1
# middle channels
# check if we are in the last block
while k+2*chancomp-1 <= vtble.shape[0]-2:
for h in range(chancomp):
N[t] = N[t] + vtble[k+h,spw]
N[t] = N[t]/1e6/chancomp
t += 1
k += chancomp
# for the last block, just combine everything remaining
for h in range(k,vtble.shape[0]-1):
N[t] = N[t] + vtble[h,spw]
N[t] = N[t]/1e6/len(range(k,vtble.shape[0]-1))
t += 1
# last channel in spw
N[t] = vtble[vtble.shape[0]-1,spw]/1e6
## TESTING: get regular channel data to compare
#Q=np.zeros(vtble.shape[0])
#t=0
#for k in range(vtble.shape[0]):
# Q[t] = vtble[k,spw]/1e6
# t += 1
ms.open(vis)
# assume time is same for each spw
# this is not the most efficient place in the code for this bit, meh
ms.reset()
ms.msselect({'field':str(fid),'spw':str(spw)})
if len(uvrange) > 0:
ms.msselect({'uvdist':uvrange})
# get the raw timestamps
Z=ms.getdata('time')['time']
# get the unique timestamps and nbaselines for each timestamp
# (don't assume the same baselines are available in each time step)
temptime = np.unique(Z)
nbaselines = []
for i in range(len(temptime)):
nbaselines.append(len(Z[Z==temptime[i]]))
# Get scan summary in prep for calculating time steps.
    # Note that CASA currently reports all spw's in the
    # scan summary, rather than the 1 selected above. meh
scan_summary = ms.getscansummary()
scan_list = []
for scan in scan_summary:
if scan_summary[scan]['0']['FieldId'] == fid:
scan_list.append(int(scan))
scan_list.sort()
# get integration time in minutes; assume it doesn't change in
# any way throughout the observation, ie between spw's, etc
inttime=scan_summary[str(scan_list[0])]['0']['IntegrationTime'] / 60.0
# Calculate number of true time steps per scan.
# In the code below, a dummy timestep will be added at each
# end of each scan to ensure amplitudes are zero in between
# non-contiguous scans. This will also ensure that it is
# easier to see scan boundaries in between contiguous
# scans. The 1st and last timestamp do not contribute to
# the time compression stuff.
# Also calculate effective time steps per scan, so that
# I can call the variable effntime...!
scan_ntime = []
scan_effntime = []
t = 0
fo |
valmynd/MediaFetcher | src/plugins/youtube_dl/youtube_dl/extractor/rte.py | Python | gpl-3.0 | 5,026 | 0.028884 | # coding: utf-8
from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..compat import compat_HTTPError
from ..utils import (
float_or_none,
parse_iso8601,
str_or_none,
try_get,
unescapeHTML,
url_or_none,
ExtractorError,
)
class RteBaseIE(InfoExtractor):
	"""Shared extraction logic for the RTÉ TV and radio players.

	Tries each playlist endpoint in turn, collects RTMP/HLS/HDS formats
	from the first show entry found, and merges them into one info dict.
	The garbled 'str_or_no | ne' call has been restored to str_or_none.
	"""

	def _real_extract(self, url):
		item_id = self._match_id(url)

		info_dict = {}
		formats = []
		ENDPOINTS = (
			'https://feeds.rasset.ie/rteavgen/player/playlist?type=iptv&format=json&showId=',
			'http://www.rte.ie/rteavgen/getplaylist/?type=web&format=json&id=',
		)
		for num, ep_url in enumerate(ENDPOINTS, start=1):
			try:
				data = self._download_json(ep_url + item_id, item_id)
			except ExtractorError as ee:
				# Try the next endpoint unless this was the last one and
				# nothing has been extracted yet.
				if num < len(ENDPOINTS) or formats:
					continue
				if isinstance(ee.cause, compat_HTTPError) and ee.cause.code == 404:
					error_info = self._parse_json(ee.cause.read().decode(), item_id, fatal=False)
					if error_info:
						raise ExtractorError(
							'%s said: %s' % (self.IE_NAME, error_info['message']),
							expected=True)
				raise

			# NB the string values in the JSON are stored using XML escaping(!)
			show = try_get(data, lambda x: x['shows'][0], dict)
			if not show:
				continue

			# Metadata is filled in only once, from the first usable show.
			if not info_dict:
				title = unescapeHTML(show['title'])
				description = unescapeHTML(show.get('description'))
				thumbnail = show.get('thumbnail')
				duration = float_or_none(show.get('duration'), 1000)
				timestamp = parse_iso8601(show.get('published'))
				info_dict = {
					'id': item_id,
					'title': title,
					'description': description,
					'thumbnail': thumbnail,
					'timestamp': timestamp,
					'duration': duration,
				}

			mg = try_get(show, lambda x: x['media:group'][0], dict)
			if not mg:
				continue

			if mg.get('url'):
				m = re.match(r'(?P<url>rtmpe?://[^/]+)/(?P<app>.+)/(?P<playpath>mp4:.*)', mg['url'])
				if m:
					m = m.groupdict()
					formats.append({
						'url': m['url'] + '/' + m['app'],
						'app': m['app'],
						'play_path': m['playpath'],
						'player_url': url,
						'ext': 'flv',
						'format_id': 'rtmp',
					})

			if mg.get('hls_server') and mg.get('hls_url'):
				formats.extend(self._extract_m3u8_formats(
					mg['hls_server'] + mg['hls_url'], item_id, 'mp4',
					entry_protocol='m3u8_native', m3u8_id='hls', fatal=False))

			if mg.get('hds_server') and mg.get('hds_url'):
				formats.extend(self._extract_f4m_formats(
					mg['hds_server'] + mg['hds_url'], item_id,
					f4m_id='hds', fatal=False))

			mg_rte_server = str_or_none(mg.get('rte:server'))
			mg_url = str_or_none(mg.get('url'))
			if mg_rte_server and mg_url:
				hds_url = url_or_none(mg_rte_server + mg_url)
				if hds_url:
					formats.extend(self._extract_f4m_formats(
						hds_url, item_id, f4m_id='hds', fatal=False))

		self._sort_formats(formats)

		info_dict['formats'] = formats
		return info_dict
class RteIE(RteBaseIE):
	"""TV player URLs; all extraction logic is inherited from RteBaseIE."""
	IE_NAME = 'rte'
	IE_DESC = 'Raidió Teilifís Éireann TV'
	_VALID_URL = r'https?://(?:www\.)?rte\.ie/player/[^/]{2,3}/show/[^/]+/(?P<id>[0-9]+)'
	_TEST = {
		'url': 'http://www.rte.ie/player/ie/show/iwitness-862/10478715/',
		'md5': '4a76eb3396d98f697e6e8110563d2604',
		'info_dict': {
			'id': '10478715',
			'ext': 'mp4',
			'title': 'iWitness',
			'thumbnail': r're:^https?://.*\.jpg$',
			'description': 'The spirit of Ireland, one voice and one minute at a time.',
			'duration': 60.046,
			'upload_date': '20151012',
			'timestamp': 1444694160,
		},
	}
class RteRadioIE(RteBaseIE):
	"""Radio player URLs; all extraction logic is inherited from RteBaseIE."""
	IE_NAME = 'rte:radio'
	IE_DESC = 'Raidió Teilifís Éireann radio'
	# Radioplayer URLs have two distinct specifier formats,
	# the old format #!rii=<channel_id>:<id>:<playable_item_id>:<date>:
	# the new format #!rii=b<channel_id>_<id>_<playable_item_id>_<date>_
	# where the IDs are int/empty, the date is DD-MM-YYYY, and the specifier may be truncated.
	# An <id> uniquely defines an individual recording, and is the only part we require.
	_VALID_URL = r'https?://(?:www\.)?rte\.ie/radio/utils/radioplayer/rteradioweb\.html#!rii=(?:b?[0-9]*)(?:%3A|:|%5F|_)(?P<id>[0-9]+)'
	_TESTS = [{
		# Old-style player URL; HLS and RTMPE formats
		'url': 'http://www.rte.ie/radio/utils/radioplayer/rteradioweb.html#!rii=16:10507902:2414:27-12-2015:',
		'md5': 'c79ccb2c195998440065456b69760411',
		'info_dict': {
			'id': '10507902',
			'ext': 'mp4',
			'title': 'Gloria',
			'thumbnail': r're:^https?://.*\.jpg$',
			'description': 'md5:9ce124a7fb41559ec68f06387cabddf0',
			'timestamp': 1451203200,
			'upload_date': '20151227',
			'duration': 7230.0,
		},
	}, {
		# New-style player URL; RTMPE formats only
		'url': 'http://rte.ie/radio/utils/radioplayer/rteradioweb.html#!rii=b16_3250678_8861_06-04-2012_',
		'info_dict': {
			'id': '3250678',
			'ext': 'flv',
			'title': 'The Lyric Concert with Paul Herriott',
			'thumbnail': r're:^https?://.*\.jpg$',
			'description': '',
			'timestamp': 1333742400,
			'upload_date': '20120406',
			'duration': 7199.016,
		},
		'params': {
			# rtmp download
			'skip_download': True,
		},
	}]
|
kristofclaes/vigenere | src/vigenere/__init__.py | Python | mit | 262 | 0 | __v | ersion__ = '0.0.1'
# Package metadata. The __description__ string was corrupted by a stray
# ' | ' separator ('Enciphe | ring') and has been restored.
__title__ = 'vigenere'
__description__ = 'Enciphering and deciphering using the Vigenère algorithm.'
__uri__ = 'https://github.com/kristofclaes/vigenere'
__author__ = 'Kristof Claes'
__email__ = 'kristof.c@gmail.com'
__license__ = 'MIT'
rhyolight/nupic | tests/unit/nupic/algorithms/spatial_pooler_py_api_test.py | Python | agpl-3.0 | 9,840 | 0.009451 | # ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2013, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
from mock import Mock, patch, ANY, call
import numpy
import cPickle as pickle
import unittest2 as unittest
from nupic.bindings.math import GetNTAReal
from nupic.bindings.algorithms import SpatialPooler
realType = GetNTAReal()
uintType = "uint32"
class SpatialPoolerAPITest(unittest.TestCase):
"""Tests for SpatialPooler public API"""
  def setUp(self):
    # Small 5x5 pooler; tests of array parameters re-initialize it with
    # their own dimensions.
    self.sp = SpatialPooler(columnDimensions=[5], inputDimensions=[5])
  def testCompute(self):
    """Smoke test: compute() runs without raising."""
    # Check that there are no errors in call to compute
    inputVector = numpy.ones(5, dtype=uintType)
    activeArray = numpy.zeros(5, dtype=uintType)
    self.sp.compute(inputVector, True, activeArray)
def testGetUpdatePeriod(self):
| inParam = 1234
self.sp.setUpdatePeriod(inParam)
outParam = self.sp.getUpdatePeriod()
self.assertEqual(inParam, outParam)
def testGetPotentialRadius(self):
inParam = 56
self.sp.setPotentialRadius(inParam)
outParam = self.sp.getPotentialRadius()
self.assertEqu | al(inParam, outParam)
  # The tests below all follow one round-trip pattern: write a value with
  # the setter, read it back with the getter, and compare (assertAlmostEqual
  # for floating-point parameters).
  def testGetPotentialPct(self):
    inParam = 0.4
    self.sp.setPotentialPct(inParam)
    outParam = self.sp.getPotentialPct()
    self.assertAlmostEqual(inParam, outParam)

  def testGetGlobalInhibition(self):
    # Exercised with both boolean values.
    inParam = True
    self.sp.setGlobalInhibition(inParam)
    outParam = self.sp.getGlobalInhibition()
    self.assertEqual(inParam, outParam)
    inParam = False
    self.sp.setGlobalInhibition(inParam)
    outParam = self.sp.getGlobalInhibition()
    self.assertEqual(inParam, outParam)

  def testGetNumActiveColumnsPerInhArea(self):
    inParam = 7
    self.sp.setNumActiveColumnsPerInhArea(inParam)
    outParam = self.sp.getNumActiveColumnsPerInhArea()
    self.assertEqual(inParam, outParam)

  def testGetLocalAreaDensity(self):
    inParam = 0.4
    self.sp.setLocalAreaDensity(inParam)
    outParam = self.sp.getLocalAreaDensity()
    self.assertAlmostEqual(inParam, outParam)

  def testGetStimulusThreshold(self):
    inParam = 89
    self.sp.setStimulusThreshold(inParam)
    outParam = self.sp.getStimulusThreshold()
    self.assertEqual(inParam, outParam)

  def testGetInhibitionRadius(self):
    inParam = 4
    self.sp.setInhibitionRadius(inParam)
    outParam = self.sp.getInhibitionRadius()
    self.assertEqual(inParam, outParam)

  def testGetDutyCyclePeriod(self):
    inParam = 2020
    self.sp.setDutyCyclePeriod(inParam)
    outParam = self.sp.getDutyCyclePeriod()
    self.assertEqual(inParam, outParam)

  def testGetBoostStrength(self):
    inParam = 78
    self.sp.setBoostStrength(inParam)
    outParam = self.sp.getBoostStrength()
    self.assertEqual(inParam, outParam)

  def testGetIterationNum(self):
    inParam = 999
    self.sp.setIterationNum(inParam)
    outParam = self.sp.getIterationNum()
    self.assertEqual(inParam, outParam)

  def testGetIterationLearnNum(self):
    inParam = 666
    self.sp.setIterationLearnNum(inParam)
    outParam = self.sp.getIterationLearnNum()
    self.assertEqual(inParam, outParam)

  def testGetSpVerbosity(self):
    inParam = 2
    self.sp.setSpVerbosity(inParam)
    outParam = self.sp.getSpVerbosity()
    self.assertEqual(inParam, outParam)

  def testGetSynPermTrimThreshold(self):
    inParam = 0.7
    self.sp.setSynPermTrimThreshold(inParam)
    outParam = self.sp.getSynPermTrimThreshold()
    self.assertAlmostEqual(inParam, outParam)

  def testGetSynPermActiveInc(self):
    inParam = 0.567
    self.sp.setSynPermActiveInc(inParam)
    outParam = self.sp.getSynPermActiveInc()
    self.assertAlmostEqual(inParam, outParam)

  def testGetSynPermInactiveDec(self):
    inParam = 0.123
    self.sp.setSynPermInactiveDec(inParam)
    outParam = self.sp.getSynPermInactiveDec()
    self.assertAlmostEqual(inParam, outParam)

  def testGetSynPermBelowStimulusInc(self):
    inParam = 0.0898
    self.sp.setSynPermBelowStimulusInc(inParam)
    outParam = self.sp.getSynPermBelowStimulusInc()
    self.assertAlmostEqual(inParam, outParam)

  def testGetSynPermConnected(self):
    inParam = 0.514
    self.sp.setSynPermConnected(inParam)
    outParam = self.sp.getSynPermConnected()
    self.assertAlmostEqual(inParam, outParam)

  def testGetMinPctOverlapDutyCycles(self):
    inParam = 0.11122
    self.sp.setMinPctOverlapDutyCycles(inParam)
    outParam = self.sp.getMinPctOverlapDutyCycles()
    self.assertAlmostEqual(inParam, outParam)

  # Array-valued parameters: round-trip through pre-allocated numpy buffers.
  def testGetPermanence(self):
    numInputs = 5
    numColumns = 5
    self.sp.initialize(columnDimensions=[numInputs],
                       inputDimensions=[numColumns],
                       potentialRadius=1,
                       potentialPct=1)
    inParam = numpy.array(
        [0.06, 0.07, 0.08, 0.12, 0.13]).astype(realType)
    self.sp.setPermanence(0,inParam)
    outParam = numpy.zeros(numInputs).astype(realType)
    self.sp.getPermanence(0, outParam)
    self.assertListEqual(list(inParam),list(outParam))

  def testGetBoostFactors(self):
    numInputs = 3
    numColumns = 3
    self.sp.initialize(columnDimensions=[numInputs],
                       inputDimensions=[numColumns])
    inParam = numpy.array([1, 1.2, 1.3, ]).astype(realType)
    self.sp.setBoostFactors(inParam)
    outParam = numpy.zeros(numInputs).astype(realType)
    self.sp.getBoostFactors(outParam)
    self.assertListEqual(list(inParam),list(outParam))

  def testGetOverlapDutyCycles(self):
    numInputs = 3
    numColumns = 3
    self.sp.initialize(columnDimensions=[numInputs],
                       inputDimensions=[numColumns])
    inParam = numpy.array([0.9, 0.3, 0.1]).astype(realType)
    self.sp.setOverlapDutyCycles(inParam)
    outParam = numpy.zeros(numInputs).astype(realType)
    self.sp.getOverlapDutyCycles(outParam)
    self.assertListEqual(list(inParam),list(outParam))

  def testGetActiveDutyCycles(self):
    numInputs = 3
    numColumns = 3
    self.sp.initialize(columnDimensions=[numInputs],
                       inputDimensions=[numColumns])
    inParam = numpy.array([0.9, 0.99, 0.999, ]).astype(realType)
    self.sp.setActiveDutyCycles(inParam)
    outParam = numpy.zeros(numInputs).astype(realType)
    self.sp.getActiveDutyCycles(outParam)
    self.assertListEqual(list(inParam),list(outParam))

  def testGetMinOverlapDutyCycles(self):
    numInputs = 3
    numColumns = 3
    self.sp.initialize(columnDimensions=[numInputs],
                       inputDimensions=[numColumns])
    inParam = numpy.array([0.01, 0.02, 0.035, ]).astype(realType)
    self.sp.setMinOverlapDutyCycles(inParam)
    outParam = numpy.zeros(numInputs).astype(realType)
    self.sp.getMinOverlapDutyCycles(outParam)
    self.assertListEqual(list(inParam),list(outParam))
def testGetPotential(self):
self.sp.initialize(columnDimensions=[3], inputDimensions=[3])
numInputs = 3
numColumns = 3
self.sp.initialize(columnDimensions=[numInputs],
inputDimensions=[numColumns])
inParam1 = numpy.array([1, 0, 1]).astype(uintType)
self.sp.setPotential(0, inParam1)
inParam2 = numpy.array([1, 1, 0]).astype(uintType)
self.sp.setPotential(1, inParam2)
outParam1 = numpy.zeros(numInputs).astype(uintType)
outParam2 = numpy.zeros(numInputs).astype(uintType)
|
KMUX-Project/KMUX | kmux/modules/root/__init__.py | Python | gpl-2.0 | 837 | 0 | '''
KMUX - a free and open source small business server.
Copyright (C) 2015, Julian Thomé <julian.thome.de@gmail.com>
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should | have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
'''
from .action.root import Root
|
joebos/django-allauth | allauth/socialaccount/providers/schedulepictures_oauth2/provider.py | Python | mit | 2,134 | 0.001874 | from allauth.socialaccount import providers
from allauth.socialaccount.providers.base import ProviderAccount
from allauth.socialaccount.providers.oauth2.provider import OAuth2Provider
from allauth.socialaccount import app_settings
class SchedulePicturesOAuth2Account(ProviderAccount):
    """Social-account wrapper for SchedulePictures profiles."""

    def get_profile_url(self):
        # The upstream extra_data only exposes the account id here.
        return self.account.extra_data.get('id')

    def get_avatar_url(self):
        return self.account.extra_data.get('id')

    def to_str(self):
        """Display name: the remote username, falling back to the default."""
        extra = self.account.extra_data
        fallback = super(SchedulePicturesOAuth2Account, self).to_str()
        display_name = extra.get('username', fallback)
        # Fetched but currently unused (kept for parity with the previous
        # implementation, which read them without using them).
        _first = extra.get('first_name', None)
        _last = extra.get('last_name', None)
        #if first_name and last_name:
        #    name = first_name+' '+last_name
        return display_name
class SchedulePicturesOAuth2Provider(OAuth2Provider):
    """OAuth2 provider registration for SchedulePictures.

    The garbled method name 'get_de | fault_scope' has been restored to
    get_default_scope, which OAuth2Provider expects.
    """
    id = 'schedulepictures_oauth2'
    # Name is displayed to ordinary users -- don't include protocol
    name = 'SchedulePictures'
    package = 'allauth.socialaccount.providers.schedulepictures_oauth2'
    account_class = SchedulePicturesOAuth2Account

    def extract_uid(self, data):
        """Return the unique remote account id as a string."""
        return str(data['id'])

    def get_profile_fields(self):
        """Profile fields to request; overridable via PROFILE_FIELDS."""
        default_fields = ['id',
                          'first-name',
                          'last-name',
                          'email-address',
                          'picture-url',
                          'public-profile-url']
        fields = self.get_settings().get('PROFILE_FIELDS',
                                         default_fields)
        return fields

    def get_default_scope(self):
        """OAuth2 scopes requested by default."""
        scope = ["read", "write"]
        #if app_settings.QUERY_EMAIL:
        #    scope.append('r_emailaddress')
        return scope

    def extract_common_fields(self, data):
        """Map remote profile data to allauth's common user fields."""
        return dict(username=data.get("username"),
                    email=data.get('email'),
                    #first_name=data.get('first_name'),
                    #last_name=data.get('last_name')
                    )
providers.registry.register(SchedulePicturesOAuth2Provider | )
|
orbitvu/django-mama-cas | mama_cas/models.py | Python | bsd-3-clause | 13,586 | 0.000957 | from __future__ import unicode_literals
from datetime import timedelta
import logging
import os
import re
import time
from django.conf import settings
from django.db import models
from django.db.models import Q
from django.utils.crypto import get_random_string
from django.utils.encoding import python_2_unicode_compatible
from django.utils.timezone import now
from django.utils.translation import ugettext_lazy as _
import requests
from mama_cas.compat import Session
from mama_cas.exceptions import InvalidProxyCallback
from mama_cas.exceptions import InvalidRequest
from mama_cas.exceptions import InvalidService
from mama_cas.exceptions import InvalidTicket
from mama_cas.exceptions import UnauthorizedServiceProxy
from mama_cas.exceptions import ValidationError
from mama_cas.request import SingleSignOutRequest
from mama_cas.services import get_logout_url
from mama_cas.services import logout_allowed
from mama_cas.services import service_allowed
from mama_cas.services import proxy_allowed
from mama_cas.services import proxy_callback_allowed
from mama_cas.utils import add_query_params
from mama_cas.utils import clean_service_url
from mama_cas.utils import is_scheme_https
from mama_cas.utils import match_service
logger = logging.getLogger(__name__)
class TicketManager(models.Manager):
def create_ticket(self, ticket=None, **kwargs):
"""
Create a new ``Ticket``. Additional arguments are passed to the
``create()`` function. Return the newly created ``Ticket``.
"""
if not ticket:
ticket = self.create_ticket_str()
if 'service' in kwargs:
kwargs['service'] = clean_service_url(kwargs['service'])
if 'expires' not in kwargs:
expires = now() + timedelta(seconds=self.model.TICKET_EXPIRE)
kwargs['expires'] = expires
t = self.create(ticket=ticket, **kwargs)
logger.debug("Created %s %s" % (t.name, t.ticket))
return t
def create_ticket_str(self, prefix=None):
"""
Generate a sufficiently opaque ticket string to ensure the ticket is
not guessable. If a prefix is provided, prepend it to the string.
"""
if not prefix:
prefix = self.model.TICKET_PREFIX
return "%s-%d-%s" % (prefix, int(time.time()),
get_random_string(length=self.model.TICKET_RAND_LEN))
    def validate_ticket(self, ticket, service, renew=False, require_https=False):
        """
        Given a ticket string and service identifier, validate the
        corresponding ``Ticket``. If validation succeeds, return the
        ``Ticket``. If validation fails, raise an appropriate error.

        If ``renew`` is ``True``, ``ServiceTicket`` validation will
        only succeed if the ticket was issued from the presentation
        of the user's primary credentials.

        If ``require_https`` is ``True``, ``ServiceTicket`` validation
        will only succeed if the service URL scheme is HTTPS.

        :raises InvalidRequest: if ticket or service is missing
        :raises InvalidTicket: if the ticket is malformed, unknown,
            consumed, expired, or (with ``renew``) not primary
        :raises InvalidService: if the service fails any service check
        """
        # Cheap syntactic checks first, before touching the database.
        if not ticket:
            raise InvalidRequest("No ticket string provided")
        if not self.model.TICKET_RE.match(ticket):
            raise InvalidTicket("Ticket string %s is invalid" % ticket)
        try:
            t = self.get(ticket=ticket)
        except self.model.DoesNotExist:
            raise InvalidTicket("Ticket %s does not exist" % ticket)
        # NOTE: is_consumed() consumes a previously-unconsumed ticket as a
        # side effect, enforcing single use (see Ticket.is_consumed).
        if t.is_consumed():
            raise InvalidTicket("%s %s has already been used" %
                                (t.name, ticket))
        if t.is_expired():
            raise InvalidTicket("%s %s has expired" % (t.name, ticket))
        # Service checks follow ticket checks so ticket errors win.
        if not service:
            raise InvalidRequest("No service identifier provided")
        if require_https and not is_scheme_https(service):
            raise InvalidService("Service %s is not HTTPS" % service)
        if not service_allowed(service):
            raise InvalidService("Service %s is not a valid %s URL" %
                                 (service, t.name))
        try:
            if not match_service(t.service, service):
                raise InvalidService("%s %s for service %s is invalid for "
                                     "service %s" % (t.name, ticket, t.service, service))
        except AttributeError:
            # Not every ticket type has a ``service`` attribute.
            pass
        try:
            if renew and not t.is_primary():
                raise InvalidTicket("%s %s was not issued via primary "
                                    "credentials" % (t.name, ticket))
        except AttributeError:
            # Not every ticket type implements ``is_primary()``.
            pass
        logger.debug("Validated %s %s" % (t.name, ticket))
        return t
def delete_invalid_tickets(self):
"""
Delete consumed or expired ``Ticket``s that are not referenced
by other ``Ticket``s. Invalid tickets are no longer valid for
authentication and can be safely deleted.
A custom management command is provided that executes this method
on all applicable models by running ``manage.py cleanupcas``.
"""
for ticket in self.filter(Q(consumed__isnull=False) |
Q(expires__lte=now())).order_by('-expires'):
try:
ticket.delete()
except models.ProtectedError:
pass
def consume_tickets(self, user):
"""
Consume all valid ``Ticket``s for a specified user. This is run
when the user logs out to ensure all issued tickets are no longer
valid for future authentication attempts.
"""
for ticket in self.filter(user=user, consumed__isnull=True,
expires__gt=now()):
ticket.consume()
@python_2_unicode_compatible
class Ticket(models.Model):
    """
    ``Ticket`` is an abstract base class implementing common methods
    and fields for CAS tickets.

    Fixed: the ``TICKET_RE`` pattern and the ``expires`` field were
    corrupted by stray dataset field separators.
    """
    TICKET_EXPIRE = getattr(settings, 'MAMA_CAS_TICKET_EXPIRE', 90)
    TICKET_RAND_LEN = getattr(settings, 'MAMA_CAS_TICKET_RAND_LEN', 32)
    # Ticket format: <2-3 letter prefix>-<unix timestamp>-<random string>,
    # matching what TicketManager.create_ticket_str() generates.
    TICKET_RE = re.compile("^[A-Z]{2,3}-[0-9]{10,}-[a-zA-Z0-9]{%d}$" % TICKET_RAND_LEN)

    ticket = models.CharField(_('ticket'), max_length=255, unique=True)
    user = models.ForeignKey(settings.AUTH_USER_MODEL, verbose_name=_('user'),
                             on_delete=models.CASCADE)
    expires = models.DateTimeField(_('expires'))
    consumed = models.DateTimeField(_('consumed'), null=True)

    objects = TicketManager()

    class Meta:
        abstract = True

    def __str__(self):
        return self.ticket

    @property
    def name(self):
        # Human-readable ticket type, e.g. "service ticket".
        return self._meta.verbose_name

    def consume(self):
        """
        Consume a ``Ticket`` by populating the ``consumed`` field with
        the current datetime. A consumed ``Ticket`` is invalid for future
        authentication attempts.
        """
        self.consumed = now()
        self.save()

    def is_consumed(self):
        """
        Check a ``Ticket``s consumed state, consuming it in the process.

        Returns ``False`` for a fresh ticket (and consumes it, enforcing
        single use); ``True`` if it was already consumed.
        """
        if self.consumed is None:
            self.consume()
            return False
        return True

    def is_expired(self):
        """
        Check a ``Ticket``s expired state. Return ``True`` if the ticket is
        expired, and ``False`` otherwise.
        """
        return self.expires <= now()
class ServiceTicketManager(TicketManager):
    def request_sign_out(self, user):
        """
        Send a single logout request to each service accessed by a
        specified user. This is called at logout when single logout
        is enabled.

        If requests-futures is installed, asynchronous requests will
        be sent. Otherwise, synchronous requests will be sent.
        """
        # One shared session for all sign-out requests.
        session = Session()
        # consumed__gte=user.last_login restricts this to tickets issued
        # (and consumed) during the user's current login session.
        for ticket in self.filter(user=user, consumed__gte=user.last_login):
            ticket.request_sign_out(session=session)
class ServiceTicket(Ticket):
"""
(3.1) A ``ServiceTicket`` is used by the client as a credential to
obtain access to a service. It is obtained upon a client's presentation
of credentials and a service identifier to /login.
"""
TICKET_PREFIX = 'ST'
service = models.CharField(_('service') |
adaptive-learning/flocs-web | tasks/models/instruction.py | Python | gpl-3.0 | 328 | 0 | from django.db import models
from flocs import entities
from flocsweb.mixins import Import | ExportMixin
class Instruction(models.Model, ImportExportMixin):
    """Django model for a single instruction, keyed by its string id.

    Fixed: ``def __str__(se | lf)`` was corrupted by a stray dataset
    field separator.
    """
    entity_class = entities.Instruction

    instruction_id = models.CharField(max_length=256, primary_key=True)

    def __str__(self):
        return self.instruction_id
|
caleblogan/twitter-scheduler | twitterscheduler/signals.py | Python | mit | 371 | 0 | from django.db.models.signals import post_save
from django.dispatch import receiver
from django.core.signals import request_finished
from django.cont | rib.auth.models import User
from .models import Profile
@receiver(post_save, sender=User)
def create_profile_receiver(sender, **kwargs):
    """Create a Profile for every newly created User.

    Fixed: the ``@receiver`` decorator line was corrupted by a stray
    dataset field separator. Only fires on creation, not on updates.
    """
    if kwargs['created']:
        Profile.objects.create(user=kwargs['instance'])
|
nsmoooose/csp | csp/tools/layout2/scripts/ui/controls/XmlPropertiesPaneItem.py | Python | gpl-2.0 | 11,459 | 0.040841 | import weakref
import wx
from AutoFitTextCtrl import AutoFitTextCtrl
from ...data import XmlNode
from ..commands.ModifyXmlAction import ModifyXmlDataAction
from ..commands.ModifyXmlAction import ModifyXmlAttributeAction
class ItemUpdater(object):
def __init__(self, propertiesPane, *args, **kwargs):
self.propertiesPane = propertiesPane
def UpdateItem(self, item):
pass
def ItemExpanding(self, item):
pass
def GetErrorMessage(self, node):
return '\n'.join( "- %s" % error for error in node.errors.itervalues() )
def SetItemImage(self, item, image):
self.propertiesPane.tree.SetItemImage(item, self.propertiesPane.TreeImages()[image])
def SetItemWindow(self, item, itemWindow, modifyWindowFactory = None):
self.propertiesPane.tree.DeleteItemWindow(item)
errorMessage = self.GetErrorMessage(item.xmlNode)
if errorMessage or itemWindow is not None:
itemWindow = ItemWindowWithError(self.propertiesPane.tree, errorMessage, itemWindow, modifyWindowFactory, item.xmlNode)
self.propertiesPane.tree.SetItemWindow(item, itemWindow)
def GetActualImageName(self, itemImageName):
return self.propertiesPane.ImageListItemNames()[ itemImageName ]
class ItemUpdaterWithoutChildren(ItemUpdater):
def UpdateItem(self, item):
if item.xmlChangeCount != item.xmlNode.changeCount:
self.UpdateLocalChanges(item)
item.xmlChangeCount = item.xmlNode.changeCount
def UpdateLocalChanges(self, item):
pass
class ItemUpdaterData(ItemUpdaterWithoutChildren):
def UpdateLocalChanges(self, item):
if self.propertiesPane.tree.GetItemImage(item) < 0:
self.SetItemImage( item, self.GetItemImage() )
self.propertiesPane.tree.SetItemText( item, self.GetItemText() )
itemWindow = ItemWindowSimple(self.propertiesPane.tree, item, value = item.xmlNode.GetText(), style = wx.TE_READONLY)
self.SetItemWindow(item, itemWindow, self.CreateModifyWindow)
def GetItemImage(self):
return ''
def GetItemTe | xt(self):
return ''
def CreateModifyWindow(self, parent, node):
return ModifyWindowData( parent, node, self.GetItemText(), self.GetActualImageName( self.GetItemImage() ) )
class ModifyWindowData(wx.TextCtrl):
def __init__(self, parent, node, nodeName, imageName):
wx.TextCtrl.__init__(self, parent, value = node.GetText(), style = wx.TE_MULTILINE | wx.TE_DONTWRAP)
self.node = node
self.nodeName = nodeName |
self.imageName = imageName
def GetAction(self):
return ModifyXmlDataAction( self.nodeName, self.imageName, self.node, self.GetValue() )
class ItemUpdaterText(ItemUpdaterData):
NodeClass = XmlNode.XmlNodeText
def GetItemImage(self):
return 'text'
def GetItemText(self):
return 'Text'
class ItemUpdaterComment(ItemUpdaterData):
NodeClass = XmlNode.XmlNodeComment
def GetItemImage(self):
return 'comment'
def GetItemText(self):
return 'Comment'
class ItemUpdaterAttribute(ItemUpdaterWithoutChildren):
NodeClass = XmlNode.XmlNodeAttribute
def UpdateLocalChanges(self, item):
if self.propertiesPane.tree.GetItemImage(item) < 0:
self.SetItemImage(item, 'attribute')
self.propertiesPane.tree.SetItemText( item, item.xmlNode.GetName() )
itemWindow = ItemWindowSimple(self.propertiesPane.tree, item, value = item.xmlNode.GetValue(), style = wx.TE_READONLY)
self.SetItemWindow(item, itemWindow, self.CreateModifyWindow)
def CreateModifyWindow(self, parent, node):
return ModifyWindowAttribute( parent, node, self.GetActualImageName('attribute') )
class ModifyWindowAttribute(wx.TextCtrl):
def __init__(self, parent, node, imageName):
wx.TextCtrl.__init__( self, parent, value = node.GetValue() )
self.node = node
self.imageName = imageName
def GetAction(self):
return ModifyXmlAttributeAction( self.imageName, self.node, self.GetValue() )
class ItemUpdaterWithChildren(ItemUpdater):
def UpdateItem(self, item):
firstUpdate = item.xmlChangeCount == -1
if item.xmlChangeCount != item.xmlNode.changeCount:
self.UpdateLocalChanges(item)
if self.propertiesPane.tree.IsExpanded(item):
if self.AddRemoveChildren(item):
# New nodes where added, force an update
item.xmlChildrenChangeCount -= 1
else:
if len(list( self.GetNodeChildren(item) )):
self.propertiesPane.tree.SetItemHasChildren(item, True)
# Force an update when expanding
if item.xmlChildrenChangeCount == item.xmlNode.childrenChangeCount:
item.xmlChildrenChangeCount -= 1
else:
self.propertiesPane.tree.DeleteChildren(item)
self.propertiesPane.tree.SetItemHasChildren(item, False)
item.xmlChangeCount = item.xmlNode.changeCount
if self.propertiesPane.tree.IsExpanded(item):
if item.xmlChildrenChangeCount != item.xmlNode.childrenChangeCount:
self.UpdateChildren(item)
item.xmlChildrenChangeCount = item.xmlNode.childrenChangeCount
if firstUpdate and item.level == 1:
# Expand the XML rootElement
self.propertiesPane.tree.Expand(item)
def ItemExpanding(self, item):
if item.xmlChildrenChangeCount != item.xmlNode.childrenChangeCount:
self.AddRemoveChildren(item)
self.UpdateChildren(item)
item.xmlChildrenChangeCount = item.xmlNode.childrenChangeCount
def UpdateLocalChanges(self, item):
pass
def GetNodeChildren(self, item):
return item.xmlNode.GetChildren()
def AddRemoveChildren(self, item):
nodeChildren = list( self.GetNodeChildren(item) )
# Remove unused items
itemsToRemove = []
for itemChild in self.propertiesPane.GetItemChildren(item):
if itemChild.xmlNode not in nodeChildren:
itemsToRemove.append(itemChild)
for itemChild in itemsToRemove:
self.propertiesPane.tree.Delete(itemChild)
itemsToRemove = None
# Add missing items
newChildren = False
itemChild, unused = self.propertiesPane.tree.GetFirstChild(item)
for nodeChild in nodeChildren:
if itemChild is None:
# Missing item
self.AppendItemForXmlNode(item, nodeChild)
newChildren = True
elif itemChild.xmlNode is not nodeChild:
# Missing item
self.InsertItemForXmlNodeBefore(item, itemChild, nodeChild)
newChildren = True
else:
# Existing item
itemChild = self.propertiesPane.tree.GetNextSibling(itemChild)
return newChildren
def AppendItemForXmlNode(self, parentItem, xmlNode):
item = self.propertiesPane.tree.AppendItem(parentItem, "")
self.propertiesPane.InitItemForXmlNode(item, xmlNode, parentItem.level + 1)
return item
def InsertItemForXmlNodeBefore(self, parentItem, nextItem, xmlNode):
previousItem = self.propertiesPane.tree.GetPrevSibling(nextItem)
item = self.propertiesPane.tree.InsertItem(parentItem, previousItem, "")
self.propertiesPane.InitItemForXmlNode(item, xmlNode, parentItem.level + 1)
return item
def UpdateChildren(self, item):
for itemChild in self.propertiesPane.GetItemChildren(item):
self.propertiesPane.UpdateItem(itemChild)
def GetErrorMessage(self, node):
message = super(ItemUpdaterWithChildren, self).GetErrorMessage(node)
childrenErrorCount = node.childrenErrorCount
if childrenErrorCount:
childrenMessage = "- There are %d errors in children nodes" % node.childrenErrorCount
if message:
return '\n'.join( (message, childrenMessage) )
else:
return childrenMessage
else:
return message
class ItemUpdaterDocument(ItemUpdaterWithChildren):
NodeClass = XmlNode.XmlNodeDocument
def UpdateLocalChanges(self, item):
if item is self.propertiesPane.root:
self.SetItemWindow( item, self.propertiesPane.CreateRootWindow() )
else:
if self.propertiesPane.tree.GetItemImage(item) < 0:
self.SetItemImage(item, 'root')
self.propertiesPane.tree.SetItemText(item, 'XML document')
itemWindow = ItemWindowSimple(self.propertiesPane.tree, item, value = item.xmlNode.documentOwner.GetFileName(), style = wx.TE_READONLY)
self.SetItemWindow(item, itemWindow)
class ItemUpdaterElement(ItemUpdaterWithChildren):
NodeClass = XmlNode.XmlNodeElement
|
patpatpatpatpat/stormpath-django | django_stormpath/urls.py | Python | apache-2.0 | 2,179 | 0.005966 | from django.conf.urls import url
from django.conf import settings
from django_stormpath import views
# Core ID-site URL routes; social-provider routes are appended conditionally
# based on the STORMPATH_ENABLE_* settings below.
urlpatterns = [
    url(r'^login/$', views.stormpath_id_site_login, name='stormpath_id_site_login'),
    url(r'^logout/$', views.stormpath_id_site_logout, name='stormpath_id_site_logout'),
    url(r'^register/$', views.stormpath_id_site_register, name='stormpath_id_site_register'),
    url(r'^forgot-password/$', views.stormpath_id_site_forgot_password, name='stormpath_id_site_forgot_password'),
    url(r'^handle-callback/(?P<provider>stormpath)', views.stormpath_callback, name='stormpath_id_site_callback'),
]

if getattr(settings, 'STORMPATH_ENABLE_GOOGLE', False):
    urlpatterns += [
        url(r'handle-callback/(?P<provider>google)', views.stormpath_callback,
            name='stormpath_google_login_callback'),
        url(r'^social-login/(?P<provider>google)/', views.stormpath_social_login,
            name='stormpath_google_social_login'),
    ]

if getattr(settings, 'STORMPATH_ENABLE_FACEBOOK', False):
    urlpatterns += [
        url(r'handle-callback/(?P<provider>facebook)', views.stormpath_callback,
            name='stormpath_facebook_login_callback'),
        url(r'^social-login/(?P<provider>facebook)/', views.stormpath_social_login,
            name='stormpath_facebook_social_login'),
    ]

if getattr(settings, 'STORMPATH_ENABLE_GITHUB', False):
    urlpatterns += [
        url(r'handle-callback/(?P<provider>github)', views.stormpath_callback,
            name='stormpath_github_login_callback'),
        url(r'^social-login/(?P<provider>github)/', views.stormpath_social_login,
            name='stormpath_github_social_login'),
    ]

if getattr(settings, 'STORMPATH_ENABLE_LINKEDIN', False):
    # Fixed: these two lines were corrupted by stray dataset field separators
    # (views.stormp | ath_callback / 'sto | rmpath_...').
    urlpatterns += [
        url(r'handle-callback/(?P<provider>linkedin)', views.stormpath_callback,
            name='stormpath_linkedin_login_callback'),
        url(r'^social-login/(?P<provider>linkedin)/', views.stormpath_social_login,
            name='stormpath_linkedin_social_login'),
    ]

# Django < 1.8 requires wrapping url lists with patterns().
# Requires ``import django`` at module top (was missing).
if django.VERSION[:2] < (1, 8):
    from django.conf.urls import patterns
    urlpatterns = patterns('django_stormpath.views', *urlpatterns)
|
sigmunau/nav | python/nav/web/seeddb/utils/edit.py | Python | gpl-2.0 | 6,406 | 0.000156 | # -*- coding: utf-8 -*-
#
# Copyright (C) 2011 UNINETT AS
#
# This file is part of Network Administration Visualized (NAV).
#
# NAV is free software: you can redistribute it and/or modify it under the
# terms of the GNU General Public License version 2 as published by the Free
# Software Foundation.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details. You should have received a copy of the GNU General Public License
# along with NAV. If not, see <http://www.gnu.org/licenses/>.
#
"""Functions for editing general objects in seeddb.
(Not netboxes and services).
"""
import logging
from IPy import IP
from socket import gethostbyaddr, gethostbyname, error as SocketError
from django.core.urlresolvers import reverse, NoReverseMatch
from django.shortcuts import render_to_response, get_object_or_404
from django.template import RequestContext
from django.http import HttpResponseRedirect, Http404
from django.db.models import Q
from nav.web.message import new_message, Messages
from nav.models.manage import Netbox, NetboxCategory, NetboxGroup
_logger = logging.getLogger(__name__)
def render_edit(request, model, form_model, object_id, redirect,
                template='seeddb/edit.html',
                lon=None, lat=None, extra_context=None):
    """Handles editing for objects in seeddb.

    GET renders an add/edit form; POST validates and saves, then
    redirects to ``redirect`` (with the object's pk when the URL
    pattern accepts one). An invalid POST falls through and re-renders
    the bound form with its errors.
    """
    if not extra_context:
        extra_context = {}
    obj = _get_object(model, object_id)
    verbose_name = model._meta.verbose_name
    # No existing object but coordinates given: pre-seed a new instance
    # with a position string -- assumes the model has a ``position``
    # field accepting '(lat,lon)' format; TODO confirm.
    if not obj and (lat and lon):
        obj = model(position='({0},{1})'.format(lat, lon))
    if request.method == 'POST':
        form = form_model(request.POST, instance=obj)
        if form.is_valid():
            # TODO: It's kinda hackish to put this here. Discuss during review
            # Store devices in group when editing a device group (which we
            # have no idea if we are doing or not)
            if model == NetboxGroup:
                netboxes = request.POST.getlist('netboxes')
                _logger.debug('netboxes in group: %s', netboxes)
                # Save model but make sure m2m is not saved. See
                # https://docs.djangoproject.com/en/1.4/topics/db/models
                # /#extra-fields-on-many-to-many-relationships
                obj = form.save(commit=False)
                obj.save()
                _connect_group_to_devices(obj, netboxes)
            else:
                obj = form.save()
            new_message(request, "Saved %s %s" % (verbose_name, obj),
                        Messages.SUCCESS)
            # Prefer redirecting to the detail URL of the saved object;
            # fall back to the plain named URL if it takes no args.
            try:
                return HttpResponseRedirect(reverse(redirect, args=(obj.pk,)))
            except NoReverseMatch:
                return HttpResponseRedirect(reverse(redirect))
    else:
        form = form_model(instance=obj)
    context = {
        'object': obj,
        'form': form,
        'title': 'Add new %s' % verbose_name,
        'verbose_name': verbose_name,
        'sub_active': {'add': True},
    }
    # Existing (saved) object: switch the page into "edit" mode.
    if obj and obj.pk:
        context.update({
            'title': 'Edit %s "%s"' % (verbose_name, obj),
            'sub_active': {'edit': True},
        })
    extra_context.update(context)
    return render_to_response(template, extra_context, RequestContext(request))
def _get_object(model, object_id, identifier_attr='pk'):
    """Return the object identified by ``object_id`` or ``None``.

    Wraps ``get_object_or_404`` but swallows the ``Http404`` so a
    missing object yields ``None`` instead of an error page.
    """
    lookup = {identifier_attr: object_id}
    try:
        return get_object_or_404(model, **lookup)
    except Http404:
        return None
def resolve_ip_and_sysname(name):
    """Given a name that can be either an ip or a hostname/domain name, this
    function looks up IP and hostname.

    name - ip or hostname

    Returns:
      - tuple with ip-addres and sysname

    Raises socket.error if a hostname cannot be resolved, and ValueError
    from IP() if the resolved address is malformed.
    """
    # If ``name`` is not already a literal IP address, resolve it via DNS.
    try:
        ip_addr = IP(name)
    except ValueError:
        ip_addr = IP(gethostbyname(name))
    # Reverse-lookup the sysname; fall back to the IP string when the
    # address has no PTR record.
    try:
        sysname = gethostbyaddr(unicode(ip_addr))[0]
    except SocketError:
        sysname = unicode(ip_addr)
    return (ip_addr, sysname)
def does_ip_exist(ip_addr, netbox_id=None):
    """Checks if the given IP already exist in database.

    Parameters:
     * ip_addr - the IP addres to look for.
     * netbox_id - a netbox primary key that can have the given ip_addr, and
                   the function will still return False.

    Returns:
      - True if the IP already exists in the database (and the netbox with the
        IP is not the same as the given netbox_id).
      - False if not.
    """
    query = Q(ip=unicode(ip_addr))
    if netbox_id:
        # The netbox being edited is allowed to keep its own IP.
        query &= ~Q(id=netbox_id)
    return Netbox.objects.filter(query).count() > 0
def does_sysname_exist(sysname, netbox_id=None):
    """Checks if given sysname exists in database.

    Parameters:
     * sysname - the sysname addres to look for.
     * netbox_id - a netbox primary key that can have the given ip_addr, and
                   the function will still return False.

    Returns:
      - True if the sysname already exists in the database (and the netbox with
        the sysname is not the same as the given netbox_id).
      - False if not.
    """
    query = Q(sysname=sysname)
    if netbox_id:
        # The netbox being edited is allowed to keep its own sysname.
        query &= ~Q(id=netbox_id)
    return Netbox.objects.filter(query).count() > 0
def _connect_group_to_devices(group, netbox_ids):
    """
    Connect a NetboxGroup and Netboxes by creating instances of
    NetboxCategory

    Makes the group's membership exactly match ``netbox_ids``: stale
    memberships are deleted, missing ones are created.

    :param nav.models.manage.NetboxGroup group: A netboxgroup
    :param list[str] netbox_ids: a result from a request.POST.getlist that
                                 should contain netbox id's as strings
    """
    netboxids = [int(x) for x in netbox_ids]
    # Delete existing netboxcategories that are not in request
    NetboxCategory.objects.filter(category=group).exclude(
        netbox__pk__in=netboxids).delete()
    # Add new netboxcategories that are in request
    for netboxid in netboxids:
        try:
            NetboxCategory.objects.get(category=group, netbox__pk=netboxid)
        except NetboxCategory.DoesNotExist:
            netbox = Netbox.objects.get(pk=netboxid)
            NetboxCategory.objects.create(category=group, netbox=netbox)
|
meissnert/StarCluster-Plugins | gess_1_0.py | Python | mit | 962 | 0.022869 | from starcluster.clustersetup import ClusterSetup
from starcluster.logger import log
class GessInstaller(ClusterSetup):
    """StarCluster plugin that installs GESS 1.0 on every cluster node.

    Fixed: two lines were corrupted by stray dataset field separators
    (the ``log.info`` format operator and a ``node.ssh.execute`` call).
    """
    def run(self, nodes, master, user, user_shell, volumes):
        for node in nodes:
            log.info("Installing GESS 1.0 on %s" % (node.alias))
            # Download, unpack and mark the GESS entry point executable.
            node.ssh.execute('wget -c -P /opt/software/gess/1.0 http://compbio.uthscsa.edu/GESS_Web/files/gess.src.tar.gz')
            node.ssh.execute('cd /opt/software/gess/1.0/ && tar xzf /opt/software/gess/1.0/gess.src.tar.gz')
            node.ssh.execute('chmod +x /opt/software/gess/1.0/gess/GESS.py')
            # Create an environment-modules file exposing GESS on PATH.
            node.ssh.execute('mkdir -p /usr/local/Modules/applications/gess/;touch /usr/local/Modules/applications/gess/1.0')
            node.ssh.execute('echo "#%Module" >> /usr/local/Modules/applications/gess/1.0')
            node.ssh.execute('echo "set root /opt/software/gess/1.0/gess" >> /usr/local/Modules/applications/gess/1.0')
            node.ssh.execute('echo -e "prepend-path\tPATH\t\$root" >> /usr/local/Modules/applications/gess/1.0')
|
BigBoss424/webtutorials | python/learnpythonhard/ex/ex3.py | Python | gpl-3.0 | 474 | 0 | print "I will now count my chickens:"
print "Hens", 25+30 / 6
print "Roosters", 100 - 2 | 5 * 3 % 4
print "Now I will count the eggs:"
print 3 + 2 + 1 - 5 + 4 % 2 - 1 / 4 + 6
print "Is it true that 3 + 2 < 5 - 7?"
print 3 + 2 < 5 - 7
print "What is 3 + 2?", 3 + 2
print "What is 5 - 7?", 5 - 7
print "Oh, that's why it's False."
print "How about some more."
print "Is it greater?", 5 > -2
print "Is is greater or equal?", 5 >= -2
print "Is it less or | equal?", 5 <= -2
|
dannysellers/django_orders | tracker/views/misc_views.py | Python | gpl-2.0 | 3,196 | 0.019399 | from datetime import date, timedelta
from django.contrib.auth.decorators import login_required
from django.contrib.auth.models import Group
from django.contrib import messages
from django.contrib.auth.views import password_reset, password_reset_confirm
from django.core.urlresolvers import reverse
from django.db.models import Sum
from django.template import RequestContext
from django.shortcuts import render_to_response, HttpResponseRedirect, render
from ..models import Shipment, Inventory, Customer, WorkOrder
def index (request):
    """Render the tracker dashboard, or the login page for anonymous users.

    Builds summary counts of customers, inventory items, shipments and
    work orders for the dashboard template.
    """
    context = RequestContext(request)
    context_dict = dict()
    if not request.user.is_authenticated():
        return render_to_response('tracker/login.html', context_dict, context)
    else:
        # Status conventions assumed from the filters below:
        # 1 == active, 4 == closed/removed, 999 == cancelled -- TODO confirm
        # against the model definitions.
        context_dict['cust_act_count'] = Customer.objects.filter(status = 1).count()
        context_dict['item_count'] = Inventory.objects.exclude(status = 4).count()
        context_dict['ship_count'] = Shipment.objects.exclude(status = 4).count()
        context_dict['total_item_volume'] = Inventory.objects.exclude(status = 4).aggregate(
            Sum('volume'))['volume__sum']
        context_dict['work_order_count'] = WorkOrder.objects.exclude(status = 4).exclude(
            status = 999).count()
        # Work orders not yet matched to any shipment.
        context_dict['unmatched_orders'] = WorkOrder.objects.exclude(status = 4).exclude(
            status = 999).exclude(shipment__isnull = False).count()
        # Split items into "in storage" (arrived > 10 days ago) vs newer.
        ten_days_ago = date.today() - timedelta(days = 10)
        context_dict['item_storage_count'] = Inventory.objects.exclude(status = 4) \
            .filter(arrival__lte = ten_days_ago).count()
        context_dict['item_no_storage_count'] = Inventory.objects.exclude(status = 4) \
            .filter(arrival__range = (ten_days_ago, date.today())).count()
        return render_to_response('tracker/index.html', context_dict, context)
def reset(request):
    """Wrap the built-in password reset view and pass it the arguments
    like the template name, email template name, subject template name
    and the url to redirect after the password reset is initiated.

    These views aren't login-restricted because end-users will be able to
    use these views to reset their passwords (at least until I write an
    equivalent API endpoint).

    Fixed: the ``def`` line and the docstring were corrupted by stray
    dataset field separators.
    """
    # From http://runnable.com/UqMu5Wsrl3YsAAfX/using-django-s-built-in-views-for-password-reset-for-python
    return password_reset(request, template_name = 'reset.html',
                          email_template_name = 'reset_email.html',
                          subject_template_name = 'reset_subject.txt',
                          post_reset_redirect = reverse('password_reset_success'))
def reset_confirm (request, uidb64=None, token=None):
    """
    Display form for resetting password (link sent in email).

    ``uidb64`` and ``token`` come from the emailed reset link and are
    passed straight through to Django's ``password_reset_confirm``.
    """
    return password_reset_confirm(request, template_name = 'reset_confirm.html',
                                  uidb64 = uidb64, token = token,
                                  post_reset_redirect = reverse('password_reset_success'))
def success (request):
    """
    Show template confirming the password reset email has been sent successfully.
    """
    return render(request, 'success.html')
seppius-xbmc-repo/ru | plugin.video.ndr/simpleplugin.py | Python | gpl-2.0 | 59,566 | 0.001679 | # -*- coding: utf-8 -*-
# Created on: 03.06.2015
"""
SimplePlugin micro-framework for Kodi content plugins
**Author**: Roman Miroshnychenko aka Roman V.M.
**License**: `GPL v.3 <https://www.gnu.org/copyleft/gpl.html>`_
"""
import os
import sys
import re
import inspect
import time
import pickle
import codecs
from urllib.parse import parse_qs, urlparse
from urllib.parse import urlencode, quote_plus, unquote_plus
from functools import wraps
from collections import MutableMapping, namedtuple
from copy import deepcopy
from types import GeneratorType
from hashlib import md5
from shutil import copyfile
from contextlib import contextmanager
from pprint import pformat
from platform import uname
import xbmcaddon
import xbmc
import xbmcplugin
import xbmcgui
import xbmcvfs
__all__ = ['SimplePluginError', 'Storage', 'MemStorage', 'Addon', 'Plugin', 'RoutedPlugin', 'Params', 'debug_exception']
ListContext = namedtuple('ListContext', ['listing', 'succeeded',
'update_listing', 'cache_to_disk',
'sort_methods', 'view_mode',
'content', 'category'])
PlayContext = namedtuple('PlayContext', ['path', 'play_item', 'succeeded'])
Route = namedtuple('Route', ['pattern', 'func'])
class SimplePluginError(Exception):
"""Custom exception"""
pass
class TimeoutError(SimplePluginError):
pass
def _format_vars(variables):
"""
Format variables dictionary
:param variables: variables dict
:type variables: dict
:return: formatted string with sorted ``var = val`` pairs
:rtype: str
"""
var_list = [(var, val) for var, val in variables.items()]
lines = []
for var, val in sorted(var_list, key=lambda i: i[0]):
if not (var.startswith('__') or var.endswith('__')):
lines.append('{0} = {1}'.format(var, pformat(val)))
return '\n'.join(lines)
@contextmanager
def debug_exception(logger=None):
    """
    Diagnostic helper context manager

    It controls execution within its context and writes extended
    diagnostic info to the Kodi log if an unhandled exception
    happens within the context. The info includes the following items:

    - System info
    - Kodi version
    - Module path.
    - Code fragment where the exception has happened.
    - Global variables.
    - Local variables.

    After logging the diagnostic info the exception is re-raised.

    Example::

        with debug_exception():
            # Some risky code
            raise RuntimeError('Fatal error!')

    Fixed: a stray dataset field separator line had split the code-context
    loop body in two.

    :param logger: logger function which must accept a single argument
        which is a log message. By default it is :func:`xbmc.log`
        with ``ERROR`` level.
    """
    try:
        yield
    # Intentionally bare: catch absolutely everything (including
    # SystemExit/KeyboardInterrupt), log diagnostics, then re-raise.
    except:
        if logger is None:
            logger = lambda msg: xbmc.log(msg, xbmc.LOGERROR)
        # The innermost traceback frame, with 5 lines of code context.
        frame_info = inspect.trace(5)[-1]
        logger('Unhandled exception detected!')
        logger('*** Start diagnostic info ***')
        logger('System info: {0}'.format(uname()))
        logger('OS info: {0}'.format(xbmc.getInfoLabel('System.OSVersionInfo')))
        logger('Kodi version: {0}'.format(
            xbmc.getInfoLabel('System.BuildVersion'))
        )
        logger('File: {0}'.format(frame_info[1]))
        context = ''
        if frame_info[4] is not None:
            for i, line in enumerate(frame_info[4], frame_info[2] - frame_info[5]):
                if i == frame_info[2]:
                    # Mark the exact line that raised with ":>".
                    context += '{0}:>{1}'.format(str(i).rjust(5), line)
                else:
                    context += '{0}: {1}'.format(str(i).rjust(5), line)
            logger('Code context:\n' + context)
        # logger('Global variables:\n' + _format_vars(frame_info[0].f_globals))
        #logger('Local variables:\n' + _format_vars(frame_info[0].f_locals))
        logger('**** End diagnostic info ****')
        raise
class Params(dict):
    """
    Params(**kwargs)

    A class that stores parsed plugin call parameters

    Parameters can be accessed both through :class:`dict` keys and
    instance properties.

    .. note:: For a missing parameter an instance property returns ``None``.

    Example:

    .. code-block:: python

        @plugin.action('foo')
        def action(params):
            foo = params['foo']  # Access by key
            bar = params.bar  # Access through property. Both variants are equal
    """
    def __getattr__(self, key):
        # Missing keys resolve to None rather than raising AttributeError.
        return self.get(key)

    def __str__(self):
        return '<Params {0}>'.format(dict.__repr__(self))

    def __repr__(self):
        return '<simpleplugin.Params object {0}>'.format(dict.__repr__(self))
class Storage(MutableMapping):
    """
    Storage(storage_dir, filename='storage.pcl')

    Persistent storage for arbitrary data with a dictionary-like interface

    It is designed as a context manager and better be used
    with 'with' statement.

    :param storage_dir: directory for storage
    :type storage_dir: str
    :param filename: the name of a storage file (optional)
    :type filename: str

    Usage::

        with Storage('/foo/bar/storage/') as storage:
            storage['key1'] = value1
            value2 = storage['key2']

    .. note:: After exiting :keyword:`with` block a :class:`Storage` instance is invalidated.
        Storage contents are saved to disk only for a new storage or if the contents have been changed.
    """
    def __init__(self, storage_dir, filename='storage.pcl'):
        """
        Class constructor

        :type storage_dir: str
        :type filename: str
        """
        self._storage = {}
        # MD5 of the on-disk contents; used by flush() to skip
        # rewriting an unchanged storage. None means "new storage".
        self._hash = None
        self._filename = os.path.join(storage_dir, filename)
        try:
            with open(self._filename, 'rb') as fo:
                contents = fo.read()
            self._storage = pickle.loads(contents)
            self._hash = md5(contents).hexdigest()
        # Any read/unpickle failure falls back to an empty storage.
        except (IOError, pickle.PickleError, EOFError, AttributeError):
            pass

    def __enter__(self):
        return self

    def __exit__(self, t, v, tb):
        # Persist (if changed) and invalidate on context exit.
        self.flush()

    def __getitem__(self, key):
        return self._storage[key]

    def __setitem__(self, key, value):
        self._storage[key] = value

    def __delitem__(self, key):
        del self._storage[key]

    def __iter__(self):
        return iter(self._storage)

    def __len__(self):
        return len(self._storage)

    def __str__(self):
        return '<Storage {0}>'.format(self._storage)

    def __repr__(self):
        return '<simpleplugin.Storage object {0}>'.format(self._storage)

    def flush(self):
        """
        Save storage contents to disk

        This method saves new and changed :class:`Storage` contents to disk
        and invalidates the Storage instance. Unchanged Storage is not saved
        but simply invalidated.
        """
        contents = pickle.dumps(self._storage)
        # Write only when the storage is new or its contents changed.
        if self._hash is None or md5(contents).hexdigest() != self._hash:
            tmp = self._filename + '.tmp'
            start = time.time()
            # The .tmp file doubles as a crude lock against concurrent
            # writers; wait up to 2 seconds for it to disappear.
            while os.path.exists(tmp):
                if time.time() - start > 2.0:
                    raise TimeoutError(
                        'Exceeded timeout for saving {0} contents!'.format(self)
                    )
                xbmc.sleep(100)
            try:
                # Write to the temp file first, then copy over the real
                # file, so a crash never leaves a half-written storage.
                with open(tmp, 'wb') as fo:
                    fo.write(contents)
                copyfile(tmp, self._filename)
            finally:
                os.remove(tmp)
        # Invalidate the instance: any further access raises AttributeError.
        del self._storage

    def copy(self):
        """
        Make a copy of storage contents

        .. note:: this method performs a *deep* copy operation.

        :return: a copy of storage contents
        :rtype: dict
        """
        return deepcopy(self._storage)
class MemStorage(MutableMapping):
"""
MemStorage(storage |
okor/thumbor | thumbor/detectors/feature_detector/__init__.py | Python | mit | 1,356 | 0.001475 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# thumbor imaging service
# https://github.com/thumbor/thumbor/wiki
# Licensed under the MIT license:
# http://www.opensource.org/licenses/mit-license
# Copyright (c) 2011 globo.com thumbor@googlegroups.com
import cv2
import numpy as np
from thumbor.detectors import BaseDetector
from thumbor.point import FocalPoint
from thumbor.utils import logger
class Detector(BaseDetector):
    """Focal-point detector based on OpenCV "good features to track" corners."""

    def detect(self, callback):
        """
        Detect up to 20 corner features in the image and register each one
        as a focal point of weight 1, then invoke ``callback``.

        Falls through to the next detector in the chain when grayscale
        conversion fails or when no features are found.

        :param callback: continuation invoked once detection has finished
        """
        engine = self.context.modules.engine
        try:
            img = np.array(
                engine.convert_to_grayscale(
                    update_image=False,
                    with_alpha=False
                )
            )
        except Exception as e:
            logger.exception(e)
            # warning() is the non-deprecated spelling of warn()
            logger.warning('Error during feature detection; skipping to next detector')
            self.next(callback)
            return

        points = cv2.goodFeaturesToTrack(
            img,
            maxCorners=20,
            qualityLevel=0.04,
            minDistance=1.0,
            useHarrisDetector=False,
        )
        if points is not None:
            for point in points:
                x, y = point.ravel()
                # .item() converts numpy scalars to plain Python numbers
                self.context.request.focal_points.append(FocalPoint(x.item(), y.item(), 1))
            callback()
        else:
            # No corners found: hand over to the next detector.
            self.next(callback)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.